├── .circleci └── config.yml ├── .gitignore ├── .prettierignore ├── LICENSE ├── README.md ├── as-pect.config.js ├── assembly ├── __tests__ │ ├── as-pect.d.ts │ └── util.spec.ts ├── debug.ts ├── env.ts ├── evm.ts ├── keccak.ts ├── main.ts ├── opcodes.ts ├── package.json ├── rlp.ts ├── src │ └── ethash_keccak_funcs.wat ├── token.ts ├── tsconfig.json └── util.ts ├── greenkeeper.json ├── gulpfile.js ├── package.json ├── prettier.config.js ├── src ├── ee.ts ├── multiproof.ts ├── relayer │ ├── basic-evm.ts │ ├── bin.ts │ ├── fixture │ │ ├── add.json │ │ └── add_store.json │ ├── lib.ts │ ├── realistic.ts │ └── rpc.ts └── util.ts ├── test ├── ee.ts ├── fixture │ ├── eth_getProof_sample.json │ ├── eth_getproof_result.json │ ├── hex_encoded_securetrie_test.json │ ├── trietest.json │ └── trietest_secureTrie.json ├── multiproof.ts ├── realistic.ts └── relayer.ts ├── tsconfig.json ├── tsconfig.prod.json └── tslint.json /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | jobs: 3 | build: 4 | working_directory: ~/project/biturbo 5 | docker: 6 | - image: circleci/node:8-browsers 7 | steps: 8 | - checkout 9 | - restore_cache: 10 | key: dependency-cache-{{ checksum "package.json" }} 11 | - run: 12 | name: Install dependencies 13 | command: npm install 14 | - save_cache: 15 | key: dependency-cache-{{ checksum "package.json" }} 16 | paths: 17 | - node_modules 18 | - run: 19 | name: Lint 20 | command: npm run lint 21 | - run: 22 | name: Run tests 23 | command: npm test 24 | #- run: 25 | # name: Run assemblyscript tests 26 | # command: npm run test:as 27 | - run: 28 | name: Run token ee with generated input 29 | command: npm run token 30 | - run: 31 | name: Run token with realistic rpc input 32 | command: npm run token:realistic 33 | - run: 34 | name: Run EVM ee with generated input 35 | command: npm run evm 36 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | package-lock.json 4 | yarn.lock 5 | dist 6 | 7 | # Scout config files 8 | *.yaml 9 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .vscode 3 | package.json 4 | dist 5 | .nyc_output 6 | *.json 7 | docs 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 
29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Biturbo 2 | 3 | [![Greenkeeper badge](https://badges.greenkeeper.io/ewasm/biturbo.svg)](https://greenkeeper.io/) 4 | 5 | Biturbo is an Eth1 EE prototype, which uses Alexey Akhunov's [turboproof](https://github.com/ledgerwatch/turbo-geth/blob/master/docs/programmers_guide/guide.md) scheme as witness format. It started as a fork of @cdetrio's [turbo-mpas](https://github.com/cdetrio/scout.ts/tree/turbo-mpas) called Turbo Token, which aimed to prototype a token transfer EE compatible with Eth1 (i.e. MPT as accumulator, RLP for serialization and the same account structure). It's now being extended with EVM support, but we intend to maintain the token EE to allow for further experimentation and benchmarking. Therefore the repo contains both TurboEVM and TurboToken. 
6 | 7 | In addition to the AS EE implementation, the repo also includes an [implementation](src/multiproof.ts) of Turboproof in typescript, a [relayer](src/relayer/lib.ts) which can generate scout test suites to simulate a shard block, and an [implementation](src/ee.ts) of the high-level EE logic in typescript. 8 | 9 | ## Install 10 | 11 | After cloning the repo, install the dependencies via npm: 12 | 13 | ```sh 14 | npm install 15 | ``` 16 | 17 | ## Run 18 | 19 | If you simply want to try one of the EEs, run: 20 | 21 | ```sh 22 | # To run token EE 23 | npm run token 24 | 25 | # To run EVM EE 26 | npm run evm 27 | ``` 28 | 29 | Each of the above commands does 3 things. It runs a simulated relayer to generate input data for the EE, then compiles the AssemblyScript source code, and finally runs the resulting Wasm module with the generated input data. To run each of the steps separately you can use the corresponding npm scripts: 30 | 31 | ```sh 32 | npm run token:relayer 33 | npm run token:build 34 | npm run token:run 35 | ``` 36 | 37 | ## Test 38 | 39 | There are some unit tests for the multiproof generation/verification and encoding. You can run them via: 40 | 41 | ```sh 42 | npm t 43 | ``` 44 | -------------------------------------------------------------------------------- /as-pect.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | /** 3 | * A set of globs passed to the glob package that qualify typescript files for testing. 4 | */ 5 | include: ["assembly/__tests__/**/*.spec.ts"], 6 | /** 7 | * A set of globs passed to the glob package that quality files to be added to each test. 8 | */ 9 | add: ["assembly/__tests__/**/*.include.ts"], 10 | /** 11 | * All the compiler flags needed for this test suite. Make sure that a binary file is output. 12 | */ 13 | flags: { 14 | /** To output a wat file, uncomment the following line. 
*/ 15 | // "--textFile": ["output.wat"], 16 | /** A runtime must be provided here. */ 17 | "--runtime": ["none"] // Acceptable values are: full, half, stub (arena), and none 18 | }, 19 | /** 20 | * A set of regexp that will disclude source files from testing. 21 | */ 22 | disclude: [/node_modules/], 23 | /** 24 | * Add your required AssemblyScript imports here. 25 | */ 26 | imports: {}, 27 | /** 28 | * All performance statistics reporting can be configured here. 29 | */ 30 | performance: { 31 | /** Enable performance statistics gathering for every test. */ 32 | enabled: false, 33 | /** Set the maximum number of samples to run for every test. */ 34 | maxSamples: 10000, 35 | /** Set the maximum test run time in milliseconds for every test. */ 36 | maxTestRunTime: 5000, 37 | /** Report the median time in the default reporter for every test. */ 38 | reportMedian: true, 39 | /** Report the average time in milliseconds for every test. */ 40 | reportAverage: true, 41 | /** Report the standard deviation for every test. */ 42 | reportStandardDeviation: false, 43 | /** Report the maximum run time in milliseconds for every test. */ 44 | reportMax: false, 45 | /** Report the minimum run time in milliseconds for every test. */ 46 | reportMin: false, 47 | }, 48 | /** 49 | * Add a custom reporter here if you want one. The following example is in typescript. 
50 | * 51 | * @example 52 | * import { TestReporter, TestGroup, TestResult, TestContext } from "as-pect"; 53 | * 54 | * export class CustomReporter extends TestReporter { 55 | * // implement each abstract method here 56 | * public abstract onStart(suite: TestContext): void; 57 | * public abstract onGroupStart(group: TestGroup): void; 58 | * public abstract onGroupFinish(group: TestGroup): void; 59 | * public abstract onTestStart(group: TestGroup, result: TestResult): void; 60 | * public abstract onTestFinish(group: TestGroup, result: TestResult): void; 61 | * public abstract onFinish(suite: TestContext): void; 62 | * } 63 | */ 64 | // reporter: new CustomReporter(), 65 | /** 66 | * Specify if the binary wasm file should be written to the file system. 67 | */ 68 | outputBinary: false, 69 | }; 70 | -------------------------------------------------------------------------------- /assembly/__tests__/as-pect.d.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * This function creates a test group in the test loader. 3 | * 4 | * @param {string} description - This is the name of the test group. 5 | * @param {() => void} callback - A function that contains all of the closures for this test group. 6 | * 7 | * @example 8 | * 9 | * ```ts 10 | * describe("my test suite", (): void => { 11 | * // put your tests here 12 | * }); 13 | * ``` 14 | */ 15 | declare function describe(description: string, callback: () => void): void 16 | 17 | /** 18 | * This function creates a test inside the given test group. It must be placed inside a describe 19 | * block. 20 | * 21 | * @param {string} description - This is the name of the test, and should describe a behavior. 22 | * @param {() => void} callback - A function that contains a set of expectations for this test. 
23 | * 24 | * @example 25 | * 26 | * ```ts 27 | * describe("the meaning of life", (): void => { 28 | * it("should be 42", (): void => { 29 | * // put your expectations here 30 | * expect(29 + 13).toBe(42); 31 | * }); 32 | * }); 33 | * ``` 34 | */ 35 | declare function it(description: string, callback: () => void): void 36 | 37 | /** 38 | * A test that does not run, and is longhand equivalent to using todo function without a 39 | * callback. This test does not get run and is reported like a todo. 40 | * 41 | * @param {string} description - This is the name of the test, and should describe a behavior. 42 | * @param {() => void} callback - A function that contains a set of expectations for this test. 43 | */ 44 | declare function xit(description: string, callback: () => void): void 45 | 46 | /** 47 | * A test that does not run, and is longhand equivalent to using todo function without a 48 | * callback. This test does not get run and is reported like a todo. 49 | * 50 | * @param {string} description - This is the name of the test, and should describe a behavior. 51 | * @param {() => void} callback - A function that contains a set of expectations for this test. 52 | */ 53 | declare function xtest(description: string, callback: () => void): void 54 | 55 | /** 56 | * This function creates a test inside the given test group. It must be placed inside a describe 57 | * block. 58 | * 59 | * @param {string} description - This is the name of the test, and should describe a behavior. 60 | * @param {() => void} callback - A function that contains a set of expectations for this test. 61 | * 62 | * @example 63 | * ```ts 64 | * describe("the meaning of life", (): void => { 65 | * test("the value should be 42", (): void => { 66 | * // put your expectations here 67 | * expect(29 + 13).toBe(42); 68 | * }); 69 | * }); 70 | * ``` 71 | */ 72 | declare function test(description: string, callback: () => void): void 73 | 74 | /** 75 | * This function creates a test that is expected to fail. 
This is useful to verify if a given 76 | * behavior is expected to throw. 77 | * 78 | * @param {string} description - This is the name of the test, and should describe a behavior. 79 | * @param {() => void} callback - A function that contains a set of expectations for this test. 80 | * @param {string?} message - A message that describes why the test should fail. 81 | * 82 | * @example 83 | * 84 | * ```ts 85 | * describe("the meaning of life", (): void => { 86 | * throws("the value should be 42", (): void => { 87 | * // put your expectations here 88 | * expect(29 + 13).not.toBe(42); 89 | * }); 90 | * }); 91 | * ``` 92 | */ 93 | declare function throws(description: string, callback: () => void, message?: string): void 94 | 95 | /** 96 | * This function creates a test that is expected to fail. This is useful to verify if a given 97 | * behavior is expected to throw. 98 | * 99 | * @param {string} description - This is the name of the test, and should describe a behavior. 100 | * @param {() => void} callback - A function that contains a set of expectations for this test. 101 | * @param {string?} message - A message that describes why the test should fail. 102 | * 103 | * @example 104 | * 105 | * ```ts 106 | * describe("the meaning of life", (): void => { 107 | * itThrows("when the value should be 42", (): void => { 108 | * // put your expectations here 109 | * expect(29 + 13).not.toBe(42); 110 | * }, "The value is actually 42."); 111 | * }); 112 | * ``` 113 | */ 114 | declare function itThrows(description: string, callback: () => void, message?: string): void 115 | 116 | /** 117 | * This function creates a callback that is called before each individual test is run in this test 118 | * group. 119 | * 120 | * @param {function} callback - The function to be run before each test in the current test group. 
121 | * 122 | * @example 123 | * 124 | * ```ts 125 | * // create a global 126 | * var cat: Cat = new Cat(); 127 | * 128 | * describe("cats", (): void => { 129 | * beforeEach((): void => { 130 | * cat.meow(1); // meow once per test 131 | * }); 132 | * }); 133 | * ``` 134 | */ 135 | declare function beforeEach(callback: () => void): void 136 | 137 | /** 138 | * This function creates a callback that is called before the whole test group is run, and only 139 | * once. 140 | * 141 | * @param {function} callback - The function to be run before each test in the current test group. 142 | * 143 | * @example 144 | * 145 | * ```ts 146 | * // create a global 147 | * var dog: Dog = null; 148 | * describe("dogs", (): void => { 149 | * beforeAll((): void => { 150 | * dog = new Dog(); // create a single dog once before the tests start 151 | * }); 152 | * }); 153 | * ``` 154 | */ 155 | declare function beforeAll(callback: () => void): void 156 | 157 | /** 158 | * This function creates a callback that is called after each individual test is run in this test 159 | * group. 160 | * 161 | * @param {function} callback - The function to be run after each test in the current test group. 162 | * 163 | * @example 164 | * 165 | * ```ts 166 | * // create a global 167 | * var cat: Cat = new Cat(); 168 | * 169 | * describe("cats", (): void => { 170 | * afterEach((): void => { 171 | * cat.sleep(12); // cats sleep a lot 172 | * }); 173 | * }); 174 | * ``` 175 | */ 176 | declare function afterEach(callback: () => void): void 177 | 178 | /** 179 | * This function creates a callback that is called after the whole test group is run, and only 180 | * once. 181 | * 182 | * @param {function} callback - The function to be run after each test in the current test group. 
183 | * 184 | * @example 185 | * 186 | * ```ts 187 | * // create a global 188 | * var dog: Dog = null; 189 | * describe("dogs", (): void => { 190 | * afterAll((): void => { 191 | * memory.free(changetype(dog)); // free some memory 192 | * }); 193 | * }); 194 | * ``` 195 | */ 196 | declare function afterAll(callback: () => void): void 197 | 198 | /** 199 | * Describes a value and returns an expectation to test the value. 200 | * 201 | * @type {T} - The expectation's type. 202 | * @param {T} actual - The value being tested. 203 | * 204 | * @example 205 | * 206 | * ```ts 207 | * expect(42).not.toBe(-1, "42 should not be -1"); 208 | * expect(19 + 23).toBe(42, "19 + 23 should equal 42"); 209 | * ``` 210 | */ 211 | declare function expect(actual: T | null): Expectation 212 | 213 | /** 214 | * Describes a void function and returns an expectation to test the function. 215 | * 216 | * @param {() => void} callback - The callback being tested. 217 | * 218 | * @example 219 | * 220 | * ```ts 221 | * expectFn((): void => unreachable()).toThrow("unreachables do not throw"); 222 | * expectFn((): void => { 223 | * cat.meow(); 224 | * }).not.toThrow("Uhoh, cats can't meow!");; 225 | * ``` 226 | */ 227 | declare function expectFn(cb: () => void): Expectation<() => void> 228 | 229 | /** 230 | * Describes a test that needs to be written. 231 | * 232 | * @param {string} description - The description of the test that needs to be written. 233 | */ 234 | declare function todo(description: string): void 235 | 236 | /** 237 | * Logs a single value to the logger, and is stringified. It works for references, values, and 238 | * strings. 239 | * 240 | * @type {T} - The type to be logged. 241 | * @param {T | null} value - The value to be logged. 
242 | * 243 | * @example 244 | * 245 | * ```ts 246 | * log("This is a logged value."); 247 | * log(42); 248 | * log(new Vec(1, 2, 3)); 249 | * log(null); 250 | * ``` 251 | */ 252 | declare function log(value: T | null): void 253 | 254 | /** 255 | * An expectation for a value. 256 | */ 257 | // @ts-ignore 258 | declare class Expectation { 259 | /** 260 | * Create a new expectation. 261 | * 262 | * @param {T | null} actual - The actual value of the expectation. 263 | */ 264 | constructor(actual: T | null) 265 | 266 | /** 267 | * This expectation performs a strict equality on value types and reference types. 268 | * 269 | * @param {T | null} expected - The value to be compared. 270 | * @param {string} message - The optional message that describes the expectation. 271 | * 272 | * @example 273 | * 274 | * ```ts 275 | * expect(42).not.toBe(-1, "42 should not be -1"); 276 | * expect(19 + 23).toBe(42, "19 + 23 should equal 42"); 277 | * ``` 278 | */ 279 | toBe(expected: T | null, message?: string): void 280 | 281 | /** 282 | * This expectation performs a strict equality on value types and performs a memcompare on 283 | * reference types. If the reference type `T` has reference types as properties, the comparison does 284 | * not perform property traversal. It will only compare the pointer values in the memory block, and 285 | * only compare `offsetof()` bytes, regardless of the allocated block size. 286 | * 287 | * @param {T | null} expected - The value to be compared. 288 | * @param {string} message - The optional message that describes the expectation. 289 | * 290 | * @example 291 | * 292 | * ```ts 293 | * expect(new Vec3(1, 2, 3)).toStrictEqual(new Vec(1, 2, 3), "Vectors of the same shape should be equal"); 294 | * ``` 295 | */ 296 | toStrictEqual(expected: T | null, message?: string): void 297 | 298 | /** 299 | * This expectation performs a strict memory block equality based on the allocated block sizes. 
300 | * 301 | * @param {T | null} expected - The value to be compared. 302 | * @param {string} message - The optional message that describes the expectation. 303 | * 304 | * @example 305 | * 306 | * ```ts 307 | * expect(new Vec3(1, 2, 3)).toBlockEqual(new Vec(1, 2, 3), "Vectors of the same shape should be equal"); 308 | * ``` 309 | */ 310 | toBlockEqual(expected: T | null, message?: string): void 311 | 312 | /** 313 | * If the value is callable, it calls the function, and fails the expectation if it throws, or hits 314 | * an unreachable(). 315 | * 316 | * @param {string} message - The optional message that describes the expectation. 317 | * 318 | * @example 319 | * 320 | * ```ts 321 | * expectFn((): void => unreachable()).toThrow("unreachable() should throw."); 322 | * expectFn((): void => { 323 | * cat.sleep(100); // cats can sleep quite a lot 324 | * }).not.toThrow("cats should sleep, not throw"); 325 | * ``` 326 | */ 327 | toThrow(message?: string): void 328 | 329 | /** 330 | * This expecation asserts that the value is truthy, like in javascript. If the value is a string, 331 | * then strings of length 0 are not truthy. 332 | * 333 | * @param {string} message - The optional message that describes the expectation. 334 | * 335 | * @example 336 | * 337 | * ```ts 338 | * expect(true).toBeTruthy("true is truthy."); 339 | * expect(1).toBeTruthy("numeric values that are not 0 are truthy."); 340 | * expect(new Vec3(1, 2, 3)).toBeTruthy("reference types that aren't null are truthy."); 341 | * expect(false).not.toBeTruthy("false is not truthy."); 342 | * expect(0).not.toBeTruthy("0 is not truthy."); 343 | * expect(null).not.toBeTruthy("null is not truthy."); 344 | * ``` 345 | */ 346 | toBeTruthy(message?: string): void 347 | 348 | /** 349 | * This expectation tests the value to see if it is null. If the value is a value type, it is 350 | * never null. If the value is a reference type, it performs a strict null comparison. 
351 | * 352 | * @param {string} message - The optional message that describes the expectation. 353 | * 354 | * @example 355 | * 356 | * ```ts 357 | * expect(0).not.toBeNull("numbers are never null"); 358 | * expect(null).toBeNull("null reference types are null."); 359 | * ``` 360 | */ 361 | toBeNull(message?: string): void 362 | 363 | /** 364 | * This expecation assert that the value is falsy, like in javascript. If the value is a string, 365 | * then strings of length 0 are falsy. 366 | * 367 | * @param {string} message - The optional message that describes the expectation. 368 | * 369 | * @example 370 | * 371 | * ```ts 372 | * expect(false).toBeFalsy("false is falsy."); 373 | * expect(0).toBeFalsy("0 is falsy."); 374 | * expect(null).toBeFalsy("null is falsy."); 375 | * expect(true).not.toBeFalsy("true is not falsy."); 376 | * expect(1).not.toBeFalsy("numeric values that are not 0 are not falsy."); 377 | * expect(new Vec3(1, 2, 3)).not.toBeFalsy("reference types that aren't null are not falsy."); 378 | * ``` 379 | */ 380 | toBeFalsy(message?: string): void 381 | 382 | /** 383 | * This expectation asserts that the value is greater than the expected value. Since operators can 384 | * be overloaded in assemblyscript, it's possible for this to work on reference types. 385 | * 386 | * @param {T | null} expected - The expected value that the actual value should be greater than. 387 | * @param {string} message - The optional message that describes this expectation. 388 | * 389 | * @example 390 | * 391 | * ```ts 392 | * expect(10).toBeGreaterThan(4); 393 | * expect(12).not.toBeGreaterThan(42); 394 | * ``` 395 | */ 396 | toBeGreaterThan(expected: T | null, message?: string): void 397 | 398 | /** 399 | * This expectation asserts that the value is less than the expected value. Since operators can 400 | * be overloaded in assemblyscript, it's possible for this to work on reference types. 
401 | * 402 | * @param {T | null} value - The expected value that the actual value should be less than. 403 | * @param {string} message - The optional message that describes this expectation. 404 | * 405 | * @example 406 | * 407 | * ```ts 408 | * expect(10).not.toBeLessThan(4); 409 | * expect(12).toBeLessThan(42); 410 | * ``` 411 | */ 412 | toBeLessThan(expected: T | null, message?: string): void 413 | 414 | /** 415 | * This expectation asserts that the value is greater than or equal to the expected value. Since 416 | * operators can be overloaded in assemblyscript, it's possible for this to work on reference 417 | * types. 418 | * 419 | * @param {T | null} value - The expected value that the actual value should be greater than or 420 | * equal to. 421 | * @param {string} message - The optional message that describes this expectation. 422 | * 423 | * @example 424 | * 425 | * ```ts 426 | * expect(42).toBeGreaterThanOrEqual(42); 427 | * expect(10).toBeGreaterThanOrEqual(4); 428 | * expect(12).not.toBeGreaterThanOrEqual(42); 429 | * ``` 430 | */ 431 | toBeGreaterThanOrEqual(expected: T | null, message?: string): void 432 | 433 | /** 434 | * This expectation asserts that the value is less than or equal to the expected value. Since 435 | * operators can be overloaded in assemblyscript, it's possible for this to work on reference 436 | * types. 437 | * 438 | * @param {T | null} value - The expected value that the actual value should be less than or equal 439 | * to. 440 | * @param {string} message - The optional message that describes this expectation. 441 | * 442 | * @example 443 | * 444 | * ```ts 445 | * expect(42).toBeLessThanOrEqual(42); 446 | * expect(10).not.toBeLessThanOrEqual(4); 447 | * expect(12).toBeLessThanOrEqual(42); 448 | * ``` 449 | */ 450 | toBeLessThanOrEqual(expected: T | null, message?: string): void 451 | 452 | /** 453 | * This expectation asserts that the value is close to another value. 
Both numbers must be finite, 454 | * and T must extend f64 or f32. 455 | * 456 | * @param {T extends f64 | f32} value - The expected value to be close to. 457 | * @param {i32} decimalPlaces - The number of decimal places used to calculate epsilon. Default is 458 | * 2. 459 | * @param {string} message - The optional message that describes this expectation. 460 | * 461 | * @example 462 | * 463 | * ```ts 464 | * expect(0.1 + 0.2).toBeCloseTo(0.3); 465 | * ``` 466 | */ 467 | toBeCloseTo(expected: T, decimalPlaces?: number, message?: string): void 468 | 469 | /** 470 | * This function asserts the float type value is NaN. 471 | * 472 | * @param {string} message - The optional message the describes this expectation. 473 | * 474 | * @example 475 | * 476 | * ```ts 477 | * expect(NaN).toBeNaN(); 478 | * expect(42).not.toBeNaN(); 479 | * ``` 480 | */ 481 | toBeNaN(message?: string): void 482 | 483 | /** 484 | * This function asserts a float is finite. 485 | * 486 | * @param {string} message - The optional message the describes this expectation. 487 | * @example 488 | * 489 | * ```ts 490 | * expect(42).toBeFinite(); 491 | * expect(Infinity).not.toBeFinite(); 492 | * ``` 493 | */ 494 | toBeFinite(message?: string): void 495 | 496 | /** 497 | * This method asserts the item has the expected length. 498 | * 499 | * @param {i32} expected - The expected length. 500 | * @param {string} message - The optional message the describes this expectation. 501 | * 502 | * ```ts 503 | * expect([1, 2, 3]).toHaveLength(3); 504 | * ``` 505 | */ 506 | toHaveLength(expected: i32, message?: string): void 507 | 508 | /** 509 | * This method asserts that a given T that extends `Array` has a value/reference included. 510 | * 511 | * @param {valueof} expected - The expected item to be included in the Array. 512 | * @param {string} message - The optional message the describes this expectation. 
513 | * 514 | * @example 515 | * 516 | * ```ts 517 | * expect([1, 2, 3]).toInclude(3); 518 | * ``` 519 | */ 520 | // @ts-ignore: expected value should be known at compile time 521 | toInclude(expected: valueof, message?: string): void 522 | 523 | /** 524 | * This method asserts that a given T that extends `Array` has a value/reference included. 525 | * 526 | * @param {valueof} expected - The expected item to be included in the Array. 527 | * @param {string} message - The optional message the describes this expectation. 528 | * 529 | * @example 530 | * 531 | * ```ts 532 | * expect([1, 2, 3]).toContain(3); 533 | * ``` 534 | */ 535 | // @ts-ignore: expected value should be known at compile time 536 | toContain(expected: valueof, message?: string): void 537 | 538 | /** 539 | * This method asserts that a given T that extends `Array` has a value/reference included and 540 | * compared via memory.compare(). 541 | * 542 | * @param {i32} expected - The expected item to be included in the Array. 543 | * @param {string} message - The optional message the describes this expectation. 544 | * 545 | * @example 546 | * ```ts 547 | * expect([new Vec3(1, 2, 3)]).toInclude(new Vec3(1, 2, 3)); 548 | * ``` 549 | */ 550 | // @ts-ignore: expected value should be known at compile time 551 | toIncludeEqual(expected: valueof, message?: string): void 552 | 553 | /** 554 | * This method asserts that a given T that extends `Array` has a value/reference included and 555 | * compared via memory.compare(). 556 | * 557 | * @param {i32} expected - The expected item to be included in the Array. 558 | * @param {string} message - The optional message the describes this expectation. 
559 | * 560 | * @example 561 | * ```ts 562 | * expect([new Vec3(1, 2, 3)]).toInclude(new Vec3(1, 2, 3)); 563 | * ``` 564 | */ 565 | // @ts-ignore: expected value should be known at compile time 566 | toContainEqual(expected: valueof, message?: string): void 567 | 568 | /** 569 | * This computed property is chainable, and negates the existing expectation. It returns itself. 570 | * 571 | * @example 572 | * ```ts 573 | * expect(42).not.toBe(0, "42 is not 0"); 574 | * ``` */ 575 | not: Expectation 576 | 577 | /** 578 | * The actual value of the expectation. 579 | */ 580 | actual: T | null 581 | } 582 | 583 | /** 584 | * This is called to stop the debugger. e.g. `node --inspect-brk asp`. 585 | */ 586 | declare function debug(): void 587 | 588 | /** 589 | * This class contains a set of methods related to performance configuration. 590 | */ 591 | // @ts-ignore 592 | declare class Performance { 593 | /** 594 | * This function call enables performance statistics gathering for the following test. 595 | * 596 | * @param {bool} enabled - The bool to indicate if performance statistics should be gathered. 597 | */ 598 | public static enabled(enabled: bool): void 599 | 600 | /** 601 | * This function call sets the maximum number of samples to complete the following test. 602 | * 603 | * @param {f64} count - The maximum number of samples required. 604 | */ 605 | public static maxSamples(count: f64): void 606 | 607 | /** 608 | * This function call sets the number of decimal places to round to for the following test. 609 | * 610 | * @param {i32} count - The number of decimal places to round to 611 | */ 612 | public static roundDecimalPlaces(count: i32): void 613 | 614 | /** 615 | * This function call will set the maximum amount of time that should pass before it can stop 616 | * gathering samples for the following test. 617 | * 618 | * @param {f64} time - The amount of time in milliseconds.
619 | */ 620 | public static maxTestRunTime(time: f64): void 621 | 622 | /** 623 | * This function call enables gathering the average/mean run time of each sample for the following 624 | * test. 625 | * 626 | * @param {bool} enabled - The bool to indicate if the average/mean should be gathered. 627 | */ 628 | public static reportAverage(enabled: bool): void 629 | 630 | /** 631 | * This function call enables gathering the median run time of each sample for the following test. 632 | * 633 | * @param {bool} value - The bool to indicate if the median should be gathered. 634 | */ 635 | public static reportMedian(value: bool): void 636 | 637 | /** 638 | * This function call enables gathering the standard deviation of the run times of the samples 639 | * collected for the following test. 640 | * 641 | * @param {bool} value - The bool to indicate if the standard deviation should be gathered. 642 | */ 643 | public static reportStdDev(value: bool): void 644 | 645 | /** 646 | * This function call enables gathering the largest run time of the samples collected for the 647 | * following test. 648 | * 649 | * @param {bool} value - The bool to indicate if the max should be gathered. 650 | */ 651 | public static reportMax(value: bool): void 652 | 653 | /** 654 | * This function call enables gathering the smallest run time of the samples collected for the 655 | * following test. 656 | * 657 | * @param {bool} value - The bool to indicate if the min should be gathered. 658 | */ 659 | public static reportMin(value: bool): void 660 | 661 | /** 662 | * This function call enables gathering the variance of the samples collected for the following test. 663 | * 664 | * @param {bool} value - The bool to indicate if the variance should be calculated. 665 | */ 666 | public static reportVariance(value: bool): void 667 | } 668 | /** 669 | * AssemblyScript uses reference counting to perform garbage collection.
This means when you 670 | * allocate a managed object and return it, its reference count is one. If another variable aliases 671 | * it then the reference count goes up. This static class contains a few convenience methods for 672 | * developers to test the current number of blocks allocated on the heap to make sure you aren't leaking 673 | * references, i.e. keeping references to objects you expect to be collected. 674 | */ 675 | declare class RTrace { 676 | /** 677 | * This bool indicates if `RTrace` should call into JavaScript to obtain reference counts. 678 | */ 679 | public static enabled: bool 680 | 681 | /** 682 | * This method returns the current number of active references on the heap. 683 | */ 684 | public static count(): i32 685 | 686 | /** 687 | * This method starts a new refcounting group, and causes the next call to `RTrace.end(label)` to 688 | * return a delta in reference counts on the heap. 689 | * 690 | * @param {i32} label - The numeric label for this refcounting group. 691 | */ 692 | public static start(label: i32): void 693 | 694 | /** 695 | * This method returns a delta of how many new (positive) or collected (negative) are on the heap. 696 | * 697 | * @param {i32} label - The numeric label for this refcounting group. 698 | */ 699 | public static end(label: i32): i32 700 | 701 | /** 702 | * This method returns the number of increments that have occurred over the course of a test 703 | * file. 704 | */ 705 | public static increments(): i32 706 | 707 | /** 708 | * This method returns the number of decrements that have occurred over the course of a test 709 | * file. 710 | */ 711 | public static decrements(): i32 712 | 713 | /** 714 | * This method returns the number of increments that have occurred over the course of a test 715 | * group. 716 | */ 717 | public static groupIncrements(): i32 718 | 719 | /** 720 | * This method returns the number of decrements that have occurred over the course of a test 721 | * group.
722 | */ 723 | public static groupDecrements(): i32 724 | 725 | /** 726 | * This method returns the number of increments that have occurred over the course of a test 727 | * group. 728 | */ 729 | public static testIncrements(): i32 730 | 731 | /** 732 | * This method returns the number of decrements that have occurred over the course of a test 733 | * group. 734 | */ 735 | public static testDecrements(): i32 736 | 737 | /** 738 | * This method returns the number of allocations that have occurred over the course of a test 739 | * file. 740 | */ 741 | public static allocations(): i32 742 | 743 | /** 744 | * This method returns the number of frees that have occurred over the course of a test 745 | * file. 746 | */ 747 | public static frees(): i32 748 | 749 | /** 750 | * This method returns the number of allocations that have occurred over the course of a test 751 | * group. 752 | */ 753 | public static groupAllocations(): i32 754 | 755 | /** 756 | * This method returns the number of frees that have occurred over the course of a test 757 | * group. 758 | */ 759 | public static groupFrees(): i32 760 | 761 | /** 762 | * This method returns the number of allocations that have occurred over the course of a test 763 | * group. 764 | */ 765 | public static testAllocations(): i32 766 | 767 | /** 768 | * This method returns the number of frees that have occurred over the course of a test 769 | * group. 770 | */ 771 | public static testFrees(): i32 772 | 773 | /** 774 | * This method triggers a garbage collection. 775 | */ 776 | public static collect(): void 777 | 778 | /** 779 | * Get the class id of the pointer. 780 | * 781 | * @param {usize} pointer - The pointer. 782 | * @returns {u32} - The class id of the allocated block. 783 | */ 784 | public static classIdOf(pointer: usize): u32 785 | 786 | /** 787 | * Get the size of a block or buffer. 788 | * 789 | * @param {T} reference - The reference. 790 | * @returns {u32} - The size of the allocated block. 
791 | */ 792 | public static sizeOf(reference: T): u32 793 | 794 | /** 795 | * Get the currently allocated blocks. 796 | */ 797 | public static activeBlocks(): usize[] 798 | 799 | /** 800 | * Get the current groups allocated blocks. 801 | */ 802 | public static activeGroupBlocks(): usize[] 803 | 804 | /** 805 | * Get the current tests allocated blocks. 806 | */ 807 | public static activeTestBlocks(): usize[] 808 | } 809 | 810 | /** 811 | * This class is static and contains private global values that contain metadata about the Actual 812 | * value. 813 | * 814 | * @example 815 | * ```ts 816 | * Actual.report("This is an expected string."); 817 | * Actual.report([1, 2, 3]); 818 | * Actual.report(42); 819 | * ``` 820 | */ 821 | declare class Actual { 822 | /** 823 | * This function performs reporting to javascript what the actual value of this expectation is. 824 | * 825 | * @param {T} actual - The actual value to be reported. 826 | */ 827 | public static report(value: T): void 828 | 829 | /** 830 | * Clear the actual value and release any private memory stored as a global. 831 | */ 832 | public static clear(): void 833 | } 834 | 835 | /** 836 | * This class is static and contains private global values that contain metadata about the Expected 837 | * value. 838 | * 839 | * @example 840 | * ```ts 841 | * Expected.report("This is an expected string."); 842 | * Expected.report([1, 2, 3]); 843 | * Expected.report(42, i32(true)); // not 42 844 | * ``` 845 | */ 846 | declare class Expected { 847 | /** 848 | * This function performs reporting to javascript what the expected value of this expectation is. 849 | * It notifies javascript if the expectation is negated. 850 | * 851 | * @param {T} value - The actual value to be reported. 852 | * @param {i32} negated - An indicator if the expectation is negated. Pass `1` to negate the 853 | * expectation. 
(default: 0) 854 | */ 855 | public static report(value: T, negated?: i32): void 856 | 857 | /** 858 | * Clear the expected value and release any private memory stored as a global. 859 | */ 860 | public static clear(): void 861 | } 862 | -------------------------------------------------------------------------------- /assembly/__tests__/util.spec.ts: -------------------------------------------------------------------------------- 1 | import { stripBuf, cmpBuf } from '../util' 2 | 3 | describe('stripBuf', () => { 4 | it('should strip buffer with preceding zeros', () => { 5 | const buf = new Uint8Array(5) 6 | buf.fill(0) 7 | buf[2] = 2 8 | const res = stripBuf(buf) 9 | expect(res.length).toBe(3) 10 | }) 11 | 12 | it('shouldnt strip if first byte is nonzero', () => { 13 | const buf = new Uint8Array(5) 14 | buf.fill(0) 15 | buf[0] = 2 16 | const res = stripBuf(buf) 17 | expect(res.length).toBe(5) 18 | }) 19 | 20 | it('should strip buffer of all zeros', () => { 21 | const buf = new Uint8Array(5) 22 | buf.fill(0) 23 | const res = stripBuf(buf) 24 | expect(res.length).toBe(0) 25 | }) 26 | }) 27 | 28 | describe('cmpBuf', () => { 29 | it('should return 1 for longer buffer', () => { 30 | const buf = new Uint8Array(5) 31 | buf.fill(1) 32 | const other = new Uint8Array(4) 33 | other.fill(1) 34 | expect(cmpBuf(buf, other)).toBe(1) 35 | }) 36 | 37 | it('should return -1 for shorter buffer', () => { 38 | const buf = new Uint8Array(4) 39 | buf.fill(1) 40 | const other = new Uint8Array(5) 41 | other.fill(1) 42 | expect(cmpBuf(buf, other)).toBe(-1) 43 | }) 44 | 45 | it('should return 0 for equal buffers', () => { 46 | const buf = new Uint8Array(5) 47 | buf.fill(1) 48 | const other = new Uint8Array(5) 49 | other.fill(1) 50 | expect(cmpBuf(buf, other)).toBe(0) 51 | }) 52 | 53 | it('should return 1 for greater buffers', () => { 54 | const buf = new Uint8Array(5) 55 | buf.fill(1) 56 | buf[0] = 2 57 | const other = new Uint8Array(5) 58 | other.fill(2) 59 | other[0] = 1 60 | 
expect(cmpBuf(buf, other)).toBe(1) 61 | }) 62 | 63 | it('should return -1 for lesser buffers', () => { 64 | const buf = new Uint8Array(5) 65 | buf.fill(2) 66 | buf[0] = 1 67 | const other = new Uint8Array(5) 68 | other.fill(1) 69 | other[0] = 2 70 | expect(cmpBuf(buf, other)).toBe(-1) 71 | }) 72 | }) 73 | -------------------------------------------------------------------------------- /assembly/debug.ts: -------------------------------------------------------------------------------- 1 | import { debug_print32, debug_printMem } from '../node_modules/scout.ts/assembly/env' 2 | 3 | export function debugMem(buf: Uint8Array): void { 4 | debug_printMem(buf.dataStart, buf.length) 5 | } 6 | 7 | export function debug(v: i32): void { 8 | debug_print32(v) 9 | } 10 | -------------------------------------------------------------------------------- /assembly/env.ts: -------------------------------------------------------------------------------- 1 | // eth2 namespace 2 | export declare function eth2_loadPreStateRoot(offset: u32): void 3 | export declare function eth2_blockDataSize(): u32 4 | export declare function eth2_blockDataCopy(outputOffset: u32, offset: u32, length: u32): void 5 | export declare function eth2_savePostStateRoot(offset: u32): void 6 | export declare function eth2_pushNewDeposit(offset: u32): void 7 | 8 | // debug namespace 9 | export declare function debug_print32(value: i32): void 10 | export declare function debug_printMem(ptr: i32, length: i32): void 11 | export declare function debug_printMemHex(ptr: i32, length: i32): void 12 | 13 | // bignum namespace 14 | export declare function bignum_add256(elem_a: u32, elem_b: u32, result: u32): void 15 | export declare function bignum_mul256(stackTop: u32): u32 16 | export declare function bignum_sub256(elem_a: u32, elem_b: u32, result: u32): void 17 | export declare function bignum_lt256(stackTop: u32): u32 18 | export declare function bignum_div256(stackTop: u32): u32 19 | export declare function 
bignum_jumpi(stackTop: u32, pc: i32): u32 20 | -------------------------------------------------------------------------------- /assembly/evm.ts: -------------------------------------------------------------------------------- 1 | import { debug, debugMem } from './debug' 2 | import { bignum_add256, debug_print32 } from './env' 3 | import { Opcodes } from './opcodes' 4 | 5 | // TODO: Assume RETURN returns one byte, return 0 if no RETURN 6 | export function interpret(code: Uint8Array): u8 { 7 | // stack size is 100 elements 8 | // each stack element is 32 bytes 9 | const stackSize = 100 10 | const stackElementSize = 32 11 | let stack = new ArrayBuffer(stackElementSize * stackSize) 12 | let stackPtr = changetype(stack) 13 | let stackElements = new Array(stackSize) 14 | 15 | for (let i = 0; i < stackSize; i++) { 16 | stackElements[i] = Uint8Array.wrap(stack, i * stackElementSize, stackElementSize) 17 | } 18 | 19 | const memorySize = 100 20 | const memoryElementSize = 32 21 | let memory = new ArrayBuffer(memoryElementSize * memorySize) 22 | let memoryPtr = changetype(memory) 23 | let memoryElements = new Array(memorySize) 24 | 25 | for (let i = 0; i < memorySize; i++) { 26 | memoryElements[i] = Uint8Array.wrap(memory, i * memoryElementSize, memoryElementSize) 27 | } 28 | 29 | let stackTop: i32 = 0 30 | 31 | let pc: i32 = 0 32 | let returnValue: u8 = 0 33 | 34 | while (pc < code.length) { 35 | let opcode: u8 = code[pc] 36 | pc++ 37 | 38 | debug_print32(opcode) 39 | switch (opcode) { 40 | case Opcodes.Push1: 41 | let push_val = code[pc] 42 | pc++ 43 | let stack_slot = stackElements[stackTop] 44 | stack_slot.fill(0, 0, 32) 45 | 46 | // 1 byte goes in the last byte of the 32-byte stack slot 47 | stack_slot[31] = push_val 48 | 49 | stackTop++ 50 | 51 | break 52 | case Opcodes.Add: 53 | let a_pos = stackPtr + 32 * (stackTop - 1) 54 | let b_pos = stackPtr + 32 * (stackTop - 2) 55 | 56 | bignum_add256(a_pos, b_pos, b_pos) 57 | 58 | stackTop-- 59 | break 60 | case 
Opcodes.SStore: 61 | // TODO: 62 | // store(position, value) 63 | break 64 | case Opcodes.MStore8: { // braces give this case its own scope: `offset`/`offsetU8` are also declared with `let` in the Return case, and unbraced case clauses share one block scope (a redeclaration error) 65 | let offset = stackElements[stackTop - 1] 66 | let val = stackElements[stackTop - 2] 67 | 68 | // TODO: Consider whole offset, not only last byte 69 | let offsetU8: u8 = offset[31] 70 | store(memoryPtr + offsetU8, val[31], 0) 71 | 72 | stackTop -= 2 73 | break } 74 | case Opcodes.Return: { // braced for the same scoping reason as MStore8 above 75 | let offset = stackElements[stackTop - 1] 76 | let length = stackElements[stackTop - 2] 77 | 78 | // TODO: Consider whole offset and length 79 | // not only last byte 80 | let offsetU8: u8 = offset[31] 81 | let lengthU8: u8 = length[31] 82 | if (lengthU8 !== 1) throw new Error('Unimplemented') 83 | 84 | returnValue = load(memoryPtr + offsetU8) 85 | // Finish execution 86 | pc = code.length 87 | break } 88 | default: 89 | debug_print32(404) 90 | pc = code.length // unknown opcode, finish execution 91 | break 92 | } 93 | } 94 | 95 | return returnValue 96 | } 97 | -------------------------------------------------------------------------------- /assembly/keccak.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * to call a wasm function defined outside of AS from within AS, I guess 3 | * we have declare it as an import. the function won't actually be an imported 4 | * host function, because in gulpfile.js we splice the wat code and paste keccak-funcs.wat 5 | * into the AssemblyScript output main.wat. 6 | * then the keccak function will be declared twice (once as an import statement 7 | * generated by AS, and a second time as the function def/wat code pasted in). 8 | * so to make it valid again, gulpfile.js will find and delete the import statement 9 | * put there by AS.
10 | */ 11 | 12 | @external('watimports', '$ethash_keccak256') 13 | export declare function ethash_keccak256(outputOffset: i32, inputOffset: i32, inputLen: i32): void 14 | 15 | /* 16 | 17 | (call $_ethash_keccak256 18 | (i32.const 16384) ;; output offset 1024 * 16 19 | (i32.const 0) ;; mem offset in 20 | (i32.const 266) ;; input length 21 | ) 22 | 23 | * test vector from https://github.com/axic/keccak-wasm/blob/master/test.wast 24 | ;; input (136 bytes of zeroes): 00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 25 | ;; output (keccak-256): 3a5912a7c5faa06ee4fe906253e339467a9ce87d533c65be3c15cb231cdb25f9 26 | * 27 | */ 28 | -------------------------------------------------------------------------------- /assembly/main.ts: -------------------------------------------------------------------------------- 1 | import { ethash_keccak256 } from './keccak' 2 | import { 3 | hashBranchNode, 4 | RLPBranchNode, 5 | RLPData, 6 | decode, 7 | encode, 8 | hashExtension, 9 | hashBranch, 10 | encodeLeaf, 11 | encodeAccount, 12 | decodeAccount, 13 | } from './rlp' 14 | import { 15 | parseU8, 16 | padBuf, 17 | cmpBuf, 18 | stripBuf, 19 | hash, 20 | nibbleArrToUintArr, 21 | addHexPrefix, 22 | uintArrToNibbleArr, 23 | removeHexPrefix, 24 | } from './util' 25 | import { debug, debugMem } from './debug' 26 | 27 | import { 28 | eth2_blockDataSize, 29 | eth2_blockDataCopy, 30 | eth2_loadPreStateRoot, 31 | eth2_savePostStateRoot, 32 | bignum_add256, 33 | bignum_sub256, 34 | } from '../node_modules/scout.ts/assembly/env' 35 | 36 | import { interpret } from './evm' 37 | 38 | export enum Opcode { 39 | Branch = 0, 40 | Hasher = 1, 41 | Leaf = 2, 42 | Extension = 3, 43 | } 44 | 45 | export enum NodeType { 46 | Branch = 0, 47 | Leaf = 1, 48 | Extension = 2, 49 | Hash = 3, 50 | 
} 51 | 52 | // for Map binaryen toText generates function names with commas, which wabt doesn't like. 53 | const Trie = new Map() 54 | 55 | class Node { 56 | constructor( 57 | //public type: u8, 58 | public type: NodeType, 59 | 60 | public branchBody: RLPBranchNode | null, 61 | public leafBody: Uint8Array | null, // a leaf body is just the rlp encoded account 62 | ) {} 63 | } 64 | 65 | export function main(): void { 66 | // INPUT 1: pre-state root 67 | let preStateRootBuf = new ArrayBuffer(32) 68 | let preStateRoot = Uint8Array.wrap(preStateRootBuf, 0, 32) 69 | eth2_loadPreStateRoot(preStateRootBuf as usize) 70 | 71 | // INPUT 2: proof data from the EEI 72 | let blockDataSize = eth2_blockDataSize() 73 | let blockDataBuf = new ArrayBuffer(blockDataSize) 74 | eth2_blockDataCopy(blockDataBuf as usize, 0, blockDataSize) 75 | let blockData = Uint8Array.wrap(blockDataBuf, 0, blockDataSize) 76 | 77 | let postStateRoot = processBlock(preStateRoot, blockData) 78 | 79 | eth2_savePostStateRoot(postStateRoot.dataStart as usize) 80 | } 81 | 82 | export function processBlock(preStateRoot: Uint8Array, blockData: Uint8Array): Uint8Array { 83 | // input data is RLP 84 | let inputDecoded = decode(blockData) 85 | let inputChildren = inputDecoded.children 86 | 87 | // input_decoded is type RLPData: { buffer: Uint8Array, children: RLPData[] } 88 | // [txes, addrs, hashes, leaves, instructions, codeHashes, bytecode] 89 | let txes = inputChildren[0].children 90 | let addrs = inputChildren[1].children 91 | let hashes = inputChildren[2].children 92 | let leafKeys = inputChildren[3].children 93 | let accounts = inputChildren[4].children 94 | // Instructions are flat-encoded 95 | let instructions = inputChildren[5].buffer 96 | let codeHashes: RLPData[] = inputChildren[6].children 97 | let bytecode: RLPData[] = inputChildren[7].children 98 | let expectedReturnValue = inputChildren[8].buffer 99 | 100 | if (addrs.length !== leafKeys.length || addrs.length !== accounts.length) { 101 | throw new 
Error('invalid multiproof') 102 | } 103 | 104 | let updatedAccounts = new Array(accounts.length) 105 | for (let i = 0; i < txes.length; i++) { 106 | let tx = txes[i] 107 | // [toIdx, value, nonce, fromIdx] 108 | let toIdx = parseU8(tx.children[0].buffer) 109 | let fromIdx = parseU8(tx.children[3].buffer) 110 | let value = tx.children[1].buffer 111 | let nonce = tx.children[2].buffer 112 | 113 | // TODO: Hash unsigned tx, recover from address, check against fromIdx 114 | let fromAccountRaw = accounts[fromIdx].buffer 115 | // If `from` has been modified by previous txes 116 | // load the updated one. 117 | if (updatedAccounts[fromIdx] !== null) { 118 | fromAccountRaw = updatedAccounts[fromIdx] as Uint8Array 119 | } 120 | 121 | let toAccountRaw = accounts[toIdx].buffer 122 | if (updatedAccounts[toIdx] !== null) { 123 | toAccountRaw = updatedAccounts[toIdx] as Uint8Array 124 | } 125 | 126 | let fromAccount = decodeAccount(fromAccountRaw) 127 | let toAccount = decodeAccount(toAccountRaw) 128 | 129 | // Sender's nonce should match tx's nonce 130 | if (cmpBuf(fromAccount[0], nonce) != 0) { 131 | throw new Error('Invalid nonce') 132 | } 133 | 134 | // Sender has enough balance 135 | if (cmpBuf(fromAccount[1], value) == -1) { 136 | throw new Error('Insufficient funds') 137 | } 138 | 139 | // Update nonce and balances 140 | value = padBuf(value, 32) 141 | let fromBalance = padBuf(fromAccount[1], 32) 142 | let newFromBalance = new ArrayBuffer(32) 143 | bignum_sub256(fromBalance.dataStart, value.dataStart, newFromBalance as usize) 144 | 145 | let toBalance = padBuf(toAccount[1], 32) 146 | let newToBalance = new ArrayBuffer(32) 147 | bignum_add256(toBalance.dataStart, value.dataStart, newToBalance as usize) 148 | 149 | let paddedNonce = padBuf(nonce, 32) 150 | let fromNonce = padBuf(fromAccount[0], 32) 151 | let newFromNonce = new ArrayBuffer(32) 152 | let one256 = new ArrayBuffer(32) 153 | let onedv = new DataView(one256) 154 | onedv.setUint8(31, 1) 155 | 
bignum_add256(fromNonce.dataStart, one256 as usize, newFromNonce as usize) 156 | 157 | // Encode updated accounts 158 | let newFromAccount = encodeAccount( 159 | stripBuf(Uint8Array.wrap(newFromNonce)), 160 | stripBuf(Uint8Array.wrap(newFromBalance)), 161 | fromAccount[2], 162 | fromAccount[3], 163 | ) 164 | let newToAccount = encodeAccount( 165 | toAccount[0], 166 | stripBuf(Uint8Array.wrap(newToBalance)), 167 | toAccount[2], 168 | toAccount[3], 169 | ) 170 | 171 | updatedAccounts[fromIdx] = newFromAccount 172 | updatedAccounts[toIdx] = newToAccount 173 | 174 | // check if to account is contract 175 | if (isContract(toAccount)) { 176 | let code = getCode(toAccount, codeHashes, bytecode) 177 | let returnValue = interpret(code) 178 | if (expectedReturnValue.length !== 1) throw new Error('Unimplemented') 179 | if (returnValue !== expectedReturnValue[0]) { 180 | throw new Error('Invalid return value') 181 | } 182 | } 183 | } 184 | 185 | let addrsLen = addrs.length 186 | let keys = new Array(addrsLen) 187 | for (let i = 0; i < addrsLen; i++) { 188 | keys[i] = hash(addrs[i].buffer) 189 | } 190 | 191 | let postStateRoot = verifyMultiproofAndUpdate( 192 | preStateRoot, 193 | hashes, 194 | leafKeys, 195 | accounts, 196 | updatedAccounts as Uint8Array[], 197 | instructions, 198 | keys, 199 | ) 200 | 201 | return postStateRoot 202 | 203 | /* 204 | // **** update post-state 205 | // INPUT 2: address is another input, also hardcoded for testing. 
206 | let address_hash = Array.create(32); 207 | // keccak("eb79aa62d6433e0a23efeb3c859eae7b3c74e850") 208 | address_hash = [14, 141, 0, 120, 132, 143, 54, 115, 94, 135, 183, 110, 98, 144, 35, 193, 245, 40, 118, 164, 66, 9, 192, 120, 221, 66, 131, 45, 8, 208, 15, 177]; 209 | 210 | let key_nibbles = u8ArrToNibbleArr(address_hash); 211 | //debug_mem((key_nibbles.buffer as usize) + key_nibbles.byteOffset, key_nibbles.byteLength); 212 | 213 | let new_leaf_account_rlp_array = Array.create(73); 214 | //let new_leaf_account_rlp = new Uint8Array(73); 215 | // f8478083ffffffa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470 216 | new_leaf_account_rlp_array = [248, 71, 128, 131, 255, 255, 255, 160, 86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33, 160, 197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112]; 217 | 218 | let new_leaf_account_rlp = Uint8Array.wrap(new_leaf_account_rlp_array.buffer, 0, 73); 219 | //debug_mem((new_leaf_account_rlp.buffer as usize) + new_leaf_account_rlp.byteOffset, new_leaf_account_rlp.byteLength); 220 | 221 | insertNewLeafNewBranch(verified_prestate_root_ptr, key_nibbles, new_leaf_account_rlp); 222 | 223 | let new_root_ptr = rehashNode(verified_prestate_root_ptr); 224 | //debug_mem(new_root_ptr, 32); 225 | 226 | // **** verify pre-state + update post-state 227 | // 9 calls to keccak256 in the first verify + update 228 | // 22 iterations total to 198 calls to keccak256 229 | for (let i = 0; i < 21; i++) { 230 | let calculated_prestate_root_ptr = verifyMultiproof(input_decoded); 231 | insertNewLeafNewBranch(calculated_prestate_root_ptr, key_nibbles, new_leaf_account_rlp); 232 | let calculated_poststate_ptr = rehashNode(verified_prestate_root_ptr); 233 | new_root_ptr = 
calculated_poststate_ptr;
  }
  eth2_savePostStateRoot(new_root_ptr);
  */
}

// One entry on the multiproof verification stack: a reconstructed trie node
// carrying both its pre-state hash and its post-state (updated) hash.
// NOTE(review): generic type arguments in this region were stripped during
// extraction and have been restored from usage — confirm against the repo.
class StackItem {
  constructor(
    public kind: NodeType,
    // indices (into the leaf arrays) of every leaf located under this node
    public pathIndices: Array<i32>,
    // hash of this node in the pre-state trie
    public hash: Uint8Array | null,
    // hash of this node after the account updates were applied
    public newHash: Uint8Array | null,
  ) {}
}

/**
 * Replays the multiproof instruction stream and simultaneously:
 *  1. recomputes the pre-state root from hashes/leafKeys/accounts and checks
 *     it against preStateRoot,
 *  2. recomputes the post-state root with updatedAccounts substituted for the
 *     original leaf values, and
 *  3. reconstructs each leaf's nibble path bottom-up and checks it against keys.
 *
 * @param preStateRoot    expected 32-byte pre-state root hash
 * @param hashes          subtree hashes consumed by Hasher opcodes
 * @param leafKeys        hex-prefix-encoded key remainder per leaf
 * @param accounts        RLP account bodies (pre-state), parallel to leafKeys
 * @param updatedAccounts RLP account bodies (post-state), parallel to accounts
 * @param instructions    multiproof opcode stream (Hasher/Leaf/Branch/Extension)
 * @param keys            full hashed keys the computed paths must equal
 * @returns the post-state root hash
 * @throws when the proof is malformed, the recomputed root differs from
 *         preStateRoot, or any reconstructed path differs from keys
 */
function verifyMultiproofAndUpdate(
  preStateRoot: Uint8Array,
  hashes: RLPData[],
  leafKeys: RLPData[],
  accounts: RLPData[],
  updatedAccounts: Uint8Array[],
  instructions: Uint8Array,
  keys: Uint8Array[],
): Uint8Array {
  let pc = 0
  let hashIdx = 0
  let leafIdx = 0
  // fixed-capacity stack of (preHash, postHash) pairs; 100 bounds proof depth
  let stack = new Array<StackItem>(100)
  let stackTop = 0

  let leafKeysLen = leafKeys.length
  // paths[i] accumulates the nibble path of leaf i as the proof is replayed
  let paths = new Array<Array<u8>>(leafKeysLen)
  for (let i = 0; i < leafKeysLen; i++) {
    paths[i] = new Array<u8>()
  }

  while (pc < instructions.length) {
    let op = instructions[pc++]
    switch (op) {
      case Opcode.Hasher: {
        if (hashIdx >= hashes.length) {
          throw new Error('Not enough hashes in multiproof')
        }
        // untouched subtree: pre-state and post-state hashes are identical
        let h = hashes[hashIdx++].buffer
        stack[stackTop++] = new StackItem(NodeType.Hash, [], h, h)
        break
      }
      case Opcode.Leaf: {
        if (leafIdx >= leafKeys.length) {
          throw new Error('Not enough leaves in multiproof')
        }

        let path = removeHexPrefix(uintArrToNibbleArr(leafKeys[leafIdx].buffer))
        paths[leafIdx] = path
        // hash the leaf twice: with the pre-state account and the updated one
        let l = encodeLeaf(leafKeys[leafIdx].buffer, accounts[leafIdx].buffer)
        let ul = encodeLeaf(leafKeys[leafIdx].buffer, updatedAccounts[leafIdx])
        leafIdx++
        let h = hash(l)
        let nh = hash(ul)
        stack[stackTop++] = new StackItem(NodeType.Leaf, [leafIdx - 1], h, nh)
        break
      }
      case Opcode.Branch: {
        // operand: count of child indices, followed by the indices themselves
        let indicesLen = instructions[pc++] as i32
        let branchIndices = new Array<u8>(indicesLen)
        for (let i = 0; i < indicesLen; i++) {
          branchIndices[i] = instructions[pc + i]
        }
        pc += indicesLen

        let children = new Array<Uint8Array | null>(17)
        let newChildren = new Array<Uint8Array | null>(17)
        let pathIndices = new Array<i32>()
        for (let i = 0; i < branchIndices.length; i++) {
          let idx = branchIndices[i]
          // children were pushed earlier; pop them in reverse index order
          let n = stack[--stackTop]

          children[idx] = n.hash
          newChildren[idx] = n.newHash

          let nPathIndices = n.pathIndices
          pathIndices = pathIndices.concat(nPathIndices)
          // prepend this branch index to every leaf path under the child
          for (let i = 0, len = nPathIndices.length; i < len; i++) {
            paths[nPathIndices[i]].unshift(idx)
          }
        }
        let h = hashBranch(children)
        let nh = hashBranch(newChildren)

        stack[stackTop++] = new StackItem(NodeType.Branch, pathIndices, h, nh)
        break
      }
      case Opcode.Extension: {
        // operand: nibble count, followed by the extension's nibbles
        let nibblesLen = instructions[pc++] as i32
        let nibbles = new Array<u8>(nibblesLen)
        for (let i = 0; i < nibblesLen; i++) {
          nibbles[i] = instructions[pc + i]
        }
        pc += nibblesLen

        let key = nibbleArrToUintArr(addHexPrefix(nibbles, false))
        // addHexPrefix modifies array in-place
        nibbles = removeHexPrefix(nibbles)

        let n = stack[--stackTop]
        let h = hashExtension(key, n.hash!)
        let nh = hashExtension(key, n.newHash!)

        stack[stackTop++] = new StackItem(NodeType.Extension, n.pathIndices.slice(0), h, nh)
        let nPathIndices = n.pathIndices
        // prepend the extension's nibbles to every covered leaf path
        for (let i = 0, len = nPathIndices.length; i < len; i++) {
          let pathIndex = nPathIndices[i]
          paths[pathIndex] = nibbles.concat(paths[pathIndex])
        }
        break
      }
    }
  }
  // the single remaining stack item is the root of the proof
  let r = stack[stackTop - 1]
  let rootHash = r.hash!
  let newRootHash = r.newHash!

  if (cmpBuf(rootHash, preStateRoot) != 0) {
    throw new Error('invalid root hash')
  }

  // Verify given keys match computed paths
  for (let i = 0, len = paths.length; i < len; i++) {
    let path = nibbleArrToUintArr(paths[i])
    if (cmpBuf(path, keys[i]) != 0) {
      throw new Error('invalid key')
    }
  }

  return newRootHash
}

/**
 * Walks the trie from the root along the new key's nibbles, marking branch
 * nodes dirty along the way; when a terminal leaf is reached, delegates to
 * createNewBranchWhereLeafExists to split it into a branch with two leaves.
 * Extension nodes and insertion into an existing empty slot are unimplemented.
 *
 * @param prestate_root_hash_ptr pointer to the 32-byte root hash (Trie key)
 * @param new_leaf_key_nibbles   full nibble path of the new leaf's key
 * @param new_leaf_account_rlp   RLP-encoded account body of the new leaf
 */
function insertNewLeafNewBranch(
  prestate_root_hash_ptr: usize,
  new_leaf_key_nibbles: Array<u8>,
  new_leaf_account_rlp: Uint8Array,
): void {
  let currentNode = Trie.get(prestate_root_hash_ptr)

  // hash current_node for debug test
  //let hashOutput = new ArrayBuffer(32);
  //let hashOutputPtr = changetype<usize>(hashOutput);
  //debug(8891);

  //let encoded_node = encode(current_node.bodyrlp);
  //debug(8892);

  //ethash_keccak256(hashOutputPtr, (encoded_node.buffer as usize) + encoded_node.byteOffset, encoded_node.byteLength);
  //debug(8899);
  //debug_mem(hashOutputPtr, 32);

  // current_node.bodyrlp

  // pathStack could be smaller than 40
  const pathStackSize = 40
  // NOTE(review): new Array<usize>(40) pre-fills 40 zero entries and push()
  // appends after them; only the pushed tail is ever read — confirm intended
  let pathStack = new Array<usize>(pathStackSize)
  pathStack.push(prestate_root_hash_ptr)

  for (let k_i = 0; k_i < pathStackSize; k_i++) {
    let branch_index_i = new_leaf_key_nibbles[k_i]

    if (currentNode.type == NodeType.Leaf) {
      createNewBranchWhereLeafExists(
        new_leaf_account_rlp,
        new_leaf_key_nibbles,
        k_i,
        currentNode,
        pathStack,
      )
      return
    } else if (currentNode.type == NodeType.Branch) {
      if (currentNode.branchBody!.dirty === null) {
        // NOTE(review): the comment below says "empty array" but new Array(16)
        // creates 16 zero-filled entries — verify against rehashNode's reader
        currentNode.branchBody!.dirty = new Array(16)

        // setting the dirty flag to an empty array indicates that no children are dirty but the branch node itself needs to be rehashed
        // explanation:
        // if the next node is a leaf, then it will be converted into a new
child branch node with two leafs underneath it (the existing leaf and the new leaf) 415 | // the new child branch won't be dirty, since it will be created with the new hash 416 | // the current branch (i.e. the parent of the new branch) will have the new hash inserted into the branch index 417 | } 418 | 419 | let next_node_in_path_hash_ptr = currentNode.branchBody.children[branch_index_i] 420 | // if next_node_in_path_hash_ptr is 0, then the child is empty 421 | 422 | //debug_mem(next_node_in_path_hash_ptr, 32); 423 | 424 | if (next_node_in_path_hash_ptr == 0) { 425 | // end of path is an already existing branch node 426 | // can just insert the leaf into the branch 427 | throw new Error( 428 | 'Dont yet handle inserting a leaf into an already existing branch (but its easy) ', 429 | ) 430 | } 431 | 432 | pathStack.push(next_node_in_path_hash_ptr) 433 | // next node in path is either a leaf or a branch 434 | let nextNodeInPath = Trie.get(next_node_in_path_hash_ptr) 435 | 436 | // TODO: we already check this types ni the for loop.. merge the logic? 437 | if (nextNodeInPath.type == NodeType.Branch) { 438 | // keep walking... 
439 | currentNode.branchBody!.dirty.push(branch_index_i) 440 | currentNode = nextNodeInPath 441 | } else if (nextNodeInPath.type == NodeType.Leaf) { 442 | // next node is a leaf, and so is last node in the path 443 | // next step will create the new branch node 444 | currentNode = nextNodeInPath 445 | } else { 446 | throw new Error('extension nodes are unimplemented!') 447 | } 448 | } else { 449 | throw new Error('extension nodes are unimplemented.') 450 | } 451 | } // end for loop 452 | } 453 | 454 | function createNewBranchWhereLeafExists( 455 | new_leaf_account_rlp: Uint8Array, 456 | new_key_nibbles: Array, 457 | k_i: u32, 458 | existingLeafNode: Node, 459 | pathStack: Array, 460 | ): void { 461 | //debug_mem((new_leaf_account_rlp.buffer as usize) + new_leaf_account_rlp.byteOffset, new_leaf_account_rlp.byteLength); 462 | 463 | let existing_leaf_key_value = decode(existingLeafNode.leafBody as Uint8Array) 464 | let existing_leaf_value = existing_leaf_key_value.children[1].buffer 465 | //debug_mem((existing_leaf_value.buffer as usize) + existing_leaf_value.byteOffset, existing_leaf_value.byteLength); 466 | 467 | let existing_leaf_key_nibbles = uintArrToNibbleArr(existing_leaf_key_value.children[0].buffer) 468 | existing_leaf_key_nibbles = removeHexPrefix(existing_leaf_key_nibbles) 469 | 470 | if (new_key_nibbles[k_i] == existing_leaf_key_nibbles[0]) { 471 | throw new Error('TODO: handle extension node insertion') 472 | } 473 | 474 | // recreate existing leaf 475 | 476 | let new_key_for_existing_leaf_nibbles = existing_leaf_key_nibbles 477 | // first nibble of the existing leaf key becomes its branch index in the new branch 478 | let branch_index_for_existing_leaf = new_key_for_existing_leaf_nibbles.shift() 479 | 480 | let new_key_for_existing_leaf = nibbleArrToUintArr( 481 | addHexPrefix(new_key_for_existing_leaf_nibbles, true), 482 | ) 483 | 484 | let new_node_for_existing_leaf_rlp_children = new Array(2) 485 | 486 | 
new_node_for_existing_leaf_rlp_children.push(new RLPData(null, new Array())) 487 | new_node_for_existing_leaf_rlp_children.push(new RLPData(null, new Array())) 488 | new_node_for_existing_leaf_rlp_children[0].buffer = new_key_for_existing_leaf 489 | new_node_for_existing_leaf_rlp_children[1].buffer = existing_leaf_value 490 | let new_node_for_existing_leaf_rlp = new RLPData(null, new_node_for_existing_leaf_rlp_children) 491 | let new_node_for_existing_leaf = encode(new_node_for_existing_leaf_rlp) 492 | 493 | let new_hash_for_existing_leaf_buffer = new ArrayBuffer(32) 494 | let new_hash_for_existing_leaf_ptr = changetype(new_hash_for_existing_leaf_buffer) 495 | 496 | ethash_keccak256( 497 | new_hash_for_existing_leaf_ptr, 498 | new_node_for_existing_leaf.dataStart as usize, 499 | new_node_for_existing_leaf.byteLength, 500 | ) 501 | //debug_mem(new_hash_for_existing_leaf_ptr, 32); 502 | 503 | let new_node_for_existing_leaf_obj = new Node(NodeType.Leaf, null, new_node_for_existing_leaf) 504 | Trie.set(new_hash_for_existing_leaf_ptr, new_node_for_existing_leaf_obj) 505 | 506 | // create new leaf 507 | 508 | let branch_index_for_new_leaf = new_key_nibbles[k_i] 509 | let key_for_new_leaf = nibbleArrToUintArr(addHexPrefix(new_key_nibbles.slice(k_i + 1), true)) 510 | 511 | let node_for_new_leaf_rlp_children = new Array(2) 512 | 513 | node_for_new_leaf_rlp_children.push(new RLPData(null, new Array())) 514 | node_for_new_leaf_rlp_children.push(new RLPData(null, new Array())) 515 | node_for_new_leaf_rlp_children[0].buffer = key_for_new_leaf 516 | node_for_new_leaf_rlp_children[1].buffer = new_leaf_account_rlp 517 | let node_for_new_leaf_rlp = new RLPData(null, node_for_new_leaf_rlp_children) 518 | let node_for_new_leaf = encode(node_for_new_leaf_rlp) 519 | 520 | let hash_for_new_leaf_buffer = new ArrayBuffer(32) 521 | let hash_for_new_leaf_ptr = changetype(hash_for_new_leaf_buffer) 522 | 523 | ethash_keccak256( 524 | hash_for_new_leaf_ptr, 525 | node_for_new_leaf.dataStart as 
usize, 526 | node_for_new_leaf.byteLength, 527 | ) 528 | //debug_mem(hash_for_new_leaf_ptr, 32); 529 | 530 | let node_for_new_leaf_obj = new Node(NodeType.Leaf, null, node_for_new_leaf) 531 | Trie.set(hash_for_new_leaf_ptr, node_for_new_leaf_obj) 532 | 533 | // both leafs created. now create new branch node. 534 | 535 | let new_branch_node = new RLPBranchNode(new Array(17), null) 536 | 537 | new_branch_node.children[branch_index_for_existing_leaf] = new_hash_for_existing_leaf_ptr 538 | new_branch_node.children[branch_index_for_new_leaf] = hash_for_new_leaf_ptr 539 | 540 | let new_branch_hash_ptr = hashBranchNode(new_branch_node.children) 541 | let new_branch_node_obj = new Node(NodeType.Branch, new_branch_node, null) 542 | Trie.set(new_branch_hash_ptr, new_branch_node_obj) 543 | 544 | // now we have a new branch node. but we need to replace the pointer in the parent branch node 545 | // parent branch node previously pointed to a leaf 546 | // in the updated trie, it should point to the new branch node 547 | 548 | let parent_branch_hash_ptr = pathStack[pathStack.length - 2] 549 | 550 | let parentBranchNode = Trie.get(parent_branch_hash_ptr) 551 | 552 | let new_branch_node_parent_index = new_key_nibbles[k_i - 1] 553 | 554 | parentBranchNode.branchBody!.children[new_branch_node_parent_index] = new_branch_hash_ptr 555 | //Trie.set(parent_branch_hash_ptr, parentBranchNode); 556 | // TODO: do we need to reset it with Trie.set? 
  return
}

/**
 * Recomputes the hash of the subtrie rooted at the node stored under
 * staleHashPtr, after children have been modified.
 *
 * Recurses into every child index listed in the branch's `dirty` array,
 * replaces the stale child hashes with freshly computed ones, rehashes the
 * branch itself, stores the node under its new hash in Trie, and returns a
 * pointer to the new 32-byte hash. Leaf and extension nodes are TODO.
 *
 * @param staleHashPtr pointer to the node's pre-update hash (Trie key)
 * @returns pointer to the node's recomputed hash
 * @throws for leaf/extension nodes and for branches with no dirty flag set
 */
function rehashNode(staleHashPtr: usize): usize {
  // accepts a hash pointer, returns a pointer to the new hash

  //debug_mem(staleHashPtr, 32);
  // lookup the new node using the stale hash
  let node_with_stale_hash = Trie.get(staleHashPtr)
  if (node_with_stale_hash.type == NodeType.Leaf) {
    throw new Error('TODO: handle dirty leaf')
  }

  if (node_with_stale_hash.type == NodeType.Branch) {
    // recurse on dirty children
    let dirty_indexes = node_with_stale_hash.branchBody!.dirty
    if (dirty_indexes == null) {
      throw new Error('ERROR: called rehash on a branch node that has no dirty flag')
    }

    for (let i = 0; i < dirty_indexes.length; i++) {
      let dirty_i = dirty_indexes[i]
      let stale_hash_for_dirty_child_ptr = node_with_stale_hash.branchBody!.children[dirty_i]
      let new_hash_for_dirty_child_ptr = rehashNode(stale_hash_for_dirty_child_ptr)
      node_with_stale_hash.branchBody.children[dirty_i] = new_hash_for_dirty_child_ptr
    }

    // if dirty_indexes.length == 0, no dirty children (only new children already hashed)
    // branch node itself needs to be rehashed

    // TODO: check if renaming does a deep copy or shallow
    //let new_branch_node = node_with_stale_hash;

    // all child hashes have been updated, so clear the dirty flag
    //node_with_stale_hash.dirty = null

    let new_branch_hash_ptr = hashBranchNode(node_with_stale_hash.branchBody!.children)

    // re-register the node under its new hash (old entry is left behind)
    Trie.set(new_branch_hash_ptr, node_with_stale_hash)
    return new_branch_hash_ptr
  }

  // TODO: handle extension nodes
  throw new Error('only branch nodes and leaf nodes are implemented')
}

/**
 * Returns true when the account's code hash field (account[3]) differs from
 * the constant below — presumably keccak256 of empty code (c5d2...a470);
 * confirm against the EVM spec.
 * NOTE(review): type parameter was stripped in extraction; restored as
 * Array<Uint8Array> from the decodeAccount return shape.
 */
function isContract(account: Array<Uint8Array>): bool {
  let emptyBuf: u8[] = [
    197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125,
    178, 220, 199, 3, 192, 229, 0, 182, 83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112,
  ]
  let empty = Uint8Array.wrap(emptyBuf.buffer, 0, 32)

  // equal to the empty-code hash => externally owned account, not a contract
  if (cmpBuf(account[3], empty) === 0) {
    return false
  } else {
    return true
  }
}

/**
 * Looks up the bytecode whose hash matches the account's code hash field
 * (account[3]). codeHashes and bytecode are parallel arrays; returns an
 * empty buffer when no hash matches.
 * NOTE(review): account's type parameter was stripped in extraction;
 * restored as Array<Uint8Array> to match isContract above.
 */
function getCode(
  account: Array<Uint8Array>,
  codeHashes: RLPData[],
  bytecode: RLPData[],
): Uint8Array {
  let codeHash = account[3]
  let index = -1

  // linear scan; the per-block code hash list is expected to be small
  for (let i = 0, len = codeHashes.length; i < len; i++) {
    if (cmpBuf(codeHash, unchecked(codeHashes[i]).buffer) === 0) {
      index = i
      break
    }
  }

  if (index >= 0) {
    return bytecode[index].buffer
  } else {
    return new Uint8Array(0)
  }
}
--------------------------------------------------------------------------------
/assembly/opcodes.ts:
--------------------------------------------------------------------------------
// Subset of EVM opcodes understood by the bundled interpreter.
export enum Opcodes {
  Stop = 0x00,
  Add = 0x01,
  Mul = 0x02,
  Sub = 0x03,
  Div = 0x04,
  Lt = 0x10,
  Eq = 0x14,
  IsZero = 0x15,
  Not = 0x19,
  CallValue = 0x34,
  CallDataLoad = 0x35,
  CallDataSize = 0x36,
  CodeCopy = 0x39,
  Pop = 0x50,
  MLoad = 0x51,
  MStore = 0x52,
  MStore8 = 0x53,
  SStore = 0x55,
  Jump = 0x56,
  Jumpi = 0x57,
  JumpDest = 0x5b,
  Push1 = 0x60,
  Push2 = 0x61,
  Push3 = 0x62,
  Push4 = 0x63,
  Push29 = 0x7c,
  Dup1 = 0x80,
  Dup2 = 0x81,
  Dup3 = 0x82,
  Swap1 = 0x90,
  Swap2 = 0x91,
  Swap3 = 0x92,
  Return = 0xf3,
  Revert = 0xfd,
  Invalid = 0xfe,
}
--------------------------------------------------------------------------------
/assembly/package.json:
--------------------------------------------------------------------------------
{
  "name":
"turbo-mpas", 3 | "description": "", 4 | "version": "1.0.0", 5 | "scripts": { 6 | "build": "gulp" 7 | }, 8 | "devDependencies": { 9 | "gulp": "^4", 10 | "wabt": "1.0.11" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /assembly/rlp.ts: -------------------------------------------------------------------------------- 1 | // RLP code is taken from https://github.com/nearprotocol/assemblyscript-rlp 2 | import { ethash_keccak256 } from './keccak' 3 | import { hash } from './util' 4 | 5 | export class RLPBranchNode { 6 | constructor(public children: Array, public dirty: Array | null) {} 7 | } 8 | 9 | /** 10 | * According to the RLP spec: 11 | * A single byte whose value is in the [0x00, 0x7f] range, 12 | * that byte is its own RLP encoding. 13 | */ 14 | @inline 15 | function isSmallByte(buf: Uint8Array): boolean { 16 | return buf.length === 1 && buf[0] < 0x80 17 | } 18 | 19 | export function decodeAccount(buf: Uint8Array): Array { 20 | // Data will have length > 55 21 | // buf[0] == 0xf8 22 | let dataLen = buf[1] 23 | 24 | let nonce: Uint8Array 25 | let nonceLen = buf[2] 26 | let offset = 3 27 | if (nonceLen == 0x80) { 28 | nonce = new Uint8Array(0) 29 | } else if (nonceLen <= 0x7f) { 30 | nonce = buf.subarray(2, 3) 31 | } else { 32 | offset = 3 + nonceLen - 0x80 33 | nonce = buf.subarray(3, offset) 34 | } 35 | 36 | let balance: Uint8Array 37 | let balanceFirstByte = buf[offset] 38 | if (balanceFirstByte == 0x80) { 39 | balance = new Uint8Array(0) 40 | offset++ 41 | } else if (balanceFirstByte <= 0x7f) { 42 | let end = offset + 1 43 | balance = buf.subarray(offset, end) 44 | offset = end 45 | } else { 46 | offset++ 47 | let end = offset + balanceFirstByte - 0x80 48 | balance = buf.subarray(offset, end) 49 | offset = end 50 | } 51 | 52 | // stateRoot and codeHash are 32 byte hashes 53 | offset++ 54 | let stateRoot = buf.subarray(offset, offset + 32) 55 | offset += 33 56 | let codeHash = buf.subarray(offset, offset + 32) 

  return [nonce, balance, stateRoot, codeHash]
}

/**
 * RLP-encodes an account as the 4-item list [nonce, balance, stateRoot,
 * codeHash]. Specialized writer: always emits the 0xf8 long-list form with a
 * single payload-length byte, which holds for account bodies.
 *
 * @param nonce     big-endian integer, 0..32 bytes
 * @param balance   big-endian integer, 0..32 bytes
 * @param stateRoot 32-byte hash
 * @param codeHash  32-byte hash
 * @returns freshly allocated RLP encoding
 */
export function encodeAccount(
  nonce: Uint8Array,
  balance: Uint8Array,
  stateRoot: Uint8Array,
  codeHash: Uint8Array,
): Uint8Array {
  // Nonce and balance are buffers with 0 <= length <= 32
  // We assume stateRoot and codeHash to be constant hashes
  let nonceLen = isSmallByte(nonce) ? 1 : nonce.length + 1
  let balanceLen = isSmallByte(balance) ? 1 : balance.length + 1
  // 33 = one 0xa0 length byte + 32 hash bytes, for each of the two hashes
  let dataLen = nonceLen + balanceLen + 33 + 33

  let buf = new Uint8Array(2 + dataLen)
  buf[0] = 0xf8 // long list, length-of-length = 1
  buf[1] = dataLen
  let offset = 2

  if (nonce.length == 0) {
    buf[offset++] = 0x80 // empty string
  } else if (isSmallByte(nonce)) {
    buf[offset++] = nonce[0] // single byte < 0x80 is its own encoding
  } else {
    buf[offset++] = (0x80 + nonce.length) as u8
    memory.copy((buf.dataStart as usize) + offset, nonce.dataStart as usize, nonce.length)
    offset += nonce.length
  }

  if (balance.length == 0) {
    buf[offset++] = 0x80
  } else if (isSmallByte(balance)) {
    buf[offset++] = balance[0]
  } else {
    buf[offset++] = (0x80 + balance.length) as u8
    memory.copy((buf.dataStart as usize) + offset, balance.dataStart as usize, balance.length)
    offset += balance.length
  }

  // The first value in an array reference is a pointer
  // to the backing buffer. See the memory layout of arrays for more info.
  buf[offset++] = 0xa0 // 0x80 + 0x20
  memory.copy((buf.dataStart as usize) + offset, stateRoot.dataStart, 32)
  offset += 32

  buf[offset++] = 0xa0
  memory.copy((buf.dataStart as usize) + offset, codeHash.dataStart, 32)

  return buf
}

/**
 * RLP-encodes a trie leaf node as the 2-item list [key, value].
 */
export function encodeLeaf(key: Uint8Array, value: Uint8Array): Uint8Array {
  // Key is buffer with 1 < length < 32
  // Value is a buffer with 64 <= length <= 128
  let keyBufLen: u8 = isSmallByte(key) ?
1 : (key.length as u8) + 1 114 | let valueBufLen: u8 = (value.length as u8) + 2 115 | let dataLen: u8 = keyBufLen + valueBufLen 116 | 117 | // We need 1 byte to express length of length 118 | // and 1 byte to express length of list 119 | let buf = new Uint8Array(2 + dataLen) 120 | buf[0] = 0xf8 121 | buf[1] = dataLen 122 | let offset = 2 123 | 124 | // Encode key 125 | if (isSmallByte(key)) { 126 | buf[offset++] = key[0] 127 | } else { 128 | buf[offset++] = 0x80 + key.length 129 | memory.copy((buf.dataStart as usize) + offset, key.dataStart as usize, key.length) 130 | offset += key.length 131 | } 132 | 133 | // Encode value 134 | buf[offset++] = 0xb8 135 | buf[offset++] = value.length as u8 136 | memory.copy((buf.dataStart as usize) + offset, value.dataStart as usize, value.length) 137 | 138 | return buf 139 | } 140 | 141 | export function hashExtension(key: Uint8Array, value: Uint8Array): Uint8Array { 142 | // Key is buffer with 1 < length < 32 143 | // Value is a hash with length == 32 144 | // If key.length >= 21, then rlp structure changes 145 | 146 | // Each buffer with length < 55 needs 1 byte of metadata (0x80 + length) 147 | let keyBufLen: u8 = isSmallByte(key) ? 
1 : (key.length as u8) + 1 148 | let dataLen: u8 = keyBufLen + 33 149 | 150 | let buf: Uint8Array 151 | let offset: u8 = 0 152 | if (dataLen > 55) { 153 | // We need 1 byte to express length of length 154 | // and 1 byte to express length of list 155 | buf = new Uint8Array(2 + dataLen) 156 | buf[0] = 0xf8 157 | buf[1] = dataLen 158 | offset = 2 159 | } else { 160 | // We need 1 byte to express list's length 161 | buf = new Uint8Array(1 + dataLen) 162 | buf[0] = 0xc0 + dataLen 163 | offset = 1 164 | } 165 | 166 | // Encode key 167 | if (isSmallByte(key)) { 168 | buf[offset++] = key[0] 169 | } else { 170 | buf[offset++] = 0x80 + key.length 171 | memory.copy((buf.dataStart as usize) + offset, key.dataStart as usize, key.length) 172 | offset += key.length as u8 173 | } 174 | 175 | // Encode value 176 | buf[offset++] = 0xa0 // 0x80 + 0x20 177 | memory.copy((buf.dataStart as usize) + offset, value.dataStart as usize, 32) 178 | 179 | return hash(buf) 180 | } 181 | 182 | export function hashBranch(children: Array): Uint8Array { 183 | let dataLen: u32 = 0 184 | for (let i = 0; i < 17; i++) { 185 | let c = children[i] 186 | if (c === null) { 187 | dataLen++ 188 | } else { 189 | if (c.length == 32) { 190 | // Needs 1 byte for encoding length and 32 bytes of data 191 | dataLen += 33 192 | } else { 193 | throw new Error('Invalid branch child') 194 | } 195 | } 196 | } 197 | 198 | // How many bytes do we need to encode length? 
199 | let lenOfLen = 1 + i32(dataLen > 255) 200 | let totalLen = 1 + lenOfLen + dataLen 201 | let buf = new Uint8Array(totalLen) 202 | let offset = 0 203 | buf[0] = 0xf7 + lenOfLen 204 | if (lenOfLen == 1) { 205 | buf[1] = dataLen 206 | offset = 2 207 | } else { 208 | let dv = new DataView(buf.buffer) 209 | dv.setUint16(1, dataLen as u16) 210 | offset = 3 211 | } 212 | 213 | for (let i = 0; i < 17; i++) { 214 | let c = children[i] 215 | if (c == null) { 216 | buf[offset++] = 0x80 217 | } else { 218 | buf[offset++] = 0xa0 // 0x80 + 0x20 219 | memory.copy((buf.dataStart as usize) + offset, c.dataStart as usize, 32) 220 | offset += 32 221 | } 222 | } 223 | 224 | return hash(buf) 225 | } 226 | 227 | export function hashBranchNode(branchNodeChildren: Array): usize { 228 | // manually construct the encoded branch node 229 | /* 230 | // here's an encoded branch node with 4 child hashes: 231 | f891808080808080a09f48e0438e53d55e53bb935c4a80e294ff56055cc4b584635b4bafbf894226088080a04216caf9df3c72b105e86b5b75ecb16e09e4a6a718bb27b0b83ec6fd79bb6c0c80a0e17ee4374bd5002160209877201836362b93a75ce5813bf4789053dd613d22e08080a0b82fb32a26c22edc12788287a7d157a5be2443a4ea2a0722c77f5b995ef40d038080 232 | // it's 147 bytes. the 4 children are 32 * 4 == 128 bytes 233 | // the branch node is an RLP list of 17 elements, so 13 elements are empty. an empty element is `80`, so that's 13 bytes. 128 + 13 = 141 bytes. 234 | // that leaves 6 bytes for encoding (2 for 0xf891, 4 of 0xa0) 235 | f8 ;; RLP list over 55 bytes. 
length of byte length is (0xf8 - 0xf7 = 1) 236 | 91 ;; list length = 145 237 | 80 ;; branch index 0 238 | 80 ;; 1 239 | 80 ;; 2 240 | 80 ;; branch index 3 241 | 80 ;; branch index 4 242 | 80 ;; branch index 5 243 | a0 ;; length of string is (0xa0 - 0x80) = 32 244 | 9f48e0438e53d55e53bb935c4a80e294ff56055cc4b584635b4bafbf89422608 ;; hash at branch index 6 245 | 80 ;; branch index 7 246 | 80 ;; branch index 8 247 | a0 ;; length of string 248 | 4216caf9df3c72b105e86b5b75ecb16e09e4a6a718bb27b0b83ec6fd79bb6c0c ;; branch index 9 249 | 80 ;; branch index a 250 | a0 251 | e17ee4374bd5002160209877201836362b93a75ce5813bf4789053dd613d22e0 ;; branch index b 252 | 80 ;; c 253 | 80 ;; d 254 | a0 255 | b82fb32a26c22edc12788287a7d157a5be2443a4ea2a0722c77f5b995ef40d03 ;; branch index e 256 | 80 ;; f 257 | 80 ;; 17th element 258 | */ 259 | 260 | // branch node will always have at least 2 children, so its length will always be at least 64 bytes 261 | // first byte of a branch node will be either f8 (<= 7 children) or f9 (>= 8 children) 262 | // two children: length is 81 bytes (0x51) 263 | // three children: length is 113 bytes (0x71) 264 | // four children: length is 145 bytes (0x91) 265 | // five children: length is 177 bytes (0xb1) 266 | 267 | // bytes for hashes = len(0xa0 + hash) = 33*branch_num_children 268 | // bytes for empty nodes (0x80) = (17 - branch_num_children) 269 | 270 | let child_indexes = new Array() 271 | let child_hash_ptrs = new Array() 272 | for (let i = 0; i < 17; i++) { 273 | // read child index 274 | let branchNodeChild = branchNodeChildren[i] 275 | if (branchNodeChild > 0) { 276 | child_indexes.push(i as u8) 277 | child_hash_ptrs.push(branchNodeChild) 278 | } 279 | } 280 | 281 | let branch_num_children = child_indexes.length 282 | 283 | // allocate buffer for branch node 284 | let list_bytes_len = 33 * branch_num_children + (17 - branch_num_children) 285 | let branch_node_bytes: usize 286 | let branch_node_bytes_len: usize 287 | let branch_node_datastart: 
usize 288 | if (branch_num_children < 8) { 289 | //0xf8 + (list_len as u8) + bytes.. 290 | branch_node_bytes = changetype(new ArrayBuffer(list_bytes_len + 2)) 291 | branch_node_bytes_len = list_bytes_len + 2 292 | store(branch_node_bytes, 0xf8) 293 | store(branch_node_bytes + 1, list_bytes_len as u8) 294 | branch_node_datastart = branch_node_bytes + 2 295 | } else { 296 | //0xf9 + (list_len as u16) + bytes.. 297 | branch_node_bytes = changetype(new ArrayBuffer(list_bytes_len + 3)) 298 | branch_node_bytes_len = list_bytes_len + 3 299 | store(branch_node_bytes, 0xf9) 300 | store(branch_node_bytes + 1, bswap(list_bytes_len as u16)) 301 | branch_node_datastart = branch_node_bytes + 3 302 | } 303 | 304 | let children_copied = 0 305 | let next_child: u8 306 | 307 | let branch_node_offset = branch_node_datastart 308 | let i: u8 = 0 309 | while (i < 17) { 310 | if (children_copied < branch_num_children) { 311 | next_child = child_indexes[children_copied] 312 | // first insert all the 0x80's for empty slots 313 | if (i < next_child) { 314 | // TODO: maybe the check isn't necessary if memory.fill accepts 0 length inputs 315 | 316 | let num_empties = next_child - i 317 | memory.fill(branch_node_offset, 0x80, num_empties) 318 | branch_node_offset = branch_node_offset + num_empties 319 | 320 | i = next_child 321 | } 322 | 323 | // could be optimized to reduce memory copying. 
might require a different sequence of opcodes / hashes 324 | // or using keccak.update() might be better 325 | 326 | // now copy the child 327 | // insert 0xa0 byte 328 | store(branch_node_offset, 0xa0) 329 | branch_node_offset++ 330 | // copy the child hash 331 | let child_hash_ptr = child_hash_ptrs[children_copied] 332 | memory.copy(branch_node_offset, child_hash_ptr, 32) 333 | branch_node_offset = branch_node_offset + 32 334 | children_copied++ 335 | } else { 336 | // children_copied >= branch_num_children 337 | // we've copied all children and still haven't filled all 17 slots 338 | // copy empties to the end 339 | let num_empties = 17 - i 340 | memory.fill(branch_node_offset, 0x80, num_empties) 341 | branch_node_offset = branch_node_offset + num_empties 342 | break 343 | } 344 | 345 | i = i + 1 346 | } 347 | 348 | // branch node is constructed, now hash it and push hash back on stack 349 | 350 | //debug_mem(branch_node_bytes, branch_node_bytes_len); 351 | 352 | let branchHashOutputPtr = changetype(new ArrayBuffer(32)) 353 | ethash_keccak256(branchHashOutputPtr, branch_node_bytes, branch_node_bytes_len) 354 | //debug_mem(branchHashOutputPtr, 32); 355 | 356 | return branchHashOutputPtr 357 | } 358 | 359 | /** 360 | * class that represents data in rlp format. Due to the lack of support for recursive 361 | * data types, we have to use a class instead. 
362 | */ 363 | export class RLPData { 364 | buffer: Uint8Array 365 | children: RLPData[] 366 | 367 | constructor(input: Uint8Array | null, children: RLPData[] | null) { 368 | if (input) { 369 | this.buffer = input 370 | } else { 371 | this.buffer = new Uint8Array(0) 372 | } 373 | 374 | if (children) { 375 | this.children = children 376 | } else { 377 | this.children = new Array() 378 | } 379 | } 380 | } 381 | 382 | const hexAlphabet = '0123456789abcdef' 383 | 384 | export class Decoded { 385 | data: RLPData 386 | remainder: Uint8Array 387 | 388 | constructor(data: RLPData, remainder: Uint8Array) { 389 | this.data = data 390 | this.remainder = remainder 391 | } 392 | } 393 | 394 | /** 395 | * Parse integers. Check if there is no leading zeros 396 | * Note that this is NOT safe in assemblyscript due to 397 | * the lack of error handling. 398 | * @param v The value to parse 399 | * @param base The base to parse the integer into 400 | */ 401 | function safeParseInt(v: string, base: u32): u32 { 402 | // v.slice(0, 2) == '00' 403 | if (v.charCodeAt(0) == 0x30 && v.charCodeAt(1) == 0x30) { 404 | throw new Error('invalid RLP: extra zeros') 405 | } 406 | return I32.parseInt(v, base) 407 | } 408 | 409 | /** Transform an integer into its hexadecimal value */ 410 | function intToHex(integer: u32): string { 411 | let res = new Array() 412 | do { 413 | let t = integer / 16 414 | let r = integer % 16 415 | integer = t 416 | res.push(hexAlphabet[r]) 417 | } while (integer) 418 | let hex = res.reverse().join('') 419 | return hex.length & 1 ? 
'0' + hex : hex
}

/**
 * Hex-encodes a byte array (two lowercase hex chars per byte, no 0x prefix).
 * Builds the result as ASCII bytes and decodes once, avoiding per-byte
 * string concatenation.
 */
function bytesToHex(bytes: Uint8Array): string {
  let len = bytes.length
  let res = new Uint8Array(len * 2)
  for (let i = 0; i < len; i++) {
    let hex = intToHex(bytes[i])
    unchecked((res[i * 2 + 0] = hex.charCodeAt(0)))
    unchecked((res[i * 2 + 1] = hex.charCodeAt(1)))
  }
  return String.UTF8.decodeUnsafe(res.dataStart as usize, res.byteLength)
}

/**
 * Parses a hex string (no 0x prefix) into bytes.
 * An empty string yields an empty array; an odd-length string traps via assert.
 */
function hexToBytes(hex: string): Uint8Array {
  if (!hex.length) {
    return new Uint8Array(0)
  }
  assert((hex.length & 1) == 0)
  let byteLength = hex.length / 2
  let res = new Uint8Array(byteLength)
  for (let i = 0; i < byteLength; i++) {
    // FIX: substring takes an *end index*, not a length. The original
    // `hex.substring(i * 2, 2)` read the wrong two characters for every i >= 1
    // (substring swaps its arguments when start > end).
    res[i] = U8.parseInt(hex.substring(i * 2, i * 2 + 2), 16)
  }
  return res
}

/** Concatenates two byte arrays into a freshly allocated one. */
function concatUint8Array(arr1: Uint8Array, arr2: Uint8Array): Uint8Array {
  let len1 = arr1.byteLength
  let len2 = arr2.byteLength
  let res = new Uint8Array(len1 + len2)
  let dst = res.dataStart as usize
  memory.copy(dst, arr1.dataStart as usize, len1)
  memory.copy(dst + len1, arr2.dataStart as usize, len2)
  return res
}

/** Concatenates a list of byte arrays into a single freshly allocated one. */
// NOTE(review): the <Uint8Array> type argument was lost in extraction and has
// been restored here; the call sites pass Uint8Array elements.
function concatUint8Arrays(arrays: Array<Uint8Array>): Uint8Array {
  let len = arrays.reduce((acc, x) => acc + x.byteLength, 0 as u32)
  let res = new Uint8Array(len)
  let counter = 0
  for (let i = 0, len = arrays.length; i < len; i++) {
    // TODO: check that arrays[i].byteOffset is right and covered by tests
    let arr = unchecked(arrays[i])
    memory.copy((res.dataStart as usize) + counter, arr.dataStart as usize, arr.byteLength)
    counter += arr.byteLength
  }
  return res
}

/**
 * RLP Encoding based on: https://github.com/ethereum/wiki/wiki/%5BEnglish%5D-RLP
 * This function takes in an argument of type Input and returns the rlp encoding of it.
 * @param input a Uint8Array or an array of inputs (RLPData tree).
 * @returns rlp encoded byte array.
 **/
export function encode(input: RLPData): Uint8Array {
  let children = input.children
  let len = children.length
  if (len) {
    // List case: output[0] is reserved for the length prefix, filled in after
    // the children are encoded.
    let output = [new Uint8Array(0)]
    let totalLen = 0
    for (let i = 0; i < len; i++) {
      let e = encode(children[i])
      output.push(e)
      totalLen += output[i + 1].byteLength
    }
    output[0] = encodeLength(totalLen, 192)
    return concatUint8Arrays(output)
  } else {
    // String (byte array) case.
    let inputBuffer = input.buffer
    len = inputBuffer.length
    // A single byte < 0x80 is its own encoding.
    if (len == 1 && inputBuffer[0] < 128) {
      return inputBuffer
    }
    let encodedLen = encodeLength(len, 128)
    return concatUint8Array(encodedLen, inputBuffer)
  }
}

/**
 * Encodes an RLP length prefix.
 * @param len payload length.
 * @param offset 128 for strings, 192 for lists.
 * Lengths requiring more than two bytes are unsupported and throw.
 */
function encodeLength(len: u32, offset: u32): Uint8Array {
  if (len < 56) {
    // Short form: single prefix byte (offset + len); two bytes cannot occur
    // here for the standard offsets but the original kept the branch.
    let int = len + offset
    if (int < 256) {
      let int_as_bytes = new Uint8Array(1)
      int_as_bytes[0] = int as u8
      return int_as_bytes
    }
    if (int < 65536) {
      let int_as_bytes = new Uint8Array(2)
      let int_view = new DataView(int_as_bytes.buffer, 0, 2)
      int_view.setUint16(0, int as u16)
      return int_as_bytes
    }
    throw new Error('longer lengths unsupported')
  } else {
    // Long form: prefix byte (offset + 55 + lLength) followed by the
    // big-endian length itself.
    let len_as_bytes: Uint8Array
    let lLength: u32

    if (len < 256) {
      lLength = 1
      len_as_bytes = new Uint8Array(1)
      len_as_bytes[0] = len as u8
    } else if (len < 65536) {
      lLength = 2
      len_as_bytes = new Uint8Array(2)
      let len_view = new DataView(len_as_bytes.buffer, 0, 2)
      len_view.setUint16(0, len as u16)
    } else {
      throw new Error('longer lengths unsupported')
    }

    let firstByte_as_bytes: Uint8Array
    let firstByte = offset + 55 + lLength
    if (firstByte < 256) {
      firstByte_as_bytes = new Uint8Array(1)
      firstByte_as_bytes[0] = firstByte as u8
    } else if (firstByte < 65536) {
      firstByte_as_bytes = new Uint8Array(2)
      let int_view = new DataView(firstByte_as_bytes.buffer, 0, 2)
      int_view.setUint16(0, firstByte as u16)
    } else {
      throw new Error('longer lengths unsupported')
    }

    return concatUint8Array(firstByte_as_bytes, len_as_bytes)
  }
}

/**
 * RLP Decoding based on: {@link https://github.com/ethereum/wiki/wiki/%5BEnglish%5D-RLP|RLP}
 * @param input - Uint8Array
 * @returns - returns RLPData containing the original message
 * @throws if the input has trailing bytes after the top-level item.
 **/
export function decode(input: Uint8Array): RLPData {
  let res = _decode(input)
  if (res.remainder.length != 0) {
    throw new Error('invalid remainder')
  }
  return res.data
}

/**
 * Decodes one RLP item from the front of `input`, returning the item and the
 * unconsumed remainder. Note: in the short-string and short-list branches
 * `length` includes the prefix byte, so subarray bounds are (1, length) /
 * (llength, length + llength) respectively.
 */
export function _decode(input: Uint8Array): Decoded {
  let length: u32
  if (!input.length) {
    throw new Error('invalid input: cannot be empty')
  }
  let firstByte = input[0]
  if (firstByte <= 0x7f) {
    // a single byte whose value is in the [0x00, 0x7f] range, that byte is its own RLP encoding.
    return new Decoded(new RLPData(input.subarray(0, 1), null), input.subarray(1))
  } else if (firstByte <= 0xb7) {
    // a string of 0-55 bytes; `length` = payload length + 1 (prefix included).
    length = firstByte - 0x7f
    if (firstByte == 0x80) {
      // empty string
      return new Decoded(new RLPData(new Uint8Array(0), null), input.subarray(length))
    }
    let data = input.subarray(1, length)
    // A single byte < 0x80 must have used the single-byte encoding.
    if (length == 2 && data[0] < 0x80) {
      throw new Error('invalid rlp encoding: byte must be less 0x80')
    }
    return new Decoded(new RLPData(data, null), input.subarray(length))
  } else if (firstByte <= 0xbf) {
    // a string longer than 55 bytes; llength = length-of-length + 1.
    let llength = firstByte - 0xb6
    length = safeParseInt(bytesToHex(input.subarray(1, llength)), 16)
    let data = input.subarray(llength, length + llength)
    if ((data.length as u32) < length) {
      throw new Error('invalid RLP')
    }
    return new Decoded(new RLPData(data, null), input.subarray(length + llength))
  } else if (firstByte <= 0xf7) {
    // a list whose payload is 0-55 bytes; `length` includes the prefix byte.
    length = firstByte - 0xbf
    let remainder = input.subarray(1, length)
    let decoded: RLPData[] = []
    while (remainder.length) {
      let d = _decode(remainder)
      decoded.push(d.data)
      remainder = d.remainder
    }
    return new Decoded(new RLPData(null, decoded), input.subarray(length))
  } else {
    // a list over 55 bytes long
    let llength = firstByte - 0xf6
    length = safeParseInt(bytesToHex(input.subarray(1, llength)), 16)
    let totalLength = llength + length
    if (totalLength > (input.length as u32)) {
      throw new Error('invalid rlp: total length is larger than the data')
    }

    let remainder = input.subarray(llength, totalLength)
    if (remainder.length == 0) {
      throw new Error('invalid rlp, List has a invalid length')
    }
    let decoded: RLPData[] = []
    while (remainder.length) {
      let d = _decode(remainder)
      decoded.push(d.data)
      remainder = d.remainder
    }
    return new Decoded(new RLPData(null, decoded), input.subarray(totalLength))
  }
}
--------------------------------------------------------------------------------
/assembly/token.ts:
--------------------------------------------------------------------------------
import { ethash_keccak256 } from './keccak'
import {
  hashBranchNode,
  RLPBranchNode,
  RLPData,
  decode,
  encode,
  hashExtension,
  hashBranch,
  encodeLeaf,
  encodeAccount,
  decodeAccount,
} from './rlp'
import {
  parseU8,
  padBuf,
  cmpBuf,
  stripBuf,
  hash,
  nibbleArrToUintArr,
  addHexPrefix,
  uintArrToNibbleArr,
  removeHexPrefix,
} from './util'
import { debug, debugMem } from './debug'
import {
  eth2_blockDataSize,
  eth2_blockDataCopy,
  eth2_loadPreStateRoot,
  eth2_savePostStateRoot,
  bignum_add256,
  bignum_sub256,
} from '../node_modules/scout.ts/assembly/env'

/** Multiproof instruction opcodes (flat-encoded in the block data). */
export enum Opcode {
  Branch = 0,
  Hasher = 1,
  Leaf = 2,
  Extension = 3,
}

/** Trie node kinds used by the in-memory Trie map and the proof stack. */
export enum NodeType {
  Branch = 0,
  Leaf = 1,
  Extension = 2,
  Hash = 3,
}

// for Map binaryen toText generates function names with commas, which wabt doesn't like.
// NOTE(review): the type arguments were lost in extraction; restored as
// <usize, Node> since keys are hash pointers and values are Node — confirm.
const Trie = new Map<usize, Node>()

/**
 * A trie node stored in `Trie`. Exactly one of `branchBody` / `leafBody` is
 * non-null depending on `type`.
 */
class Node {
  constructor(
    public type: NodeType,

    public branchBody: RLPBranchNode | null,
    public leafBody: Uint8Array | null, // a leaf body is just the rlp encoded account
  ) {}
}

/**
 * EE entry point: loads the pre-state root and block data from the Eth2
 * environment, processes the block, and saves the resulting post-state root.
 */
export function main(): void {
  // INPUT 1: pre-state root
  let preStateRootBuf = new ArrayBuffer(32)
  let preStateRoot = Uint8Array.wrap(preStateRootBuf, 0, 32)
  eth2_loadPreStateRoot(preStateRootBuf as usize)

  // INPUT 2: proof data from the EEI
  let blockDataSize = eth2_blockDataSize()
  let blockDataBuf = new ArrayBuffer(blockDataSize)
  eth2_blockDataCopy(blockDataBuf as usize, 0, blockDataSize)
  let blockData = Uint8Array.wrap(blockDataBuf, 0, blockDataSize)

  let postStateRoot = processBlock(preStateRoot, blockData)

  eth2_savePostStateRoot((postStateRoot.buffer as usize) + postStateRoot.byteOffset)
}

/**
 * Applies a block of simple value-transfer transactions against accounts
 * proven by a Merkle multiproof, and returns the new state root.
 * @param preStateRoot 32-byte pre-state root the proof must hash to.
 * @param blockData RLP list: [txes, addrs, hashes, leafKeys, accounts, instructions].
 * @throws on malformed multiproof, bad nonce, or insufficient balance.
 */
export function processBlock(preStateRoot: Uint8Array, blockData: Uint8Array): Uint8Array {
  // input data is RLP
  let inputDecoded = decode(blockData)
  let inputChildren = inputDecoded.children

  // input_decoded is type RLPData: { buffer: Uint8Array, children: RLPData[] }
  // [txes, addrs, hashes, leaves, instructions]
  let txes = inputChildren[0].children
  let addrs = inputChildren[1].children
  let hashes = inputChildren[2].children
  let leafKeys = inputChildren[3].children
  let accounts = inputChildren[4].children
  // Instructions are flat-encoded
  let instructions = inputChildren[5].buffer

  if (addrs.length !== leafKeys.length || addrs.length !== accounts.length) {
    throw new Error('invalid multiproof')
  }
  // Slot i holds the re-encoded account for leaf i once some tx touched it;
  // null means "still the original proof value".
  let updatedAccounts = new Array<Uint8Array | null>(accounts.length)

  for (let i = 0, len = txes.length; i < len; i++) {
    let tx = txes[i]
    let txChildren = tx.children
    // [toIdx, value, nonce, fromIdx]
    let toIdx = parseU8(txChildren[0].buffer)
    let fromIdx = parseU8(txChildren[3].buffer)
    let value = txChildren[1].buffer
    let nonce = txChildren[2].buffer

    // TODO: Hash unsigned tx, recover from address, check against fromIdx

    let fromAccountRaw = accounts[fromIdx].buffer
    // If `from` has been modified by previous txes
    // load the updated one.
    if (updatedAccounts[fromIdx] != null) {
      fromAccountRaw = updatedAccounts[fromIdx] as Uint8Array
    }

    let toAccountRaw = accounts[toIdx].buffer
    if (updatedAccounts[toIdx] != null) {
      toAccountRaw = updatedAccounts[toIdx] as Uint8Array
    }

    let fromAccount = decodeAccount(fromAccountRaw)
    let toAccount = decodeAccount(toAccountRaw)
    // Sender's nonce should match tx's nonce
    if (cmpBuf(fromAccount[0], nonce) != 0) {
      throw new Error('Invalid nonce')
    }
    // Sender has enough balance
    if (cmpBuf(fromAccount[1], value) == -1) {
      throw new Error('Insufficient funds')
    }

    // Update nonce and balances (all 256-bit big-endian buffers).
    value = padBuf(value, 32)
    let fromBalance = padBuf(fromAccount[1], 32)
    let newFromBalance = new ArrayBuffer(32)
    bignum_sub256(fromBalance.dataStart, value.dataStart, newFromBalance as usize)

    let toBalance = padBuf(toAccount[1], 32)
    let newToBalance = new ArrayBuffer(32)
    bignum_add256(toBalance.dataStart, value.dataStart, newToBalance as usize)

    let paddedNonce = padBuf(nonce, 32)
    let fromNonce = padBuf(fromAccount[0], 32)
    let newFromNonce = new ArrayBuffer(32)
    let one256 = new ArrayBuffer(32)
    let onedv = new DataView(one256)
    onedv.setUint8(31, 1)
    bignum_add256(fromNonce.dataStart, one256 as usize, newFromNonce as usize)

    // Encode updated accounts (leading zeros stripped back off).
    let newFromAccount = encodeAccount(
      stripBuf(Uint8Array.wrap(newFromNonce)),
      stripBuf(Uint8Array.wrap(newFromBalance)),
      fromAccount[2],
      fromAccount[3],
    )
    let newToAccount = encodeAccount(
      toAccount[0],
      stripBuf(Uint8Array.wrap(newToBalance)),
      toAccount[2],
      toAccount[3],
    )

    updatedAccounts[fromIdx] = newFromAccount
    updatedAccounts[toIdx] = newToAccount
  }

  // Keys in the trie are keccak(address).
  let addrsLen = addrs.length
  let keys = new Array<Uint8Array>(addrsLen)
  for (let i = 0; i < addrsLen; i++) {
    keys[i] = hash(addrs[i].buffer)
  }

  let postStateRoot = verifyMultiproofAndUpdate(
    preStateRoot,
    hashes,
    leafKeys,
    accounts,
    updatedAccounts as Uint8Array[],
    instructions,
    keys,
  )

  return postStateRoot
}

/** One entry of the multiproof evaluation stack: old and new hash plus the
 *  indices (into `leafKeys`) of all leaves below this node. */
class StackItem {
  constructor(
    public kind: NodeType,
    public pathIndices: Array<i32>,
    public hash: Uint8Array | null,
    public newHash: Uint8Array | null,
  ) {}
}

/**
 * Interprets the multiproof instruction stream, recomputing the pre-state
 * root (verified against `preStateRoot`) and, in the same pass, the
 * post-state root with `updatedAccounts` substituted at the leaves.
 * Also reconstructs each leaf's nibble path and checks it against `keys`.
 * @returns the new (post-state) root hash.
 */
function verifyMultiproofAndUpdate(
  preStateRoot: Uint8Array,
  hashes: RLPData[],
  leafKeys: RLPData[],
  accounts: RLPData[],
  updatedAccounts: Uint8Array[],
  instructions: Uint8Array,
  keys: Uint8Array[],
): Uint8Array {
  let pc = 0
  let hashIdx = 0
  let leafIdx = 0
  let stack = new Array<StackItem>(100)
  let stackTop = 0

  let leafKeysLen = leafKeys.length
  let paths = new Array<Array<u8>>(leafKeysLen)
  for (let i = 0; i < leafKeysLen; i++) {
    paths[i] = new Array<u8>()
  }

  while (pc < instructions.length) {
    let op = instructions[pc++]
    switch (op) {
      case Opcode.Hasher: {
        // Push an opaque subtree hash from the proof.
        if (hashIdx >= hashes.length) {
          throw new Error('Not enough hashes in multiproof')
        }
        let h = hashes[hashIdx++].buffer
        stack[stackTop++] = new StackItem(NodeType.Hash, [], h, h)
        break
      }
      case Opcode.Leaf: {
        // Push the next leaf: old hash from the proof account, new hash from
        // the updated account.
        if (leafIdx >= leafKeys.length) {
          throw new Error('Not enough leaves in multiproof')
        }

        let path = removeHexPrefix(uintArrToNibbleArr(leafKeys[leafIdx].buffer))
        paths[leafIdx] = path
        let l = encodeLeaf(leafKeys[leafIdx].buffer, accounts[leafIdx].buffer)
        let ul = encodeLeaf(leafKeys[leafIdx].buffer, updatedAccounts[leafIdx])
        leafIdx++
        let h = hash(l)
        let nh = hash(ul)
        stack[stackTop++] = new StackItem(NodeType.Leaf, [leafIdx - 1], h, nh)
        break
      }
      case Opcode.Branch: {
        // Pop one child per listed index and hash the 17-element branch.
        let indicesLen = instructions[pc++]
        let branchIndices = new Array<u8>(indicesLen)
        for (let i = 0; i < (indicesLen as i32); i++) {
          branchIndices[i] = instructions[pc + i]
        }
        pc += indicesLen

        let children = new Array<Uint8Array | null>(17)
        let newChildren = new Array<Uint8Array | null>(17)
        let pathIndices = new Array<i32>()
        for (let i = 0; i < branchIndices.length; i++) {
          let idx = branchIndices[i]
          let n = stack[--stackTop]

          children[idx] = n.hash
          newChildren[idx] = n.newHash

          pathIndices = pathIndices.concat(n.pathIndices)
          // Prepend the branch index to the path of every leaf below.
          for (let i = 0; i < n.pathIndices.length; i++) {
            paths[n.pathIndices[i]].unshift(idx)
          }
        }
        let h = hashBranch(children)
        let nh = hashBranch(newChildren)

        stack[stackTop++] = new StackItem(NodeType.Branch, pathIndices, h, nh)
        break
      }
      case Opcode.Extension:
        // Pop one node and wrap it in an extension with the given nibbles.
        let nibblesLen = instructions[pc++]
        let nibbles = new Array<u8>(nibblesLen)
        for (let i = 0; i < (nibblesLen as i32); i++) {
          nibbles[i] = instructions[pc + i]
        }
        pc += nibblesLen

        let key = nibbleArrToUintArr(addHexPrefix(nibbles, false))
        // addHexPrefix modifies array in-place
        nibbles = removeHexPrefix(nibbles)

        let n = stack[--stackTop]
        let h = hashExtension(key, n.hash as Uint8Array)
        let nh = hashExtension(key, n.newHash as Uint8Array)

        stack[stackTop++] = new StackItem(NodeType.Extension, n.pathIndices.slice(0), h, nh)
        for (let i = 0; i < n.pathIndices.length; i++) {
          paths[n.pathIndices[i]] = nibbles.concat(paths[n.pathIndices[i]])
        }
        break
    }
  }
  // The single remaining stack item is the root.
  let r = stack[stackTop - 1]
  let rootHash = r.hash as Uint8Array
  let newRootHash = r.newHash as Uint8Array

  if (cmpBuf(rootHash, preStateRoot) != 0) {
    throw new Error('invalid root hash')
  }

  // Verify given keys match computed paths
  for (let i = 0, len = paths.length; i < len; i++) {
    let path = nibbleArrToUintArr(paths[i])
    if (cmpBuf(path, keys[i]) != 0) {
      throw new Error('invalid key')
    }
  }

  return newRootHash
}

/**
 * Walks the in-memory Trie from the root following `new_leaf_key_nibbles`,
 * marking branch nodes dirty along the way; when it reaches an existing
 * leaf, delegates to createNewBranchWhereLeafExists to split it.
 * Extension nodes and empty-slot insertion are not yet implemented.
 */
function insertNewLeafNewBranch(
  prestate_root_hash_ptr: usize,
  new_leaf_key_nibbles: Array<u8>,
  new_leaf_account_rlp: Uint8Array,
): void {
  let currentNode = Trie.get(prestate_root_hash_ptr)

  // pathStack could be smaller than 40
  let pathStack = [prestate_root_hash_ptr]
  for (let k_i = 0; k_i < 40; k_i++) {
    let branch_index_i = new_leaf_key_nibbles[k_i]

    if (currentNode.type == NodeType.Leaf) {
      createNewBranchWhereLeafExists(
        new_leaf_account_rlp,
        new_leaf_key_nibbles,
        k_i,
        currentNode,
        pathStack,
      )
      return
    } else if (currentNode.type == NodeType.Branch) {
      if (currentNode.branchBody.dirty == null) {
        // NOTE(review): element type of `dirty` is declared in rlp.ts
        // (out of view); assumed u8 — confirm against RLPBranchNode.
        currentNode.branchBody.dirty = new Array<u8>()

        // setting the dirty flag to an empty array indicates that no children are dirty
        // but the branch node itself needs to be rehashed
        // explanation:
        // if the next node is a leaf, then it will be converted into a new child branch node
        // with two leafs underneath it (the existing leaf and the new leaf)
        // the new child branch won't be dirty, since it will be created with the new hash
        // the current branch (i.e. the parent of the new branch) will have the new hash
        // inserted into the branch index
      }

      let next_node_in_path_hash_ptr = currentNode.branchBody.children[branch_index_i]
      // if next_node_in_path_hash_ptr is 0, then the child is empty

      if (next_node_in_path_hash_ptr == 0) {
        // end of path is an already existing branch node
        // can just insert the leaf into the branch
        throw new Error(
          'Dont yet handle inserting a leaf into an already existing branch (but its easy) ',
        )
      }

      pathStack.push(next_node_in_path_hash_ptr)
      // next node in path is either a leaf or a branch
      let nextNodeInPath = Trie.get(next_node_in_path_hash_ptr)

      // TODO: we already check these types in the for loop.. merge the logic?
      if (nextNodeInPath.type == NodeType.Branch) {
        // keep walking...
        currentNode.branchBody.dirty.push(branch_index_i)
        currentNode = nextNodeInPath
      } else if (nextNodeInPath.type == NodeType.Leaf) {
        // next node is a leaf, and so is last node in the path
        // next step will create the new branch node
        currentNode = nextNodeInPath
      } else {
        throw new Error('extension nodes are unimplemented!')
      }
    } else {
      throw new Error('extension nodes are unimplemented.')
    }
  } // end for loop
}

/**
 * Splits an existing leaf: builds a new branch node holding both the
 * existing leaf (re-keyed) and the new leaf, then points the parent branch
 * at the new branch. Assumes the two keys diverge at nibble k_i.
 */
function createNewBranchWhereLeafExists(
  new_leaf_account_rlp: Uint8Array,
  new_key_nibbles: Array<u8>,
  k_i: u32,
  existingLeafNode: Node,
  pathStack: Array<usize>,
): void {
  let existing_leaf_key_value = decode(existingLeafNode.leafBody as Uint8Array)
  let existing_leaf_value = existing_leaf_key_value.children[1].buffer

  let existing_leaf_key_nibbles = uintArrToNibbleArr(existing_leaf_key_value.children[0].buffer)
  existing_leaf_key_nibbles = removeHexPrefix(existing_leaf_key_nibbles)

  if (new_key_nibbles[k_i] == existing_leaf_key_nibbles[0]) {
    throw new Error('TODO: handle extension node insertion')
  }

  // recreate existing leaf

  let new_key_for_existing_leaf_nibbles = existing_leaf_key_nibbles
  // first nibble of the existing leaf key becomes its branch index in the new branch
  let branch_index_for_existing_leaf = new_key_for_existing_leaf_nibbles.shift()

  let new_key_for_existing_leaf = nibbleArrToUintArr(
    addHexPrefix(new_key_for_existing_leaf_nibbles, true),
  )

  let new_node_for_existing_leaf_rlp_children = [
    new RLPData(null, new Array<RLPData>()),
    new RLPData(null, new Array<RLPData>()),
  ]
  new_node_for_existing_leaf_rlp_children[0].buffer = new_key_for_existing_leaf
  new_node_for_existing_leaf_rlp_children[1].buffer = existing_leaf_value
  let new_node_for_existing_leaf_rlp = new RLPData(null, new_node_for_existing_leaf_rlp_children)
  let new_node_for_existing_leaf = encode(new_node_for_existing_leaf_rlp)

  let new_hash_for_existing_leaf_buffer = new ArrayBuffer(32)
  let new_hash_for_existing_leaf_ptr = changetype<usize>(new_hash_for_existing_leaf_buffer)

  ethash_keccak256(
    new_hash_for_existing_leaf_ptr,
    (new_node_for_existing_leaf.buffer as usize) + new_node_for_existing_leaf.byteOffset,
    new_node_for_existing_leaf.byteLength,
  )

  let new_node_for_existing_leaf_obj = new Node(NodeType.Leaf, null, new_node_for_existing_leaf)
  Trie.set(new_hash_for_existing_leaf_ptr, new_node_for_existing_leaf_obj)

  // create new leaf

  let branch_index_for_new_leaf = new_key_nibbles[k_i]
  let key_for_new_leaf = nibbleArrToUintArr(addHexPrefix(new_key_nibbles.slice(k_i + 1), true))

  let node_for_new_leaf_rlp_children = [
    new RLPData(null, new Array<RLPData>()),
    new RLPData(null, new Array<RLPData>()),
  ]
  node_for_new_leaf_rlp_children[0].buffer = key_for_new_leaf
  node_for_new_leaf_rlp_children[1].buffer = new_leaf_account_rlp
  let node_for_new_leaf_rlp = new RLPData(null, node_for_new_leaf_rlp_children)
  let node_for_new_leaf = encode(node_for_new_leaf_rlp)

  let hash_for_new_leaf_buffer = new ArrayBuffer(32)
  let hash_for_new_leaf_ptr = changetype<usize>(hash_for_new_leaf_buffer)

  ethash_keccak256(
    hash_for_new_leaf_ptr,
    (node_for_new_leaf.buffer as usize) + node_for_new_leaf.byteOffset,
    node_for_new_leaf.byteLength,
  )

  let node_for_new_leaf_obj = new Node(NodeType.Leaf, null, node_for_new_leaf)
  Trie.set(hash_for_new_leaf_ptr, node_for_new_leaf_obj)

  // both leafs created. now create new branch node.

  let new_branch_node = new RLPBranchNode(new Array<usize>(17), null)

  new_branch_node.children[branch_index_for_existing_leaf] = new_hash_for_existing_leaf_ptr
  new_branch_node.children[branch_index_for_new_leaf] = hash_for_new_leaf_ptr

  let new_branch_hash_ptr = hashBranchNode(new_branch_node.children)
  let new_branch_node_obj = new Node(NodeType.Branch, new_branch_node, null)
  Trie.set(new_branch_hash_ptr, new_branch_node_obj)

  // now we have a new branch node. but we need to replace the pointer in the parent branch node
  // parent branch node previously pointed to a leaf
  // in the updated trie, it should point to the new branch node

  let parent_branch_hash_ptr = pathStack[pathStack.length - 2]

  let parentBranchNode = Trie.get(parent_branch_hash_ptr)

  let new_branch_node_parent_index = new_key_nibbles[k_i - 1]

  parentBranchNode.branchBody.children[new_branch_node_parent_index] = new_branch_hash_ptr
  //Trie.set(parent_branch_hash_ptr, parentBranchNode);
  // TODO: do we need to reset it with Trie.set?

  return
}

/**
 * Recomputes the hash of a node whose children were modified, recursing on
 * dirty children first. Only branch nodes are supported; dirty leaves and
 * extension nodes throw.
 * @param staleHashPtr pointer to the node's pre-update hash (Trie key).
 * @returns pointer to the node's new hash.
 */
function rehashNode(staleHashPtr: usize): usize {
  // lookup the new node using the stale hash
  let node_with_stale_hash = Trie.get(staleHashPtr)
  if (node_with_stale_hash.type == NodeType.Leaf) {
    throw new Error('TODO: handle dirty leaf')
  }

  if (node_with_stale_hash.type == NodeType.Branch) {
    // recurse on dirty children
    let dirty_indexes = node_with_stale_hash.branchBody.dirty
    if (dirty_indexes == null) {
      throw new Error('ERROR: called rehash on a branch node that has no dirty flag')
    }

    for (let i = 0, len = dirty_indexes.length; i < len; i++) {
      let dirty_i = dirty_indexes[i]
      let stale_hash_for_dirty_child_ptr = node_with_stale_hash.branchBody.children[dirty_i]
      let new_hash_for_dirty_child_ptr = rehashNode(stale_hash_for_dirty_child_ptr)
      node_with_stale_hash.branchBody.children[dirty_i] = new_hash_for_dirty_child_ptr
    }

    // if dirty_indexes.length == 0, no dirty children (only new children already hashed)
    // branch node itself needs to be rehashed

    let new_branch_hash_ptr = hashBranchNode(node_with_stale_hash.branchBody.children)

    Trie.set(new_branch_hash_ptr, node_with_stale_hash)
    return new_branch_hash_ptr
  }

  // TODO: handle extension nodes
  throw new Error('only branch nodes and leaf nodes are implemented')
}
--------------------------------------------------------------------------------
/assembly/tsconfig.json:
--------------------------------------------------------------------------------
{
2 | "extends": "../node_modules/assemblyscript/std/assembly.json", 3 | "include": [ 4 | "./**/*.ts" 5 | ] 6 | } 7 | -------------------------------------------------------------------------------- /assembly/util.ts: -------------------------------------------------------------------------------- 1 | import { ethash_keccak256 } from './keccak' 2 | 3 | @inline 4 | export function parseU8(buf: Uint8Array): u8 { 5 | // @ts-ignore 6 | return buf.length ? load(buf.dataStart as usize) : 0 7 | } 8 | 9 | export function bufEq(buf: Uint8Array, other: Uint8Array): boolean { 10 | let bufLen = buf.length 11 | if (bufLen != other.length) return false 12 | return memory.compare(buf.dataStart as usize, other.dataStart as usize, bufLen) == 0 13 | } 14 | 15 | export function padBuf(buf: Uint8Array, length: usize): Uint8Array { 16 | let diff = length - buf.length 17 | if (diff < 0) { 18 | throw new Error('Buffer bigger than expected') 19 | } else if (diff == 0) { 20 | return buf 21 | } 22 | let res = new ArrayBuffer(length) 23 | // @ts-ignore 24 | memory.copy((res as usize) + diff, buf.dataStart as usize, buf.length) 25 | return Uint8Array.wrap(res, 0, length) 26 | } 27 | 28 | export function cmpBuf(buf: Uint8Array, other: Uint8Array): i32 { 29 | if (buf === other) return 0 // fast compare by references 30 | let bufLen = buf.length 31 | let otherLen = other.length 32 | if (bufLen > otherLen) return 1 33 | if (bufLen < otherLen) return -1 34 | // Assume Big-endian 35 | for (let i = 0; i < bufLen; i++) { 36 | let a = unchecked(buf[i]) 37 | let b = unchecked(other[i]) 38 | if (a != b) return i32(a > b) - i32(a < b) 39 | } 40 | return 0 41 | } 42 | 43 | export function stripBuf(buf: Uint8Array): Uint8Array { 44 | let start = buf.length 45 | for (let i = 0, len = start; i < len; i++) { 46 | if (unchecked(buf[i]) != 0) { 47 | start = i 48 | break 49 | } 50 | } 51 | return buf.subarray(start) 52 | } 53 | 54 | @inline 55 | export function hash(buf: Uint8Array): Uint8Array { 56 | let res = 
new Uint8Array(32) 57 | // @ts-ignore 58 | ethash_keccak256(res.dataStart as usize, buf.dataStart as usize, buf.byteLength) 59 | return res 60 | } 61 | 62 | @inline 63 | export function removeHexPrefix(nib_arr: Array): Array { 64 | // the hex prefix is merkle-patricia-trie encoding, not RLP 65 | return nib_arr.slice(1 + i32((nib_arr[0] & 1) == 0)) 66 | } 67 | 68 | export function addHexPrefix(key_nib_arr: Array, terminator: bool): Array { 69 | if (key_nib_arr.length & 1) { 70 | // odd 71 | key_nib_arr.unshift(1) 72 | } else { 73 | // even 74 | key_nib_arr.unshift(0) 75 | key_nib_arr.unshift(0) 76 | } 77 | 78 | if (terminator) { 79 | key_nib_arr[0] += 2 80 | } 81 | 82 | return key_nib_arr 83 | } 84 | 85 | export function u8ArrToNibbleArr(u8_arr: Array): Array { 86 | let len = u8_arr.length 87 | 88 | let nib_arr = new Array(len * 2) // length is num of hex chars for address_hash 89 | // TODO: we might not need to convert the whole thing to nibbles, just enough chars to follow the path to the proof 90 | for (let i = 0; i < len; i++) { 91 | let byte = u8_arr[i] 92 | nib_arr[(i << 1) + 0] = byte >> 4 93 | nib_arr[(i << 1) + 1] = byte & 15 94 | } 95 | return nib_arr 96 | } 97 | 98 | export function uintArrToNibbleArr(uint_arr: Uint8Array): Array { 99 | let len = uint_arr.length 100 | let nib_arr = new Array(len * 2) // length is num of hex chars for address_hash 101 | // TODO: we might not need to convert the whole thing to nibbles, just enough chars to follow the path to the proof 102 | for (let i = 0; i < len; i++) { 103 | let byte = uint_arr[i] 104 | nib_arr[(i << 1) + 0] = byte >> 4 105 | nib_arr[(i << 1) + 1] = byte & 15 106 | } 107 | return nib_arr 108 | } 109 | 110 | export function nibbleArrToUintArr(arr: Array): Uint8Array { 111 | let len = arr.length / 2 112 | let buf = new Uint8Array(len) 113 | for (let i = 0; i < len; i++) { 114 | unchecked((buf[i] = (arr[i << 1] << 4) + arr[(i << 1) + 1])) 115 | } 116 | return buf 117 | } 118 | 119 | export function 
u8ArrToUintArr(arr: Array): Uint8Array { 120 | let len = arr.length 121 | let buf = new Uint8Array(len) 122 | memory.copy(buf.dataStart as usize, arr.dataStart as usize, len) 123 | return buf 124 | } 125 | -------------------------------------------------------------------------------- /greenkeeper.json: -------------------------------------------------------------------------------- 1 | { 2 | "groups": { 3 | "main": { 4 | "packages": [ 5 | "package.json" 6 | ] 7 | }, 8 | "assembly": { 9 | "packages": [ 10 | "assembly/package.json" 11 | ] 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /gulpfile.js: -------------------------------------------------------------------------------- 1 | const gulp = require("gulp") 2 | const fs = require("fs") 3 | const wabt = require("wabt")() 4 | const asc = require("assemblyscript/cli/asc") 5 | 6 | /** 7 | * A bunch of magic happens below to merge functions from a wat file 8 | * into the assemblyscript output wasm. 9 | * 10 | * The `ImportStatementToDelete` is a config setting that you might 11 | * have to update if the `export declare function keccak(...)` 12 | * is moved between different files. 13 | * 14 | * If you change something and AS uses a different imported name, 15 | * don't forget to edit the entry function in keccak-funcs.wat 16 | * so that it matches. see the line near the bottom: 17 | * (func $keccak/keccak ;; this name needs to match what assemblyscript generates 18 | * 19 | */ 20 | 21 | const ImportStatementToDelete = '(import "watimports" "$ethash_keccak256" (func $assembly/keccak/ethash_keccak256 (param i32 i32 i32)))' 22 | 23 | /* 24 | Runtime variants: 25 | "--runtime", "full" (default) 26 | A proper memory manager and reference-counting based garbage collector, with runtime interfaces 27 | being exported to the host for being able to create managed objects externally. 28 | "--runtime", "half" 29 | The same as full but without any exports, i.e. 
where creating objects externally is not required. 30 | This allows the optimizer to eliminate parts of the runtime that are not needed. 31 | "--runtime", "stub" 32 | A minimalist arena memory manager without any means of freeing up memory again, but the same external 33 | interface as full. Useful for very short-lived programs or programs with hardly any memory footprint, 34 | while keeping the option to switch to full without any further changes. No garbage collection. 35 | "--runtime", "none" 36 | The same as stub but without any exports, for the same reasons as explained in half. Essentially 37 | evaporates entirely after optimizations. 38 | For more information see: https://docs.assemblyscript.org/details/runtime 39 | */ 40 | //gulp.task("build", callback => { 41 | async function build() { 42 | console.log('gulp.js build task..') 43 | await buildEvm() 44 | await buildToken() 45 | } 46 | 47 | async function buildEvm() { 48 | await compileEvm() 49 | mergeWats('evm', 'evm_with_keccak') 50 | } 51 | 52 | function compileEvm() { 53 | return new Promise((resolve, reject) => { 54 | asc.main([ 55 | "assembly/main.ts", 56 | //"--baseDir", "assembly", 57 | "--binaryFile", "build/evm.wasm", 58 | "--textFile", "build/evm.wat", 59 | "--sourceMap", 60 | "--measure", 61 | "--runtime", "none", 62 | "--use", "abort=", 63 | "--memoryBase", "10000", 64 | "-O3" 65 | ], (res) => { 66 | console.log("ascDone res:", res) 67 | if (res) { 68 | return reject(new Error('AssemblyScript error')) 69 | } 70 | return resolve() 71 | }) 72 | }) 73 | } 74 | 75 | async function buildToken() { 76 | await compileToken() 77 | mergeWats('token', 'token_with_keccak') 78 | } 79 | 80 | function compileToken() { 81 | return new Promise((resolve, reject) => { 82 | asc.main([ 83 | "assembly/token.ts", 84 | //"--baseDir", "assembly", 85 | "--binaryFile", "build/token.wasm", 86 | "--textFile", "build/token.wat", 87 | "--sourceMap", 88 | "--measure", 89 | "--runtime", "none", 90 | "--use", "abort=", 91 | 
"--memoryBase", "10000", 92 | "-O3" 93 | ], (res) => { 94 | console.log("ascDone res:", res) 95 | if (res) { 96 | return reject(new Error('AssemblyScript error')) 97 | } 98 | return resolve() 99 | }) 100 | }) 101 | } 102 | 103 | function mergeWats(inputName, outputName) { 104 | console.log('wabt:', wabt); 105 | 106 | //const utils = require("@wasm/studio-utils"); 107 | //console.log("loading src/ethash_keccak_funcs.wat..."); 108 | //const keccakWat = utils.project.getFile("src/ethash_keccak_funcs.wat").getData(); 109 | const keccakWat = fs.readFileSync("assembly/src/ethash_keccak_funcs.wat", "utf8"); 110 | //console.log("loaded keccak wat:", keccakWat); 111 | const keccakLines = keccakWat.split("\n") 112 | 113 | 114 | // wabt wat parsing might file on out/main.wat, but works if the wat doesn't names 115 | console.log(`loading build/${inputName}.wat...`); 116 | //const mainWat = utils.project.getFile("out/main.wat").getData(); 117 | const mainWat = fs.readFileSync(`build/${inputName}.wat`, "utf8"); 118 | 119 | /* 120 | const mainWasm = fs.readFileSync("out/main.wasm", "binary"); 121 | var mainModule = wabt.readWasm(mainWasm, {readDebugNames: true}); 122 | mainModule.validate(); 123 | console.log('mainModule is valid.'); 124 | // the wat code needs to call keccak256 using names, because the regex below will replace the import with a function of the same name 125 | mainModule.resolveNames(); 126 | mainModule.generateNames() 127 | mainModule.applyNames(); 128 | 129 | const mainWat = mainModule.toText({}); 130 | */ 131 | 132 | // remove commas from function names generated by binaryen to please wabt 133 | let mainWatReplaced = mainWat.replace(/Uint\d+Array,/g, "Uint64Array"); 134 | //console.log('mainWatReplaced:', mainWatReplaced) 135 | mainWatReplaced = mainWatReplaced.replace(/Map keccak256(a)) 136 | 137 | return verifyMultiproof(preStateRoot, blockData.multiproof, keys) 138 | } 139 | -------------------------------------------------------------------------------- 
/src/multiproof.ts: -------------------------------------------------------------------------------- 1 | import * as assert from 'assert' 2 | import { decode, encode } from 'rlp' 3 | import { keccak256 } from 'ethereumjs-util' 4 | import { addHexPrefix, removeHexPrefix, bufToU8, lookupNode } from './util' 5 | const promisify = require('util.promisify') 6 | const Trie = require('merkle-patricia-tree/baseTrie') 7 | const TrieNode = require('merkle-patricia-tree/trieNode') 8 | const { 9 | stringToNibbles, 10 | nibblesToBuffer, 11 | getNodeType, 12 | isRawNode, 13 | } = require('merkle-patricia-tree/trieNode') 14 | const { matchingNibbleLength } = require('merkle-patricia-tree/util') 15 | 16 | export enum Opcode { 17 | Branch = 0, 18 | Hasher = 1, 19 | Leaf = 2, 20 | Extension = 3, 21 | } 22 | 23 | export enum NodeType { 24 | Branch = 0, 25 | Leaf = 1, 26 | Extension = 2, 27 | Hash = 3, 28 | } 29 | 30 | export interface Instruction { 31 | kind: Opcode 32 | value?: number | number[] 33 | } 34 | 35 | export interface Multiproof { 36 | hashes: Buffer[] 37 | keyvals: Buffer[] 38 | instructions: Instruction[] 39 | } 40 | 41 | export interface StackItem { 42 | kind: NodeType 43 | // For now keeping raw for re-constructuring trie 44 | raw: any 45 | pathIndices: number[] 46 | // Buffer or raw (for embedded nodes) 47 | hash: any 48 | } 49 | 50 | export function verifyMultiproof(root: Buffer, proof: Multiproof, keys: Buffer[]): boolean { 51 | const stack: StackItem[] = [] 52 | 53 | const leaves = proof.keyvals.map((l: Buffer) => decode(l)) 54 | assert(leaves.length === keys.length) 55 | let leafIdx = 0 56 | let hashIdx = 0 57 | const paths = new Array(leaves.length).fill(undefined) 58 | 59 | for (const instr of proof.instructions) { 60 | if (instr.kind === Opcode.Hasher) { 61 | const h = proof.hashes[hashIdx++] 62 | if (!h) { 63 | throw new Error('Not enough hashes in multiproof') 64 | } 65 | stack.push({ 66 | kind: NodeType.Hash, 67 | raw: [h], 68 | pathIndices: [], 69 | hash: 
h.length < 32 ? decode(h) : h, 70 | }) 71 | } else if (instr.kind === Opcode.Leaf) { 72 | const l = leaves[leafIdx++] 73 | if (!l) { 74 | throw new Error('Expected leaf in multiproof') 75 | } 76 | const raw = [l[0], l[1]] 77 | const e = encode(raw) 78 | stack.push({ 79 | kind: NodeType.Leaf, 80 | raw: [l[0], l[1]], 81 | pathIndices: [leafIdx - 1], 82 | hash: e.length >= 32 ? keccak256(e) : raw, 83 | }) 84 | // @ts-ignore 85 | paths[leafIdx - 1] = removeHexPrefix(stringToNibbles(l[0])) 86 | } else if (instr.kind === Opcode.Branch) { 87 | const branchIndices = instr.value as number[] 88 | const children = new Array(16).fill(null) 89 | const sponge = new Array(17).fill(Buffer.alloc(0)) 90 | let pathIndices: number[] = [] 91 | for (const idx of branchIndices) { 92 | const n = stack.pop() 93 | if (!n) { 94 | throw new Error('Stack underflow') 95 | } 96 | children[idx] = n 97 | 98 | sponge[idx] = n.hash 99 | pathIndices = [...pathIndices, ...n.pathIndices] 100 | for (const pi of n.pathIndices) { 101 | paths[pi] = [idx, ...paths[pi]] 102 | } 103 | } 104 | const uniqPathIndices = Array.from(new Set(pathIndices)) 105 | const h = keccak256(encode(sponge)) 106 | stack.push({ kind: NodeType.Branch, raw: children, pathIndices: uniqPathIndices, hash: h }) 107 | } else if (instr.kind === Opcode.Extension) { 108 | const n = stack.pop() 109 | if (!n) { 110 | throw new Error('Stack underflow') 111 | } 112 | 113 | let nh = n.hash 114 | // Compute the extension node's hash and push to hashStack 115 | const raw = [nibblesToBuffer(addHexPrefix(instr.value as number[], false)), nh] 116 | const e = encode(raw) 117 | const h = e.length >= 32 ? 
keccak256(e) : raw 118 | 119 | stack.push({ 120 | kind: NodeType.Extension, 121 | raw: [instr.value, n], 122 | pathIndices: n.pathIndices.slice(), 123 | hash: h, 124 | }) 125 | 126 | for (let i = 0; i < n.pathIndices.length; i++) { 127 | paths[n.pathIndices[i]] = [...(instr.value as number[]), ...paths[n.pathIndices[i]]] 128 | } 129 | } else { 130 | throw new Error('Invalid opcode') 131 | } 132 | } 133 | 134 | // Assuming sorted keys 135 | for (let i = 0; i < paths.length; i++) { 136 | const addr = nibblesToBuffer(paths[i]) 137 | assert(addr.equals(keys[i]), `expected ${keys[i].toString('hex')} == ${addr.toString('hex')}`) 138 | } 139 | 140 | const r = stack.pop() 141 | if (!r) { 142 | throw new Error('Expected root node on top of stack') 143 | } 144 | 145 | let h = r.hash 146 | // Special case, if trie contains only one leaf 147 | // and that leaf has length < 32 148 | if (Array.isArray(h)) { 149 | h = keccak256(encode(h)) 150 | } 151 | 152 | return h.equals(root) 153 | } 154 | 155 | function hashBranch(sponge: any): Buffer { 156 | assert(Array.isArray(sponge) && sponge.length === 17) 157 | const e = encode(sponge) 158 | return e.length >= 32 ? 
keccak256(e) : sponge 159 | } 160 | 161 | export async function makeMultiproof(trie: any, keys: Buffer[]): Promise { 162 | if (keys.length === 0) { 163 | return { 164 | hashes: [trie.root], 165 | keyvals: [], 166 | instructions: [{ kind: Opcode.Hasher }], 167 | } 168 | } 169 | const keysNibbles = [] 170 | for (const k of keys) { 171 | keysNibbles.push(stringToNibbles(k)) 172 | } 173 | 174 | return _makeMultiproof(trie, trie.root, keysNibbles) 175 | } 176 | 177 | async function _makeMultiproof(trie: any, rootHash: any, keys: number[][]): Promise { 178 | let proof: Multiproof = { 179 | hashes: [], 180 | keyvals: [], 181 | instructions: [], 182 | } 183 | 184 | let root 185 | if (Buffer.isBuffer(rootHash)) { 186 | root = await lookupNode(trie, rootHash) 187 | } else if (isRawNode(rootHash)) { 188 | // Embedded node 189 | root = new TrieNode(rootHash) 190 | } else { 191 | throw new Error('Unexpected root') 192 | } 193 | 194 | if (root.type === 'branch') { 195 | // Truncate first nibble of keys 196 | const table = new Array(16).fill(undefined) 197 | // Group target keys based by their first nibbles. 198 | // Also implicitly sorts the keys. 199 | for (const k of keys) { 200 | const idx = k[0] 201 | if (!table[idx]) table[idx] = [] 202 | table[idx].push(k.slice(1)) 203 | } 204 | 205 | let branchIndices = [] 206 | for (let i = 0; i < 16; i++) { 207 | if (table[i] === undefined) { 208 | // None of the target keys are in this subtree. 209 | // If non-empty hash it and add a HASHER op. 210 | const child = root.getValue(i) 211 | if (child) { 212 | proof.instructions.push({ kind: Opcode.Hasher }) 213 | // TODO: Make sure child is a hash 214 | // what to do if embedded? 
215 | if (Buffer.isBuffer(child)) { 216 | proof.hashes.push(child) 217 | } else if (Array.isArray(child)) { 218 | proof.hashes.push(encode(child)) 219 | } else { 220 | throw new Error('Invalid branch child') 221 | } 222 | branchIndices.push(i) 223 | } 224 | } else { 225 | const child = root.getValue(i) as Buffer 226 | if (!child) { 227 | throw new Error('Key not in trie') 228 | } 229 | const p = await _makeMultiproof(trie, child, table[i]) 230 | 231 | proof.hashes.push(...p.hashes) 232 | proof.keyvals.push(...p.keyvals) 233 | proof.instructions.push(...p.instructions) 234 | branchIndices.push(i) 235 | } 236 | } 237 | branchIndices.reverse() 238 | proof.instructions.push({ kind: Opcode.Branch, value: branchIndices }) 239 | } else if (root.type === 'extention') { 240 | const extkey = root.key 241 | // Make sure all keys follow the extension node 242 | // and truncate them. 243 | for (let i = 0; i < keys.length; i++) { 244 | const k = keys[i] 245 | if (matchingNibbleLength(k, extkey) !== extkey.length) { 246 | // TODO: Maybe allow proving non-existent keys 247 | throw new Error('Key not in trie') 248 | } 249 | keys[i] = k.slice(extkey.length) 250 | } 251 | const p = await _makeMultiproof(trie, root.value, keys) 252 | proof.hashes.push(...p.hashes) 253 | proof.keyvals.push(...p.keyvals) 254 | proof.instructions.push(...p.instructions) 255 | proof.instructions.push({ kind: Opcode.Extension, value: extkey }) 256 | } else if (root.type === 'leaf') { 257 | if (keys.length !== 1) { 258 | throw new Error('Expected 1 remaining key') 259 | } 260 | if (matchingNibbleLength(keys[0], root.key) !== root.key.length) { 261 | throw new Error("Leaf key doesn't match target key") 262 | } 263 | // TODO: Check key matches leaf's key 264 | proof = { 265 | hashes: [], 266 | keyvals: [root.serialize()], 267 | instructions: [{ kind: Opcode.Leaf }], 268 | } 269 | } else { 270 | throw new Error('Unexpected node type') 271 | } 272 | return proof 273 | } 274 | 275 | export function 
decodeMultiproof(raw: Buffer): Multiproof { 276 | const dec = decode(raw) 277 | assert(dec.length === 3) 278 | 279 | return { 280 | // @ts-ignore 281 | hashes: dec[0], 282 | // @ts-ignore 283 | keyvals: dec[1], 284 | // @ts-ignore 285 | instructions: decodeInstructions(dec[2]), 286 | } 287 | } 288 | 289 | export function encodeMultiproof(proof: Multiproof, flatInstructions: boolean = false): Buffer { 290 | return encode(rawMultiproof(proof, flatInstructions)) 291 | } 292 | 293 | export function rawMultiproof(proof: Multiproof, flatInstructions: boolean = false): any { 294 | if (flatInstructions) { 295 | return [proof.hashes, proof.keyvals, flatEncodeInstructions(proof.instructions)] 296 | } else { 297 | return [ 298 | proof.hashes, 299 | proof.keyvals, 300 | proof.instructions.map(i => { 301 | if (i.value !== undefined) return [i.kind, i.value] 302 | return [i.kind] 303 | }), 304 | ] 305 | } 306 | } 307 | 308 | export function flatEncodeInstructions(instructions: Instruction[]): Buffer { 309 | const res: number[] = [] 310 | for (const instr of instructions) { 311 | res.push(instr.kind) 312 | if (instr.kind === Opcode.Branch) { 313 | const indices = instr.value as number[] 314 | res.push(indices.length) 315 | res.push(...indices) 316 | } else if (instr.kind === Opcode.Extension) { 317 | const nibbles = instr.value as number[] 318 | res.push(nibbles.length) 319 | res.push(...nibbles) 320 | } 321 | } 322 | return Buffer.from(new Uint8Array(res)) 323 | } 324 | 325 | export function flatDecodeInstructions(raw: Buffer): Instruction[] { 326 | const res = [] 327 | let i = 0 328 | while (i < raw.length) { 329 | const op = raw[i++] 330 | switch (op) { 331 | case Opcode.Branch: 332 | const ilength = raw.readUInt8(i++) 333 | const indices = [] 334 | for (let j = 0; j < ilength; j++) { 335 | indices.push(raw[i++]) 336 | } 337 | res.push({ kind: Opcode.Branch, value: indices }) 338 | break 339 | case Opcode.Hasher: 340 | res.push({ kind: Opcode.Hasher }) 341 | break 342 | case 
Opcode.Leaf: 343 | res.push({ kind: Opcode.Leaf }) 344 | break 345 | case Opcode.Extension: 346 | const nlength = raw.readUInt8(i++) 347 | const nibbles = [] 348 | for (let j = 0; j < nlength; j++) { 349 | nibbles.push(raw[i++]) 350 | } 351 | res.push({ kind: Opcode.Extension, value: nibbles }) 352 | break 353 | } 354 | } 355 | return res 356 | } 357 | 358 | export function decodeInstructions(instructions: Buffer[][]) { 359 | const res = [] 360 | for (const op of instructions) { 361 | switch (bufToU8(op[0])) { 362 | case Opcode.Branch: 363 | // @ts-ignore 364 | res.push({ kind: Opcode.Branch, value: op[1].map(v => bufToU8(v)) }) 365 | break 366 | case Opcode.Hasher: 367 | res.push({ kind: Opcode.Hasher }) 368 | break 369 | case Opcode.Leaf: 370 | res.push({ kind: Opcode.Leaf }) 371 | break 372 | case Opcode.Extension: 373 | // @ts-ignore 374 | res.push({ kind: Opcode.Extension, value: op[1].map(v => bufToU8(v)) }) 375 | break 376 | } 377 | } 378 | return res 379 | } 380 | 381 | /* 382 | * @deprecated 383 | */ 384 | function hashTrie(node: any): Buffer { 385 | const typ = node[0] 386 | node = node[1] 387 | if (typ === NodeType.Branch) { 388 | const res = new Array(17).fill(Buffer.alloc(0)) 389 | for (let i = 0; i < 16; i++) { 390 | if (node[i] === null) { 391 | continue 392 | } 393 | res[i] = hashTrie(node[i]) 394 | } 395 | const e = encode(res) 396 | if (e.length >= 32) { 397 | return keccak256(e) 398 | } else { 399 | return e 400 | } 401 | } else if (typ === NodeType.Leaf) { 402 | const e = encode(node) 403 | if (e.length >= 32) { 404 | return keccak256(e) 405 | } else { 406 | return node 407 | } 408 | } else if (typ === NodeType.Hash) { 409 | if (node[0].length < 32) { 410 | // Embedded node, decode to get correct serialization for parent node 411 | return decode(node[0]) as Buffer 412 | } 413 | return node[0] 414 | } else if (typ === NodeType.Extension) { 415 | const hashedNode = hashTrie(node[1]) 416 | node = [nibblesToBuffer(addHexPrefix(node[0], false)), 
hashedNode] 417 | const e = encode(node) 418 | if (e.length >= 32) { 419 | return keccak256(e) 420 | } else { 421 | return e 422 | } 423 | } else { 424 | throw new Error('Invalid node') 425 | } 426 | } 427 | -------------------------------------------------------------------------------- /src/relayer/basic-evm.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs' 2 | import * as path from 'path' 3 | import { encode } from 'rlp' 4 | import { Multiproof } from '../multiproof' 5 | import { 6 | TestSuite, 7 | getTestsAccounts, 8 | transfer, 9 | rawMultiproof, 10 | SimulationData, 11 | getTestsTxes, 12 | } from './lib' 13 | const Trie = require('merkle-patricia-tree/secure') 14 | 15 | export async function basicEvmTestSuite(p: string = 'fixture/add.json'): Promise { 16 | const trie = new Trie() 17 | 18 | const test = JSON.parse(fs.readFileSync(path.join(__dirname, p), 'utf-8')) 19 | const [accounts, codeHashes, bytecode] = await getTestsAccounts(trie, test) 20 | 21 | const preStateRoot = trie.root 22 | const [txes, addrs, multiproof, simulationData] = await getTestsTxes(trie, accounts, test) 23 | const returnValue = Buffer.from(test.returnValue.slice(2), 'hex') 24 | 25 | // Serialize witnesses and tx data 26 | const blockData = encode([ 27 | txes, 28 | addrs, 29 | ...rawMultiproof(multiproof as Multiproof, true), 30 | codeHashes, 31 | bytecode, 32 | returnValue, 33 | ]) 34 | 35 | // Apply txes on top of trie to compute post state root 36 | for (const tx of simulationData as SimulationData[]) { 37 | await transfer(trie, tx) 38 | } 39 | 40 | return { 41 | preStateRoot, 42 | blockData, 43 | postStateRoot: trie.root, 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/relayer/bin.ts: -------------------------------------------------------------------------------- 1 | // tslint:disable:no-console 2 | import { generateTestSuite, TestSuite, stateTestRunner, 
RunnerArgs, TestGetterArgs } from './lib' 3 | import { basicEvmTestSuite } from './basic-evm' 4 | import { generateRealisticTestSuite } from './realistic' 5 | const fs = require('fs') 6 | const yaml = require('js-yaml') 7 | const testing = require('ethereumjs-testing') 8 | 9 | async function main() { 10 | const args = process.argv 11 | 12 | if (args.length === 4 && args[2] === '--stateTest') { 13 | const testCase = args[3] 14 | const testGetterArgs: TestGetterArgs = { test: testCase } 15 | const runnerArgs: RunnerArgs = { 16 | stateless: true, 17 | fork: 'Petersburg', 18 | test: testCase, 19 | scout: 'true', 20 | dist: '?', 21 | forkConfig: 'Petersburg', 22 | jsontrace: false, 23 | debug: false, 24 | data: '', 25 | gasLimit: 0, 26 | value: 0, 27 | } 28 | 29 | await testing 30 | .getTestsFromArgs( 31 | 'GeneralStateTests', 32 | async (_filename: any, _testName: any, test: any) => { 33 | const testSuite = await stateTestRunner(runnerArgs, test) 34 | writeScoutConfig(testSuite, testCase + '.yaml', 'build/evm_with_keccak.wasm') 35 | }, 36 | testGetterArgs, 37 | ) 38 | .then(() => {}) 39 | .catch((err: any) => { 40 | console.log('Err: ', err) 41 | }) 42 | } else if (args.length === 4 && args[2] === '--realistic') { 43 | const rpcData = JSON.parse(fs.readFileSync(process.argv[3])) 44 | const testSuite = await generateRealisticTestSuite(rpcData) 45 | writeScoutConfig(testSuite, 'turbo-token-realistic.yaml', 'build/token_with_keccak.wasm') 46 | } else if (args.length >= 3 && args[2] === '--basicEvm') { 47 | const testSuite = await basicEvmTestSuite(args[3]) 48 | writeScoutConfig(testSuite, 'basic-evm.yaml', 'build/evm_with_keccak.wasm') 49 | } else { 50 | const testSuite = await generateTestSuite() 51 | writeScoutConfig(testSuite, 'turbo-token.yaml', 'build/token_with_keccak.wasm') 52 | } 53 | } 54 | 55 | function writeScoutConfig(data: TestSuite, outPath: string, wasmPath: string) { 56 | const testSuite = { 57 | beacon_state: { 58 | execution_scripts: [wasmPath], 59 | }, 
60 | shard_pre_state: { 61 | exec_env_states: [data.preStateRoot.toString('hex')], 62 | }, 63 | shard_blocks: [{ env: 0, data: data.blockData.toString('hex') }], 64 | shard_post_state: { 65 | exec_env_states: [data.postStateRoot.toString('hex')], 66 | }, 67 | } 68 | 69 | const serializedTestSuite = yaml.safeDump(testSuite) 70 | fs.writeFileSync(outPath, serializedTestSuite) 71 | } 72 | 73 | main() 74 | .then(() => {}) 75 | .catch((e: Error) => console.log(e)) 76 | -------------------------------------------------------------------------------- /src/relayer/fixture/add.json: -------------------------------------------------------------------------------- 1 | { 2 | "_comment": "ADD two values, MSTORE8 result, and RETURN", 3 | "returnValue": "0x05", 4 | "pre" : { 5 | "0x095e7baea6a6c7c4c2dfeb977efac326af552d87" : { 6 | "balance" : "0x0de0b6b3a7640000", 7 | "code" : "0x600260030160005360016000f3", 8 | "nonce" : "0x00", 9 | "storage" : { 10 | } 11 | }, 12 | "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { 13 | "balance" : "0x0de0b6b3a7640000", 14 | "code" : "0x", 15 | "nonce" : "0x00", 16 | "storage" : { 17 | } 18 | } 19 | }, 20 | "transaction" : { 21 | "data" : [ 22 | "0x" 23 | ], 24 | "gasLimit" : [ 25 | "0x061a80" 26 | ], 27 | "gasPrice" : "0x01", 28 | "nonce" : "0x00", 29 | "secretKey" : "0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8", 30 | "to" : "0x095e7baea6a6c7c4c2dfeb977efac326af552d87", 31 | "value" : [ 32 | "0x0186a0" 33 | ] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/relayer/fixture/add_store.json: -------------------------------------------------------------------------------- 1 | { 2 | "_comment": "ADD two values, SSTORE result, and RETURN", 3 | "returnValue": "0x05", 4 | "pre" : { 5 | "0x095e7baea6a6c7c4c2dfeb977efac326af552d87" : { 6 | "balance" : "0x0de0b6b3a7640000", 7 | "code" : "0x600260030160005560005460005360016000f3", 8 | "nonce" : "0x00", 9 | "storage" : { 10 | 
"0x00": "0x01" 11 | } 12 | }, 13 | "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b" : { 14 | "balance" : "0x0de0b6b3a7640000", 15 | "code" : "0x", 16 | "nonce" : "0x00", 17 | "storage" : { 18 | } 19 | } 20 | }, 21 | "transaction" : { 22 | "data" : [ 23 | "0x" 24 | ], 25 | "gasLimit" : [ 26 | "0x061a80" 27 | ], 28 | "gasPrice" : "0x01", 29 | "nonce" : "0x00", 30 | "secretKey" : "0x45a915e4d060149eb4365960e6a7a45f334393093061116b197e3240065ff2d8", 31 | "to" : "0x095e7baea6a6c7c4c2dfeb977efac326af552d87", 32 | "value" : [ 33 | "0x0186a0" 34 | ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/relayer/lib.ts: -------------------------------------------------------------------------------- 1 | import BN = require('bn.js') 2 | import Account from 'ethereumjs-account' 3 | import { keccak256, ecsign, stripZeros } from 'ethereumjs-util' 4 | import { encode, decode } from 'rlp' 5 | import { Multiproof, verifyMultiproof, makeMultiproof, flatEncodeInstructions } from '../multiproof' 6 | import VM from 'ethereumjs-vm' 7 | import { Transaction } from 'ethereumjs-tx' 8 | import { getOpcodesForHF } from 'ethereumjs-vm/dist/evm/opcodes' 9 | 10 | const assert = require('assert') 11 | const { promisify } = require('util') 12 | const Wallet = require('ethereumjs-wallet') 13 | const Trie = require('merkle-patricia-tree/secure') 14 | 15 | export interface TestSuite { 16 | preStateRoot: Buffer 17 | blockData: Buffer 18 | postStateRoot: Buffer 19 | } 20 | 21 | export interface RunnerArgs { 22 | stateless: boolean 23 | fork: string 24 | test: string 25 | scout: string 26 | dist: string 27 | forkConfig: string 28 | jsontrace: boolean 29 | debug: boolean 30 | data: string 31 | gasLimit: number 32 | value: number 33 | } 34 | 35 | export interface TestGetterArgs { 36 | test: string 37 | } 38 | 39 | export interface SimulationData { 40 | from: Buffer 41 | to: Buffer 42 | value: BN 43 | nonce: BN 44 | } 45 | 46 | export interface AccountInfo { 
47 | address: Buffer 48 | privateKey: Buffer 49 | account: Account 50 | } 51 | 52 | export async function generateTestSuite(): Promise { 53 | const trie = new Trie() 54 | // Generate random accounts 55 | const accounts = await generateAccounts(trie, 5000) 56 | 57 | const preStateRoot = trie.root 58 | 59 | // Generate txes 60 | const [txes, addrs, multiproof, simulationData] = await generateTxes(trie, accounts, 70) 61 | 62 | // Serialize witnesses and tx data 63 | const blockData = encode([txes, addrs, ...rawMultiproof(multiproof as Multiproof, true)]) 64 | 65 | // Apply txes on top of trie to compute post state root 66 | for (const tx of simulationData as SimulationData[]) { 67 | await transfer(trie, tx) 68 | } 69 | 70 | return { 71 | preStateRoot, 72 | blockData, 73 | postStateRoot: trie.root, 74 | } 75 | } 76 | 77 | async function generateTxes(trie: any, accounts: AccountInfo[], count = 50) { 78 | const txes = [] 79 | const simulationData = [] 80 | const root = trie.root 81 | const toProve: any = {} 82 | for (let i = 0; i < count; i++) { 83 | const from = accounts[i].address 84 | const to = accounts[i + 1].address 85 | const value = new BN('00000000000000000000000000000000000000000000000000000000000000ff', 16) 86 | const nonce = new BN('0000000000000000000000000000000000000000000000000000000000000000', 16) 87 | simulationData.push({ from, to, value, nonce }) 88 | 89 | const fromKey = from.toString('hex') 90 | if (!toProve[fromKey]) { 91 | toProve[fromKey] = [] 92 | } 93 | toProve[fromKey].push({ txId: i, fieldIdx: 3 }) 94 | 95 | const toKey = to.toString('hex') 96 | if (!toProve[toKey]) { 97 | toProve[toKey] = [] 98 | } 99 | toProve[toKey].push({ txId: i, fieldIdx: 0 }) 100 | 101 | const txRlp = encode([ 102 | to, 103 | stripZeros(value.toBuffer('be', 32)), 104 | stripZeros(nonce.toBuffer('be', 32)), 105 | ]) 106 | const txHash = keccak256(txRlp) 107 | const txSig = ecsign(txHash, accounts[i].privateKey) 108 | assert(txSig.r.byteLength === 32) 109 | 
assert(txSig.s.byteLength === 32) 110 | assert(txSig.v < 256) 111 | 112 | txes.push([ 113 | to, 114 | stripZeros(value.toBuffer('be', 32)), 115 | stripZeros(nonce.toBuffer('be', 32)), 116 | from, 117 | ]) 118 | } 119 | // Make sure keys are unique and sort them 120 | const unsortedAddrs = Object.keys(toProve).map(s => Buffer.from(s, 'hex')) 121 | const keys = unsortedAddrs.map(a => keccak256(a)) 122 | keys.sort(Buffer.compare) 123 | const sortedAddrs = sortAddrsByHash(unsortedAddrs) 124 | 125 | const proof = await makeMultiproof(trie, keys) 126 | // Verify proof is valid 127 | assert(verifyMultiproof(root, proof, keys)) 128 | 129 | // Modify txes and replace from and to addresses 130 | // with their index in the keys array 131 | for (let i = 0; i < sortedAddrs.length; i++) { 132 | const addr = sortedAddrs[i] 133 | const addrData = toProve[addr.toString('hex')] 134 | for (const instance of addrData) { 135 | txes[instance.txId][instance.fieldIdx] = i 136 | } 137 | } 138 | 139 | return [txes, sortedAddrs, proof, simulationData] 140 | } 141 | 142 | export async function transfer(trie: any, tx: SimulationData) { 143 | const { from, to, value, nonce } = tx 144 | assert(value.gten(0)) 145 | 146 | const fromAcc = await getAccount(trie, from) 147 | const toAcc = await getAccount(trie, to) 148 | 149 | assert(new BN(fromAcc.balance).gte(value)) 150 | assert(new BN(fromAcc.nonce).eq(nonce)) 151 | 152 | const newFromBalance = new BN(fromAcc.balance).sub(value) 153 | fromAcc.balance = newFromBalance.toBuffer() 154 | fromAcc.nonce = nonce.addn(1).toBuffer() 155 | const newToBalance = new BN(toAcc.balance).add(value) 156 | toAcc.balance = newToBalance.toBuffer() 157 | 158 | await putAccount(trie, from, fromAcc) 159 | await putAccount(trie, to, toAcc) 160 | } 161 | 162 | // Sort addresses based on their hashes. 
// Naive algorithm
// Orders `addrs` by the lexicographic order of their keccak256 hashes,
// mirroring how keys are laid out in a secure trie. O(n^2) in the
// number of addresses.
export function sortAddrsByHash(addrs: Buffer[]): Buffer[] {
  const keys = addrs.map((a: Buffer) => keccak256(a))
  keys.sort(Buffer.compare)
  const sortedAddrs = new Array(keys.length).fill(undefined)

  for (const a of addrs) {
    let idx = -1
    const h = keccak256(a)
    for (let i = 0; i < keys.length; i++) {
      const k = keys[i]
      if (h.equals(k)) {
        idx = i
        // Hashes are unique per address; stop at the first match
        break
      }
    }
    assert(idx >= 0)
    sortedAddrs[idx] = a
  }

  return sortedAddrs
}

/**
 * Creates `count` random accounts, funds each with a fixed balance and
 * inserts them into `trie`. Returns the generated wallet data.
 */
export async function generateAccounts(trie: any, count = 500): Promise<AccountInfo[]> {
  const accounts: AccountInfo[] = []
  for (let i = 0; i < count; i++) {
    const wallet = Wallet.generate()
    const address = wallet.getAddress()
    const privateKey = wallet.getPrivateKey()
    const account = new Account()
    account.balance = new BN('ffffff', 16).toBuffer()
    accounts.push({
      address,
      privateKey,
      account,
    })
    await putAccount(trie, address, account)
  }
  return accounts
}

/**
 * Builds a turboproof block from a state test fixture: assembles the
 * block data (txes, addresses, multiproof, code) against the pre-state
 * and then simulates the txes on the trie to obtain the post-state root.
 */
export async function stateTestRunner(runnerArgs: RunnerArgs, test: any): Promise<TestSuite> {
  const trie = new Trie()

  const [accounts, codeHashes, bytecode] = await getTestsAccounts(trie, test)

  const preStateRoot = trie.root
  const [txes, addrs, multiproof, simulationData, pks] = await getTestsTxes(trie, accounts, test)

  const blockData = encode([
    txes,
    addrs,
    ...rawMultiproof(multiproof as Multiproof, true),
    codeHashes,
    bytecode,
  ])

  // Execute txes on top of trie to compute post state root
  let i = 0
  for (const tx of simulationData as SimulationData[]) {
    // BUGFIX: use the sender key matching the current tx instead of
    // always the first one (previously `pks[0]`), which only worked
    // because the fixtures happen to contain a single transaction.
    const pk = (pks as Buffer[])[i]
    await execute(runnerArgs, trie, tx, pk)
    i = i + 1
  }

  return {
    preStateRoot,
    blockData,
    postStateRoot: trie.root,
  }
}

/**
 * Derives a single value-transfer tx from the state test, builds a
 * multiproof covering sender and recipient, and rewrites the tx's
 * address fields as indices into the hash-sorted address list.
 */
export async function getTestsTxes(trie: any, accounts: AccountInfo[], test: any) {
  const txes = []
  const pks = []
  const simulationData = []
  const root = trie.root
  const toProve: any = {}

  const from = accounts[1].address
  const to = accounts[0].address

  const value = new BN(test.transaction.value[0].substring(2), 16)
  const nonce = new BN(test.pre['0x' + from.toString('hex')].nonce.substring(2), 16)

  simulationData.push({ from, to, value, nonce })

  const fromKey = from.toString('hex')
  if (!toProve[fromKey]) {
    toProve[fromKey] = []
  }
  // Field 3 of the flat tx is the `from` address
  toProve[fromKey].push({ txId: 0, fieldIdx: 3 })

  const toKey = to.toString('hex')
  if (!toProve[toKey]) {
    toProve[toKey] = []
  }
  // Field 0 of the flat tx is the `to` address
  toProve[toKey].push({ txId: 0, fieldIdx: 0 })

  txes.push([to, stripZeros(value.toBuffer('be', 32)), stripZeros(nonce.toBuffer('be', 32)), from])

  pks.push(accounts[1].privateKey)

  // Make sure keys are unique and sort them
  const unsortedAddrs = Object.keys(toProve).map(s => Buffer.from(s, 'hex'))
  const keys = unsortedAddrs.map(a => keccak256(a))
  keys.sort(Buffer.compare)
  const sortedAddrs = sortAddrsByHash(unsortedAddrs)

  const proof = await makeMultiproof(trie, keys)

  // Verify proof is valid
  assert(verifyMultiproof(root, proof, keys))

  // Modify txes and replace from and to addresses
  // with their index in the keys array
  for (let i = 0; i < sortedAddrs.length; i++) {
    const addr = sortedAddrs[i]
    const addrData = toProve[addr.toString('hex')]
    for (const instance of addrData) {
      txes[instance.txId][instance.fieldIdx] = i
    }
  }

  return [txes, sortedAddrs, proof, simulationData, pks]
}

// Signs and runs a single value-transfer tx through a fresh VM bound to
// `trie`. (Declaration continues on the following source line.)
async function execute(options: any, trie: any, tx: SimulationData, pk: any) {
  const rawTx = {
    nonce: '0x'
+ tx.nonce.toString('hex'), 295 | gasLimit: '0x61a80', 296 | gasPrice: '0x1', 297 | value: '0x' + tx.value.toString('hex'), 298 | from: '0x' + tx.from.toString('hex'), 299 | to: '0x' + tx.to.toString('hex'), 300 | } 301 | 302 | const vm = new VM({ 303 | state: trie, 304 | hardfork: options.forkConfig.toLowerCase(), 305 | }) 306 | 307 | await runTx(vm, rawTx, pk) 308 | } 309 | 310 | async function runTx(vm: any, rawTx: any, pk: any) { 311 | const tx = new Transaction(rawTx) 312 | tx.sign(pk) 313 | 314 | const results = await vm.runTx({ 315 | tx: tx, 316 | }) 317 | 318 | return results 319 | } 320 | 321 | export async function getTestsAccounts( 322 | trie: any, 323 | test: any, 324 | ): Promise<[AccountInfo[], Buffer[], Buffer[]]> { 325 | const accounts: AccountInfo[] = [] 326 | const codeHashes: Buffer[] = [] 327 | const bytecode: Buffer[] = [] 328 | const privateKey = test.transaction.secretKey 329 | 330 | for (const address in test.pre) { 331 | const acct = test.pre[address] 332 | const code = Buffer.from(acct.code.substring(2), 'hex') 333 | const codeHash = keccak256(code) 334 | 335 | const acct_data = { 336 | nonce: acct.nonce, 337 | balance: acct.balance, 338 | codeHash: codeHash, 339 | } 340 | 341 | const account = new Account(acct_data) 342 | 343 | const addr_buf = Buffer.from(address.substring(2), 'hex') 344 | accounts.push({ 345 | address: addr_buf, 346 | privateKey: Buffer.from(privateKey.substring(2), 'hex'), 347 | account: account, 348 | }) 349 | 350 | await putAccount(trie, addr_buf, account) 351 | 352 | await new Promise((resolve, reject) => { 353 | account.setCode(trie, code, (err: any, codeHash: Buffer) => { 354 | if (err) { 355 | return reject(err) 356 | } 357 | codeHashes.push(codeHash) 358 | bytecode.push(code) 359 | resolve(codeHash) 360 | }) 361 | }) 362 | } 363 | 364 | return [accounts, codeHashes, bytecode] 365 | } 366 | 367 | async function putAccount(trie: any, address: Buffer, account: Account) { 368 | await 
promisify(trie.put.bind(trie))(address, account.serialize()) 369 | } 370 | 371 | async function getAccount(trie: any, address: Buffer): Promise { 372 | const raw = await promisify(trie.get.bind(trie))(address) 373 | if (!raw) { 374 | return new Account() 375 | } else { 376 | return new Account(raw) 377 | } 378 | } 379 | 380 | export function rawMultiproof(proof: Multiproof, flatInstructions: boolean = false): any { 381 | const keys = [] 382 | const values = [] 383 | for (const kv of proof.keyvals) { 384 | const raw = decode(kv) 385 | keys.push(raw[0]) 386 | values.push(raw[1]) 387 | } 388 | if (flatInstructions) { 389 | return [proof.hashes, keys, values, flatEncodeInstructions(proof.instructions)] 390 | } else { 391 | return [ 392 | proof.hashes, 393 | keys, 394 | values, 395 | proof.instructions.map(i => { 396 | if (i.value !== undefined) return [i.kind, i.value] 397 | return [i.kind] 398 | }), 399 | ] 400 | } 401 | } 402 | 403 | export function getBasicBlockIndices(code: Buffer): number[][] { 404 | const TERMINATING_OPS = ['JUMP', 'JUMPI', 'STOP', 'RETURN', 'REVERT', 'SELFDESTRUCT'] 405 | const opcodes = getOpcodesForHF('istanbul') 406 | const getOp = (i: number) => (opcodes[code[i]] ? 
opcodes[code[i]].name : 'INVALID') 407 | 408 | // [start, end) indices 409 | const blocks = [[0, -1]] 410 | for (let i = 0; i < code.length; i++) { 411 | const op = getOp(i) 412 | // Skip push args 413 | if (op === 'PUSH') { 414 | i += code[i] - 0x5f 415 | } 416 | 417 | // Current instruction terminates block or next instruction is JUMPDEST 418 | if (TERMINATING_OPS.includes(op) || (i + 1 < code.length && getOp(i + 1) === 'JUMPDEST')) { 419 | blocks[blocks.length - 1][1] = i + 1 420 | // Create new block if not at end of code 421 | if (i + 1 < code.length) { 422 | blocks.push([i + 1, -1]) 423 | } 424 | } 425 | } 426 | 427 | // Close block if no terminating instruction at the end 428 | if (blocks[blocks.length - 1][1] === undefined) { 429 | blocks[blocks.length - 1][1] = code.length 430 | } 431 | 432 | return blocks 433 | } 434 | 435 | /** 436 | * Does a single pass over bytecode to find the list 437 | * of all basic blocks (i.e. blocks of code with no 438 | * control flow change). 439 | */ 440 | export function getBasicBlocks(code: Buffer): Buffer[] { 441 | const blocks = getBasicBlockIndices(code) 442 | // Slice code based on block indices 443 | return blocks.map((b: number[]) => code.slice(b[0], b[1])) 444 | } 445 | 446 | /** 447 | * Divides code into basic blocks and constructs a MPT 448 | * with these blocks as leaves. The key for each block is 449 | * the index of the first byte of that block in the bytecode. 450 | */ 451 | export async function merkelizeCode(code: Buffer): Promise { 452 | const blockIndices = getBasicBlockIndices(code) 453 | const trie = new Trie() 454 | const putP = promisify(trie.put.bind(trie)) 455 | // Keys are indices into the bytecode. Determine key length by 456 | // how large the last index is. 
457 | const keyLength = new BN(code.length - 1).byteLength() 458 | for (let i = 0; i < blockIndices.length; i++) { 459 | const key = new BN(blockIndices[i][0]).toBuffer('be', keyLength) 460 | const val = code.slice(blockIndices[i][0], blockIndices[i][1]) 461 | await putP(key, val) 462 | } 463 | return trie 464 | } 465 | -------------------------------------------------------------------------------- /src/relayer/realistic.ts: -------------------------------------------------------------------------------- 1 | import BN = require('bn.js') 2 | import { keccak256, stripZeros } from 'ethereumjs-util' 3 | import { encode } from 'rlp' 4 | import { Multiproof, makeMultiproof } from '../multiproof' 5 | import { TestSuite, sortAddrsByHash, rawMultiproof, SimulationData, transfer } from './lib' 6 | const { promisify } = require('util') 7 | const Trie = require('merkle-patricia-tree/secure') 8 | 9 | export interface AccountData { 10 | address: Buffer 11 | accountProof: Buffer[] 12 | nonce: BN 13 | balance: BN 14 | codeHash: Buffer 15 | storageHash: Buffer 16 | storageProof: Buffer[] 17 | } 18 | 19 | export interface TransactionData { 20 | to: Buffer 21 | value: BN 22 | nonce: BN 23 | from: Buffer 24 | } 25 | 26 | export async function generateRealisticTestSuite(data: any): Promise { 27 | const trie = new Trie() 28 | 29 | const accData = [] 30 | const addrs = [] 31 | for (const acc of data.accounts) { 32 | accData.push(accountDataFromJSON(acc.result)) 33 | addrs.push(accData[accData.length - 1].address) 34 | } 35 | 36 | const multiproof = await turboproofFromAccountData(trie, accData) 37 | const preStateRoot = trie.root 38 | 39 | const sortedAddrs = sortAddrsByHash(addrs) 40 | const txes = [] 41 | const simulationData = [] 42 | for (const rawTx of data.transactions) { 43 | const rawTxData = rawTx.result ? 
rawTx.result : rawTx 44 | const txData = transactionDataFromJSON(rawTxData) 45 | simulationData.push({ 46 | to: txData.to, 47 | value: txData.value, 48 | nonce: txData.nonce, 49 | from: txData.from, 50 | }) 51 | const toIdx = sortedAddrs.findIndex((a: Buffer) => a.equals(txData.to)) 52 | const fromIdx = sortedAddrs.findIndex((a: Buffer) => a.equals(txData.from)) 53 | if (toIdx === -1 || fromIdx === -1) { 54 | throw new Error('Invalid transaction sender/recipient') 55 | } 56 | txes.push([ 57 | toIdx, 58 | stripZeros(txData.value.toBuffer('be', 32)), 59 | stripZeros(txData.nonce.toBuffer('be', 32)), 60 | fromIdx, 61 | ]) 62 | } 63 | 64 | const blockData = encode([txes, sortedAddrs, ...rawMultiproof(multiproof, true)]) 65 | 66 | // Apply txes on top of trie to compute post state root 67 | for (const tx of simulationData as SimulationData[]) { 68 | await transfer(trie, tx) 69 | } 70 | 71 | return { 72 | preStateRoot, 73 | blockData, 74 | postStateRoot: trie.root, 75 | } 76 | } 77 | 78 | export async function turboproofFromAccountData( 79 | trie: any, 80 | data: AccountData[], 81 | ): Promise { 82 | const putRaw = promisify(trie._putRaw.bind(trie)) 83 | const addrs = [] 84 | const preStateRoot = keccak256(data[0].accountProof[0]) 85 | trie.root = preStateRoot 86 | for (const accountData of data) { 87 | addrs.push(accountData.address) 88 | for (const node of accountData.accountProof) { 89 | await putRaw(keccak256(node), node) 90 | } 91 | } 92 | const keys = addrs.map((a: Buffer) => keccak256(a)) 93 | keys.sort(Buffer.compare) 94 | 95 | return makeMultiproof(trie, keys) 96 | } 97 | 98 | export function accountDataFromJSON(data: any): AccountData { 99 | return { 100 | address: toBuffer(data.address), 101 | accountProof: data.accountProof.map((n: string) => toBuffer(n)), 102 | nonce: toBN(data.nonce), 103 | balance: toBN(data.balance), 104 | codeHash: toBuffer(data.codeHash), 105 | storageHash: toBuffer(data.storageHash), 106 | storageProof: data.storageProof.map((n: 
string) => toBuffer(n)), 107 | } 108 | } 109 | 110 | export function transactionDataFromJSON(data: any): TransactionData { 111 | return { 112 | to: toBuffer(data.to), 113 | value: toBN(data.value), 114 | nonce: toBN(data.nonce), 115 | from: toBuffer(data.from), 116 | } 117 | } 118 | 119 | export function toBuffer(str: string): Buffer { 120 | return Buffer.from(str.slice(2), 'hex') 121 | } 122 | 123 | function toBN(str: string): BN { 124 | return new BN(str.slice(2), 16) 125 | } 126 | -------------------------------------------------------------------------------- /src/relayer/rpc.ts: -------------------------------------------------------------------------------- 1 | import * as fs from 'fs' 2 | import axios from 'axios' 3 | 4 | // Based on @cdetrio's python script: 5 | // https://github.com/ewasm/biturbo/blob/7dccdbcff4e01e3ed7bec2659a9a377a4703565d/test/fetch_realistic_rpc.py 6 | 7 | const ENDPOINT = 'http://localhost:8545' 8 | const blockNumber = 9125141 9 | 10 | async function getBlockByNumber(n: number): Promise { 11 | const res = await axios.post(ENDPOINT, { 12 | method: 'eth_getBlockByNumber', 13 | params: [toHex(n), true], 14 | id: 1, 15 | }) 16 | 17 | return res.data 18 | } 19 | 20 | async function getProof(n: number, addr: string): Promise { 21 | const res = await axios.post(ENDPOINT, { 22 | method: 'eth_getProof', 23 | params: [addr, [], toHex(n)], 24 | id: 1, 25 | }) 26 | 27 | const data = res.data 28 | if (data.error) { 29 | throw new Error(`eth_getProof error (${data.error.code}): ${data.error.message}`) 30 | } 31 | 32 | return data 33 | } 34 | 35 | async function getBlockWitnesses(n: number): Promise { 36 | const res = await getBlockByNumber(n) 37 | const block = res.result 38 | const txes = [] 39 | const accounts: any = {} 40 | for (const tx of block.transactions) { 41 | // Skip create txes 42 | if (!tx.to) continue 43 | txes.push(tx) 44 | 45 | if (accounts[tx.to] === undefined) { 46 | accounts[tx.to] = await getProof(n - 1, tx.to) 47 | } 48 | if 
(accounts[tx.from] === undefined) { 49 | accounts[tx.from] = await getProof(n - 1, tx.from) 50 | } 51 | } 52 | 53 | return { transactions: txes, accounts: Object.values(accounts) } 54 | } 55 | 56 | async function main() { 57 | const res = await getBlockWitnesses(blockNumber) 58 | let path = 'test/fixture/eth_getproof_result.json' 59 | if (process.argv.length === 3) { 60 | path = process.argv[2] 61 | } 62 | fs.writeFileSync(path, JSON.stringify(res, null, 2)) 63 | } 64 | 65 | function toHex(n: number): string { 66 | return '0x' + n.toString(16) 67 | } 68 | 69 | main() 70 | .then() 71 | .catch(e => { 72 | throw new Error(e) 73 | }) 74 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | export function bufToU8(b: Buffer): number { 2 | // RLP decoding of 0 is empty buffer 3 | if (b.length === 0) { 4 | return 0 5 | } 6 | return b.readUInt8(0) 7 | } 8 | 9 | export function lookupNode(trie: any, hash: any) { 10 | return new Promise((resolve, reject) => { 11 | try { 12 | trie._lookupNode(hash, (v: any) => { 13 | resolve(v) 14 | }) 15 | } catch (e) { 16 | reject(e) 17 | } 18 | }) 19 | } 20 | 21 | /** 22 | * Prepends hex prefix to an array of nibbles. 23 | * @method addHexPrefix 24 | * @param {Array} Array of nibbles 25 | * @returns {Array} - returns buffer of encoded data 26 | **/ 27 | export function addHexPrefix(key: number[], terminator: boolean): number[] { 28 | const res = key.slice() 29 | // odd 30 | if (res.length % 2) { 31 | res.unshift(1) 32 | } else { 33 | // even 34 | res.unshift(0) 35 | res.unshift(0) 36 | } 37 | 38 | if (terminator) { 39 | res[0] += 2 40 | } 41 | 42 | return res 43 | } 44 | 45 | /** 46 | * Removes hex prefix of an array of nibbles. 
47 | * @method removeHexPrefix 48 | * @param {Array} Array of nibbles 49 | * @private 50 | */ 51 | export function removeHexPrefix(val: number[]): number[] { 52 | let res = val.slice() 53 | 54 | if (res[0] % 2) { 55 | res = val.slice(1) 56 | } else { 57 | res = val.slice(2) 58 | } 59 | 60 | return res 61 | } 62 | -------------------------------------------------------------------------------- /test/ee.ts: -------------------------------------------------------------------------------- 1 | import * as tape from 'tape' 2 | import { main, decodeBlockData } from '../src/ee' 3 | import { generateTestSuite } from '../src/relayer/lib' 4 | 5 | tape('turboproof ee', async t => { 6 | const testSuite = await generateTestSuite() 7 | //const testSuite = fromScout('turboproof.yaml') 8 | // Only verifies multiproof, doesn't update trie 9 | const isValid = main({ preStateRoot: testSuite.preStateRoot, blockData: testSuite.blockData }) 10 | t.true(isValid) 11 | t.end() 12 | }) 13 | 14 | function fromScout(path: string) { 15 | const fs = require('fs') 16 | const yaml = require('js-yaml') 17 | const testCase = yaml.safeLoad(fs.readFileSync(path)) 18 | 19 | return { 20 | preStateRoot: Buffer.from(testCase.shard_pre_state.exec_env_states[0], 'hex'), 21 | postStateRoot: Buffer.from(testCase.shard_post_state.exec_env_states[0], 'hex'), 22 | blockData: Buffer.from(testCase.shard_blocks[0].data, 'hex'), 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /test/fixture/eth_getProof_sample.json: -------------------------------------------------------------------------------- 1 | { 2 | "preStateRoot": "0x9a68e09e35376855b0b37f82592d503768f6a305a91e4ab17cc9d7ee8880a966", 3 | "accounts": [ 4 | 
{"jsonrpc":"2.0","id":1,"result":{"address":"0x85a43fe911f777f9238ac25c77125d51c280df85","accountProof":["0xf8f180a086fd41bb68a0f4a475e60980095758be85644ded4f299234ed297830513ff26d8080a01a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a04a0b5d7a91be5ee273cce27e2ad9a160d2faadd5a6ba518d384019b68728a4f62f4a0c2c799b60a0cd6acd42c1015512872e86c186bcf196e85061e76842f3b7cf86080a02e0d86c3befd177f574a20ac63804532889077e955320c9361cd10b7cc6f580980a06301b39b2ea8a44df8b0356120db64b788e71f52e1d7a6309d0d2e5b86fee7cb8080a00441e31691969e770b8749c6d63814e01096a9606ede6699531be56c86b33e93808080","0xf8518080808080a0ed2fba131fadeadeb1082f565fff16ceb008f693056e3140204716c0739cf1e08080a0645793ede6fb93c6830662130970fd4a3812f50e0d0a1c1d491ad1e22a71f56f8080808080808080","0xf8518080a03e1ce0d57176a97c1d5aee9587d872d197068db6d766e3dd4544ded62ef4b5068080808080808080a0ceaa292ecbd4cc29c0e85120e376dfc52a903f79b275f2efc0c066b1811e62258080808080","0xf8889f39b523aa40ae4d154a1b8161938d550dda1caae5a2ef78cec049cfef2170cfb866f86402a0fffffffffffffffffffffffffffffffffffffffffffffffffd39750f44ebfff7a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"],"balance":"0xfffffffffffffffffffffffffffffffffffffffffffffffffd39750f44ebfff7","codeHash":"0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470","nonce":"0x2","storageHash":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","storageProof":[]}}, 5 | 
{"jsonrpc":"2.0","id":1,"result":{"address":"0xb3c02212ef4317e3dbae99d8368346220e9802ff","accountProof":["0xf8f180a086fd41bb68a0f4a475e60980095758be85644ded4f299234ed297830513ff26d8080a01a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a04a0b5d7a91be5ee273cce27e2ad9a160d2faadd5a6ba518d384019b68728a4f62f4a0c2c799b60a0cd6acd42c1015512872e86c186bcf196e85061e76842f3b7cf86080a02e0d86c3befd177f574a20ac63804532889077e955320c9361cd10b7cc6f580980a06301b39b2ea8a44df8b0356120db64b788e71f52e1d7a6309d0d2e5b86fee7cb8080a00441e31691969e770b8749c6d63814e01096a9606ede6699531be56c86b33e93808080","0xf85180808080a0998f1a60662f16e1b7d5626752bea5ff7a672769512921d900ebd1758c504b278080a0f4e2c36a4945f95fac2dec5ddcc12a290c0bf7a13fbecba475ad3c78a6033097808080808080808080","0xf871a0201bdb62e37bf9425e0091e45881e017589840ff24c569f803f59cdfb815dfadb84ef84c8088016345785d8a0000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"],"balance":"0x16345785d8a0000","codeHash":"0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470","nonce":"0x0","storageHash":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","storageProof":[]}}, 6 | 
{"jsonrpc":"2.0","id":1,"result":{"address":"0x49430da50f7955222c4cfeb009997409db3dbcd2","accountProof":["0xf8f180a086fd41bb68a0f4a475e60980095758be85644ded4f299234ed297830513ff26d8080a01a697e814758281972fcd13bc9707dbcd2f195986b05463d7b78426508445a04a0b5d7a91be5ee273cce27e2ad9a160d2faadd5a6ba518d384019b68728a4f62f4a0c2c799b60a0cd6acd42c1015512872e86c186bcf196e85061e76842f3b7cf86080a02e0d86c3befd177f574a20ac63804532889077e955320c9361cd10b7cc6f580980a06301b39b2ea8a44df8b0356120db64b788e71f52e1d7a6309d0d2e5b86fee7cb8080a00441e31691969e770b8749c6d63814e01096a9606ede6699531be56c86b33e93808080","0xf8518080808080a0ed2fba131fadeadeb1082f565fff16ceb008f693056e3140204716c0739cf1e08080a0645793ede6fb93c6830662130970fd4a3812f50e0d0a1c1d491ad1e22a71f56f8080808080808080","0xf8518080a03e1ce0d57176a97c1d5aee9587d872d197068db6d766e3dd4544ded62ef4b5068080808080808080a0ceaa292ecbd4cc29c0e85120e376dfc52a903f79b275f2efc0c066b1811e62258080808080","0xf8709f3dd96f46dd800a6ca7688da65bf9f97b429f8d24467228379f67690f889018b84ef84c8088016345785d8a0000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"],"balance":"0x16345785d8a0000","codeHash":"0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470","nonce":"0x0","storageHash":"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421","storageProof":[]}} 7 | ], 8 | "transactions": [ 9 | 
{"jsonrpc":"2.0","id":1,"result":{"blockHash":"0xf1add87570527cf773c478c1209434da65aff453571761563af1fa1455280915","blockNumber":"0x3","from":"0x85a43fe911f777f9238ac25c77125d51c280df85","gas":"0x5208","gasPrice":"0x1","hash":"0x0eb40191d2e6fa7dbaabeccb4ca703c982d6fc777553f7a5ccbc936c4ce94e7f","input":"0x","nonce":"0x2","to":"0xb3c02212ef4317e3dbae99d8368346220e9802ff","transactionIndex":"0x0","value":"0x2ea11e32ad50000","v":"0xa96","r":"0x2eb2c365014d918ee85ec095631ea1926b58417bbf0c2f29aed88ba08da00393","s":"0x41f9a91b497e727f1081b39d89ee25fc3ec19369879dd0e8a46f93a516550682"}}, 10 | {"jsonrpc":"2.0","id":1,"result":{"blockHash":"0x9f1e28dbca4c23725b90b803a2dd791f6dd7376a6c60a73536fa22cf5ba0f78c","blockNumber":"0x4","from":"0x85a43fe911f777f9238ac25c77125d51c280df85","gas":"0x5208","gasPrice":"0x1","hash":"0x5985285c1f85af666aa841525d083a81a29abf1c7f670bd7ce2813424edb3891","input":"0x","nonce":"0x3","to":"0x49430da50f7955222c4cfeb009997409db3dbcd2","transactionIndex":"0x0","value":"0x30d98d59a960000","v":"0xa96","r":"0x6319bce4b5b325419c09b9d69b865ef860d091932efc87dd30723e50aa44b3c3","s":"0x7e77f07c91362e804614733a9c7552850027d9b3675457a0444e6b74fb0bde3"}} 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /test/fixture/hex_encoded_securetrie_test.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "https://github.com/ethereum/tests/blob/develop/TrieTests/hex_encoded_securetrie_test.json", 3 | "commit": "7d66cbfff1e6561d1046e45df8b7918d186b136f", 4 | "date": "2019-01-10", 5 | "tests": { 6 | "test1": { 7 | "in": { 8 | "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": 9 | "0xf848018405f446a7a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 10 | "0x095e7baea6a6c7c4c2dfeb977efac326af552d87": 11 | 
"0xf8440101a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a004bccc5d94f4d1f99aab44369a910179931772f2a5c001c3229f57831c102769", 12 | "0xd2571607e241ecf590ed94b12d87c94babe36db6": 13 | "0xf8440180a0ba4b47865c55a341a4a78759bb913cd15c3ee8eaf30a62fa8d1c8863113d84e8a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 14 | "0x62c01474f089b07dae603491675dc5b5748f7049": 15 | "0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 16 | "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": 17 | "0xf8478083019a59a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" 18 | }, 19 | "root": "0x730a444e08ab4b8dee147c9b232fc52d34a223d600031c1e9d25bfc985cbd797", 20 | "hexEncoded": true 21 | }, 22 | "test2": { 23 | "in": { 24 | "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": 25 | "0xf84c01880de0b6b3a7622746a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 26 | "0x095e7baea6a6c7c4c2dfeb977efac326af552d87": 27 | "0xf84780830186b7a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0501653f02840675b1aab0328c6634762af5d51764e78f9641cccd9b27b90db4f", 28 | "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": 29 | "0xf8468082521aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" 30 | }, 31 | "root": "0xa7c787bf470808896308c215e22c7a580a0087bb6db6e8695fb4759537283a83", 32 | "hexEncoded": true 33 | }, 34 | "test3": { 35 | "in": { 36 | "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b": 37 | "0xf84c01880de0b6b3a7614bc3a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 38 | "0x095e7baea6a6c7c4c2dfeb977efac326af552d87": 39 | 
"0xf84880840132b3a0a065fee2fffd7a68488cf7ef79f35f7979133172ac5727b5e0cf322953d13de492a06e5d8fec8b6b9bf41c3fb9b61696d5c87b66f6daa98d5f02ba9361b0c6916467", 40 | "0x0000000000000000000000000000000000000001": 41 | "0xf8448080a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470", 42 | "0x2adc25665018aa1fe0e6bc666dac8fc2697ff9ba": 43 | "0xf8478083012d9da056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" 44 | }, 45 | "root": "0x40b37be88a49e2c08b8d33fcb03a0676ffd0481df54dfebd3512b8ec54f40cad", 46 | "hexEncoded": true 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /test/fixture/trietest.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "https://github.com/ethereum/tests/blob/develop/TrieTests/trietest.json", 3 | "commit": "7d66cbfff1e6561d1046e45df8b7918d186b136f", 4 | "date": "2019-01-10", 5 | "tests": { 6 | "emptyValues": { 7 | "in": [ 8 | ["do", "verb"], 9 | ["ether", "wookiedoo"], 10 | ["horse", "stallion"], 11 | ["shaman", "horse"], 12 | ["doge", "coin"], 13 | ["ether", null], 14 | ["dog", "puppy"], 15 | ["shaman", null] 16 | ], 17 | "root": "0x5991bb8c6514148a29db676a14ac506cd2cd5775ace63c30a4fe457715e9ac84" 18 | }, 19 | "branchingTests": { 20 | "in":[ 21 | ["0x04110d816c380812a427968ece99b1c963dfbce6", "something"], 22 | ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", "something"], 23 | ["0x0a517d755cebbf66312b30fff713666a9cb917e0", "something"], 24 | ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", "something"], 25 | ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", "something"], 26 | ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", "something"], 27 | ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", "something"], 28 | ["0x37f998764813b136ddf5a754f34063fd03065e36", "something"], 29 | 
["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", "something"], 30 | ["0x4f36659fa632310b6ec438dea4085b522a2dd077", "something"], 31 | ["0x62c01474f089b07dae603491675dc5b5748f7049", "something"], 32 | ["0x729af7294be595a0efd7d891c9e51f89c07950c7", "something"], 33 | ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", "something"], 34 | ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", "something"], 35 | ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", "something"], 36 | ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", "something"], 37 | ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", "something"], 38 | ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", "something"], 39 | ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "something"], 40 | ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", "something"], 41 | ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", "something"], 42 | ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", "something"], 43 | ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", "something"], 44 | ["0xd2571607e241ecf590ed94b12d87c94babe36db6", "something"], 45 | ["0xf735071cbee190d76b704ce68384fc21e389fbe7", "something"], 46 | ["0x04110d816c380812a427968ece99b1c963dfbce6", null], 47 | ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", null], 48 | ["0x0a517d755cebbf66312b30fff713666a9cb917e0", null], 49 | ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", null], 50 | ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", null], 51 | ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", null], 52 | ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", null], 53 | ["0x37f998764813b136ddf5a754f34063fd03065e36", null], 54 | ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", null], 55 | ["0x4f36659fa632310b6ec438dea4085b522a2dd077", null], 56 | ["0x62c01474f089b07dae603491675dc5b5748f7049", null], 57 | ["0x729af7294be595a0efd7d891c9e51f89c07950c7", null], 58 | ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", null], 59 | ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", null], 60 | ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", 
null], 61 | ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", null], 62 | ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", null], 63 | ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", null], 64 | ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", null], 65 | ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", null], 66 | ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", null], 67 | ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", null], 68 | ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", null], 69 | ["0xd2571607e241ecf590ed94b12d87c94babe36db6", null], 70 | ["0xf735071cbee190d76b704ce68384fc21e389fbe7", null] 71 | ], 72 | "root": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" 73 | }, 74 | "jeff": { 75 | "in": [ 76 | ["0x0000000000000000000000000000000000000000000000000000000000000045", "0x22b224a1420a802ab51d326e29fa98e34c4f24ea"], 77 | ["0x0000000000000000000000000000000000000000000000000000000000000046", "0x67706c2076330000000000000000000000000000000000000000000000000000"], 78 | ["0x0000000000000000000000000000000000000000000000000000001234567890", "0x697c7b8c961b56f675d570498424ac8de1a918f6"], 79 | ["0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", "0x1234567890"], 80 | ["0x0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2", "0x4655474156000000000000000000000000000000000000000000000000000000"], 81 | ["0x000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1", "0x4e616d6552656700000000000000000000000000000000000000000000000000"], 82 | ["0x4655474156000000000000000000000000000000000000000000000000000000", "0x7ef9e639e2733cb34e4dfc576d4b23f72db776b2"], 83 | ["0x4e616d6552656700000000000000000000000000000000000000000000000000", "0xec4f34c97e43fbb2816cfd95e388353c7181dab1"], 84 | ["0x0000000000000000000000000000000000000000000000000000001234567890", null], 85 | ["0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000"], 86 | 
["0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000", "0x697c7b8c961b56f675d570498424ac8de1a918f6"] 87 | ], 88 | "root": "0x9f6221ebb8efe7cff60a716ecb886e67dd042014be444669f0159d8e68b42100" 89 | }, 90 | "insert-middle-leaf": { 91 | "in": [ 92 | [ "key1aa", "0123456789012345678901234567890123456789xxx"], 93 | [ "key1", "0123456789012345678901234567890123456789Very_Long"], 94 | [ "key2bb", "aval3"], 95 | [ "key2", "short"], 96 | [ "key3cc", "aval3"], 97 | [ "key3","1234567890123456789012345678901"] 98 | ], 99 | "root": "0xcb65032e2f76c48b82b5c24b3db8f670ce73982869d38cd39a624f23d62a9e89" 100 | }, 101 | "branch-value-update": { 102 | "in": [ 103 | [ "abc", "123" ], 104 | [ "abcd", "abcd" ], 105 | [ "abc", "abc" ] 106 | ], 107 | "root": "0x7a320748f780ad9ad5b0837302075ce0eeba6c26e3d8562c67ccc0f1b273298a" 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /test/fixture/trietest_secureTrie.json: -------------------------------------------------------------------------------- 1 | { 2 | "source": "https://github.com/ethereum/tests/blob/develop/TrieTests/trietest_secureTrie.json", 3 | "commit": "7d66cbfff1e6561d1046e45df8b7918d186b136f", 4 | "date": "2019-01-10", 5 | "tests": { 6 | "emptyValues": { 7 | "in": [ 8 | ["do", "verb"], 9 | ["ether", "wookiedoo"], 10 | ["horse", "stallion"], 11 | ["shaman", "horse"], 12 | ["doge", "coin"], 13 | ["ether", null], 14 | ["dog", "puppy"], 15 | ["shaman", null] 16 | ], 17 | "root": "0x29b235a58c3c25ab83010c327d5932bcf05324b7d6b1185e650798034783ca9d" 18 | }, 19 | "branchingTests": { 20 | "in":[ 21 | ["0x04110d816c380812a427968ece99b1c963dfbce6", "something"], 22 | ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", "something"], 23 | ["0x0a517d755cebbf66312b30fff713666a9cb917e0", "something"], 24 | ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", "something"], 25 | ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", "something"], 26 | 
["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", "something"], 27 | ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", "something"], 28 | ["0x37f998764813b136ddf5a754f34063fd03065e36", "something"], 29 | ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", "something"], 30 | ["0x4f36659fa632310b6ec438dea4085b522a2dd077", "something"], 31 | ["0x62c01474f089b07dae603491675dc5b5748f7049", "something"], 32 | ["0x729af7294be595a0efd7d891c9e51f89c07950c7", "something"], 33 | ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", "something"], 34 | ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", "something"], 35 | ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", "something"], 36 | ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", "something"], 37 | ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", "something"], 38 | ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", "something"], 39 | ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", "something"], 40 | ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", "something"], 41 | ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", "something"], 42 | ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", "something"], 43 | ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", "something"], 44 | ["0xd2571607e241ecf590ed94b12d87c94babe36db6", "something"], 45 | ["0xf735071cbee190d76b704ce68384fc21e389fbe7", "something"], 46 | ["0x04110d816c380812a427968ece99b1c963dfbce6", null], 47 | ["0x095e7baea6a6c7c4c2dfeb977efac326af552d87", null], 48 | ["0x0a517d755cebbf66312b30fff713666a9cb917e0", null], 49 | ["0x24dd378f51adc67a50e339e8031fe9bd4aafab36", null], 50 | ["0x293f982d000532a7861ab122bdc4bbfd26bf9030", null], 51 | ["0x2cf5732f017b0cf1b1f13a1478e10239716bf6b5", null], 52 | ["0x31c640b92c21a1f1465c91070b4b3b4d6854195f", null], 53 | ["0x37f998764813b136ddf5a754f34063fd03065e36", null], 54 | ["0x37fa399a749c121f8a15ce77e3d9f9bec8020d7a", null], 55 | ["0x4f36659fa632310b6ec438dea4085b522a2dd077", null], 56 | ["0x62c01474f089b07dae603491675dc5b5748f7049", null], 57 | 
["0x729af7294be595a0efd7d891c9e51f89c07950c7", null], 58 | ["0x83e3e5a16d3b696a0314b30b2534804dd5e11197", null], 59 | ["0x8703df2417e0d7c59d063caa9583cb10a4d20532", null], 60 | ["0x8dffcd74e5b5923512916c6a64b502689cfa65e1", null], 61 | ["0x95a4d7cccb5204733874fa87285a176fe1e9e240", null], 62 | ["0x99b2fcba8120bedd048fe79f5262a6690ed38c39", null], 63 | ["0xa4202b8b8afd5354e3e40a219bdc17f6001bf2cf", null], 64 | ["0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b", null], 65 | ["0xa9647f4a0a14042d91dc33c0328030a7157c93ae", null], 66 | ["0xaa6cffe5185732689c18f37a7f86170cb7304c2a", null], 67 | ["0xaae4a2e3c51c04606dcb3723456e58f3ed214f45", null], 68 | ["0xc37a43e940dfb5baf581a0b82b351d48305fc885", null], 69 | ["0xd2571607e241ecf590ed94b12d87c94babe36db6", null], 70 | ["0xf735071cbee190d76b704ce68384fc21e389fbe7", null] 71 | ], 72 | "root": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" 73 | }, 74 | "jeff": { 75 | "in": [ 76 | ["0x0000000000000000000000000000000000000000000000000000000000000045", "0x22b224a1420a802ab51d326e29fa98e34c4f24ea"], 77 | ["0x0000000000000000000000000000000000000000000000000000000000000046", "0x67706c2076330000000000000000000000000000000000000000000000000000"], 78 | ["0x0000000000000000000000000000000000000000000000000000001234567890", "0x697c7b8c961b56f675d570498424ac8de1a918f6"], 79 | ["0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", "0x1234567890"], 80 | ["0x0000000000000000000000007ef9e639e2733cb34e4dfc576d4b23f72db776b2", "0x4655474156000000000000000000000000000000000000000000000000000000"], 81 | ["0x000000000000000000000000ec4f34c97e43fbb2816cfd95e388353c7181dab1", "0x4e616d6552656700000000000000000000000000000000000000000000000000"], 82 | ["0x4655474156000000000000000000000000000000000000000000000000000000", "0x7ef9e639e2733cb34e4dfc576d4b23f72db776b2"], 83 | ["0x4e616d6552656700000000000000000000000000000000000000000000000000", "0xec4f34c97e43fbb2816cfd95e388353c7181dab1"], 84 | 
["0x0000000000000000000000000000000000000000000000000000001234567890", null], 85 | ["0x000000000000000000000000697c7b8c961b56f675d570498424ac8de1a918f6", "0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000"], 86 | ["0x6f6f6f6820736f2067726561742c207265616c6c6c793f000000000000000000", "0x697c7b8c961b56f675d570498424ac8de1a918f6"] 87 | ], 88 | "root": "0x72adb52e9d9428f808e3e8045be18d3baa77881d0cfab89a17a2bcbacee2f320" 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /test/multiproof.ts: -------------------------------------------------------------------------------- 1 | import * as tape from 'tape' 2 | import * as rlp from 'rlp' 3 | import { keccak256 } from 'ethereumjs-util' 4 | import { 5 | decodeMultiproof, 6 | rawMultiproof, 7 | encodeMultiproof, 8 | decodeInstructions, 9 | flatEncodeInstructions, 10 | flatDecodeInstructions, 11 | verifyMultiproof, 12 | makeMultiproof, 13 | Instruction, 14 | Opcode, 15 | } from '../src/multiproof' 16 | import { lookupNode } from '../src/util' 17 | const promisify = require('util.promisify') 18 | const Trie = require('merkle-patricia-tree/baseTrie') 19 | const SecureTrie = require('merkle-patricia-tree/secure') 20 | 21 | tape('decode and encode instructions', t => { 22 | t.test('rlp encoding', st => { 23 | const raw = Buffer.from('cdc102c603c403030303c380c106', 'hex') 24 | const expected = [ 25 | { kind: Opcode.Leaf }, 26 | { kind: Opcode.Extension, value: [3, 3, 3, 3] }, 27 | { kind: Opcode.Branch, value: [6] }, 28 | ] 29 | // @ts-ignore 30 | const res = decodeInstructions(rlp.decode(raw)) 31 | st.deepEqual(expected, res) 32 | st.end() 33 | }) 34 | 35 | t.test('flat encoding', st => { 36 | const raw = Buffer.from('02030403030303000106', 'hex') 37 | const instructions = [ 38 | { kind: Opcode.Leaf }, 39 | { kind: Opcode.Extension, value: [3, 3, 3, 3] }, 40 | { kind: Opcode.Branch, value: [6] }, 41 | ] 42 | const encoded = flatEncodeInstructions(instructions) 
// (continuation of the 'flat encoding' subtest in test/multiproof.ts)
    st.assert(raw.equals(encoded))
    const decoded = flatDecodeInstructions(raw)
    // tape's deepEqual signature is (actual, expected).
    st.deepEqual(decoded, instructions)
    st.end()
  })
})

tape('decode and encode multiproof', t => {
  t.test('decode and encode one leaf', st => {
    const raw = Buffer.from(
      'eae1a00101010101010101010101010101010101010101010101010101010101010101c483c20102c2c102',
      'hex',
    )
    const expected = {
      hashes: [Buffer.alloc(32, 1)],
      instructions: [{ kind: Opcode.Leaf }],
      keyvals: [Buffer.from('c20102', 'hex')],
    }
    const proof = decodeMultiproof(raw)
    st.deepEqual(proof, expected)

    // Round-trip: encoding the decoded structure must reproduce the raw bytes.
    const encoded = encodeMultiproof(expected)
    st.assert(raw.equals(encoded))

    st.end()
  })

  t.test('decode and encode two out of three leaves with extension', async st => {
    // Named `trie` (not `t`) to avoid shadowing the enclosing tape test object.
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key1 = Buffer.from('1'.repeat(40), 'hex')
    const key2 = Buffer.from('2'.repeat(40), 'hex')
    // Shares a 5-byte prefix with key1 so the trie contains an extension node.
    const key3 = Buffer.from('1'.repeat(10).concat('3'.repeat(30)), 'hex')
    await put(key1, Buffer.from('f'.repeat(64), 'hex'))
    await put(key2, Buffer.from('e'.repeat(64), 'hex'))
    await put(key3, Buffer.from('d'.repeat(64), 'hex'))

    const keys = [key3, key1]
    const proof = await makeMultiproof(trie, keys)
    const encoded = encodeMultiproof(proof)
    const decoded = decodeMultiproof(encoded)
    st.deepEqual(decoded, proof)
    st.end()
  })
})

tape('multiproof tests', (t: tape.Test) => {
  tape.skip('hash before nested nodes in branch', async st => {
    // TODO: Replace with valid multiproof
    const raw = Buffer.from(
      'f876e1a01bbb8445ba6497d9a4642a114cb06b3a61ea8e49ca3853991b4f07b7e1e04892f845b843f8419f02020202020202020202020202020202020202020202020202020202020202a00000000000000000000000000000000000000000000000000000000000000000ccc20180c28001c2021fc20402',
      'hex',
    )
    const expectedRoot = Buffer.from(
      '0d76455583723bb10c56d34cfad1fb218e692299ae2edb5dd56a950f7062a6e0',
      'hex',
    )
    const expectedInstructions = [
      { kind: Opcode.Hasher },
      { kind: Opcode.Branch, value: 1 },
      { kind: Opcode.Leaf },
      //{ kind: Opcode.Add, value: 2 },
    ]
    const proof = decodeMultiproof(raw)
    st.deepEqual(proof.instructions, expectedInstructions)
    // Awaited for consistency with test/realistic.ts, which awaits verifyMultiproof;
    // asserting an un-awaited Promise would always pass. (Awaiting a plain boolean
    // is a no-op, so this is safe either way.)
    st.assert(await verifyMultiproof(expectedRoot, proof, []))
    st.end()
  })

  tape.skip('two values', async st => {
    // TODO: Replace with valid multiproof
    const raw = Buffer.from(
      'f8c1e1a09afbad9ae00ded5a066bd6f0ec67a45d51f31c258066b997e9bb8336bc13eba8f88ab843f8419f01010101010101010101010101010101010101010101010101010101010101a00101010101010101010101010101010101010101010101010101010101010101b843f8419f02020202020202020202020202020202020202020202020202020202020202a00000000000000000000000000000000000000000000000000000000000000000d2c2021fc28001c2021fc20402c20180c20408',
      'hex',
    )
    const expectedRoot = Buffer.from(
      '32291409ceb27a3b68b6beff58cfc41c084c0bde9e6aca03a20ce9aa795bb248',
      'hex',
    )
    const expectedInstructions = [
      { kind: Opcode.Leaf },
      { kind: Opcode.Branch, value: 1 },
      { kind: Opcode.Leaf },
      //{ kind: Opcode.Add, value: 2 },
      { kind: Opcode.Hasher },
      //{ kind: Opcode.Add, value: 8 },
    ]
    const proof = decodeMultiproof(raw)
    st.deepEqual(proof.instructions, expectedInstructions)
    st.assert(await verifyMultiproof(expectedRoot, proof, []))
    st.end()
  })

  t.end()
})

tape('make multiproof', t => {
  t.test('trie with one leaf', async st => {
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key = Buffer.from('1'.repeat(40), 'hex')
    await put(key, Buffer.from('ffff', 'hex'))
    // Single-entry trie: the root node is the leaf itself.
    const leaf: any = await lookupNode(trie, trie.root)

    const proof = await makeMultiproof(trie, [key])

    st.deepEqual(proof, {
      hashes: [],
      keyvals: [leaf.serialize()],
      instructions: [{ kind: Opcode.Leaf }],
    })
    st.assert(await verifyMultiproof(trie.root, proof, [key]))
    st.end()
  })

  t.test('prove one of two leaves in trie', async st => {
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key1 = Buffer.from('1'.repeat(40), 'hex')
    const key2 = Buffer.from('2'.repeat(40), 'hex')
    await put(key1, Buffer.from('f'.repeat(64), 'hex'))
    await put(key2, Buffer.from('e'.repeat(64), 'hex'))

    // Proving only key1: the sibling leaf is replaced by one hash, and the
    // proof needs branch + leaf (+ hasher) instructions.
    const proof = await makeMultiproof(trie, [key1])
    st.equal(proof.hashes.length, 1)
    st.equal(proof.keyvals.length, 1)
    st.equal(proof.instructions.length, 3)
    st.assert(await verifyMultiproof(trie.root, proof, [key1]))
    st.end()
  })

  t.test('prove two of three leaves in trie', async st => {
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key1 = Buffer.from('1'.repeat(40), 'hex')
    const key2 = Buffer.from('2'.repeat(40), 'hex')
    const key3 = Buffer.from('3'.repeat(40), 'hex')
    await put(key1, Buffer.from('f'.repeat(64), 'hex'))
    await put(key2, Buffer.from('e'.repeat(64), 'hex'))
    await put(key3, Buffer.from('d'.repeat(64), 'hex'))

    // makeMultiproof accepts keys in any order; verification wants them sorted.
    const proof = await makeMultiproof(trie, [key3, key1])
    st.assert(await verifyMultiproof(trie.root, proof, [key1, key3]))
    st.end()
  })

  t.test('prove two of three leaves (with extension) in trie', async st => {
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key1 = Buffer.from('1'.repeat(40), 'hex')
    const key2 = Buffer.from('2'.repeat(40), 'hex')
    const key3 = Buffer.from('1'.repeat(10).concat('3'.repeat(30)), 'hex')
    await put(key1, Buffer.from('f'.repeat(64), 'hex'))
    await put(key2, Buffer.from('e'.repeat(64), 'hex'))
    await put(key3, Buffer.from('d'.repeat(64), 'hex'))

    const keys = [key3, key1]
    const proof = await makeMultiproof(trie, keys)
    // Verification expects a sorted array of keys.
    keys.sort(Buffer.compare)
    st.assert(await verifyMultiproof(trie.root, proof, keys))
    st.end()
  })

  t.test('two embedded leaves in branch', async st => {
    const trie = new Trie()
    const put = promisify(trie.put.bind(trie))
    const key1 = Buffer.from('1'.repeat(40), 'hex')
    const key2 = Buffer.from('2'.repeat(40), 'hex')
    // Short values keep both leaves embedded (RLP < 32 bytes) in the branch.
    await put(key1, Buffer.from('f'.repeat(4), 'hex'))
    await put(key2, Buffer.from('e'.repeat(4), 'hex'))

    const proof = await makeMultiproof(trie, [key1])
    st.assert(await verifyMultiproof(trie.root, proof, [key1]))
    st.end()
  })
})

tape('fuzz multiproof generation/verification with official tests', async t => {
  const trietest = Object.assign({}, require('./fixture/trietest.json').tests)
  const trietestSecure = Object.assign({}, require('./fixture/trietest_secureTrie.json').tests)
  const hexEncodedTests = Object.assign(
    {},
    require('./fixture/hex_encoded_securetrie_test.json').tests,
  )
  // Inputs of hex encoded tests are objects instead of arrays
  Object.keys(hexEncodedTests).map(k => {
    hexEncodedTests[k].in = Object.keys(hexEncodedTests[k].in).map(key => [
      key,
      hexEncodedTests[k].in[key],
    ])
  })
  const testCases = [
    { name: 'jeff', secure: false, input: trietest.jeff.in, root: trietest.jeff.root },
    {
      name: 'jeffSecure',
      secure: true,
      input: trietestSecure.jeff.in,
      root: trietestSecure.jeff.root,
    },
    {
      name: 'emptyValuesSecure',
      secure: true,
      input: trietestSecure.emptyValues.in,
      root: trietestSecure.emptyValues.root,
    },
    {
      name: 'test1',
      secure: true,
      input: hexEncodedTests.test1.in,
      root: hexEncodedTests.test1.root,
    },
    {
      name: 'test2',
      secure: true,
      input: hexEncodedTests.test2.in,
      root: hexEncodedTests.test2.root,
    },
    {
      name: 'test3',
      secure: true,
      input: hexEncodedTests.test3.in,
      root: hexEncodedTests.test3.root,
    },
  ]
  for (const testCase of testCases) {
    const testName = testCase.name
    t.comment(testName)
    const expect = Buffer.from(testCase.root.slice(2), 'hex')
    const removedKeys: { [key: string]: boolean } = {}
    // Clean inputs: hex strings become Buffers, plain strings are utf8-encoded,
    // and keys whose final value is null are remembered as deleted.
    let inputs = testCase.input.map((input: any) => {
      const res: any = [null, null]
      for (let i = 0; i < 2; i++) {
        if (!input[i]) continue
        if (input[i].slice(0, 2) === '0x') {
          res[i] = Buffer.from(input[i].slice(2), 'hex')
        } else {
          res[i] = Buffer.from(input[i])
        }
      }
      if (res[1] === null) {
        removedKeys[res[0]!.toString('hex')] = true
      }
      return res
    })

    let trie
    if (testCase.secure) {
      trie = new SecureTrie()
    } else {
      trie = new Trie()
    }
    for (let input of inputs) {
      await promisify(trie.put.bind(trie))(input[0], input[1])
    }
    t.assert(trie.root.equals(expect))

    // TODO: include keys that have been removed from trie
    const keyCombinations = getCombinations(
      inputs.map((i: any) => i[0]).filter((i: any) => removedKeys[i.toString('hex')] !== true),
    )
    for (let combination of keyCombinations) {
      // If using secure make sure to hash keys
      if (testCase.secure) {
        combination = combination.map(k => keccak256(k))
      }
      try {
        const proof = await makeMultiproof(trie, combination)
        // Verification expects a sorted array of keys
        combination.sort(Buffer.compare)
        t.assert(await verifyMultiproof(trie.root, proof, combination))
      } catch (e) {
        if (e.message !== 'Key not in trie') {
          t.fail(e)
        } else {
          // Only report a skip when the failure really was a missing key.
          t.comment('skipped combination because key is not in trie')
        }
      }
    }
  }
  t.end()
})

// Given
array [a, b, c], produce combinations 325 | // with all lengths [1, arr.length]: 326 | // [[a], [b], [c], [a, b], [a, c], [b, c], [a, b, c]] 327 | function getCombinations(arr: Buffer[]): Buffer[][] { 328 | // Make sure there are no duplicates 329 | for (let i = 0; i < arr.length; i++) { 330 | for (let j = i + 1; j < arr.length; j++) { 331 | if (arr[i].equals(arr[j])) { 332 | arr.splice(j, 1) 333 | } 334 | } 335 | } 336 | 337 | const res = [] 338 | const numCombinations = Math.pow(2, arr.length) 339 | for (let i = 0; i < numCombinations; i++) { 340 | const tmp = [] 341 | for (let j = 0; j < arr.length; j++) { 342 | if (i & Math.pow(2, j)) { 343 | tmp.push(arr[j]) 344 | } 345 | } 346 | res.push(tmp) 347 | } 348 | return res 349 | } 350 | -------------------------------------------------------------------------------- /test/realistic.ts: -------------------------------------------------------------------------------- 1 | import * as tape from 'tape' 2 | import * as fs from 'fs' 3 | import * as path from 'path' 4 | import { keccak256 } from 'ethereumjs-util' 5 | import { verifyMultiproof } from '../src/multiproof' 6 | import { 7 | AccountData, 8 | accountDataFromJSON, 9 | turboproofFromAccountData, 10 | toBuffer, 11 | generateRealisticTestSuite, 12 | } from '../src/relayer/realistic' 13 | import { main } from '../src/ee' 14 | const Trie = require('merkle-patricia-tree/secure') 15 | 16 | tape('turboproof from rpc data', async t => { 17 | const testCase = JSON.parse( 18 | fs.readFileSync(path.join(__dirname, 'fixture/eth_getProof_sample.json'), { encoding: 'utf8' }), 19 | ) 20 | const accData = [] 21 | const preStateRoot = toBuffer(testCase.preStateRoot) 22 | for (const acc of testCase.accounts) { 23 | accData.push(accountDataFromJSON(acc.result)) 24 | } 25 | const trie = new Trie() 26 | trie.root = preStateRoot 27 | const p = await turboproofFromAccountData(trie, accData) 28 | 29 | const keys = accData.map((a: AccountData) => keccak256(a.address)) 30 | 
keys.sort(Buffer.compare) 31 | t.true(await verifyMultiproof(preStateRoot, p, keys)) 32 | 33 | t.end() 34 | }) 35 | 36 | tape('run ee with rpc data', async t => { 37 | const testCase = JSON.parse( 38 | fs.readFileSync(path.join(__dirname, 'fixture/eth_getProof_sample.json'), { encoding: 'utf8' }), 39 | ) 40 | const testSuite = await generateRealisticTestSuite(testCase) 41 | // Only verifies multiproof, doesn't update trie 42 | const isValid = main({ preStateRoot: testSuite.preStateRoot, blockData: testSuite.blockData }) 43 | t.true(isValid) 44 | t.end() 45 | }) 46 | -------------------------------------------------------------------------------- /test/relayer.ts: -------------------------------------------------------------------------------- 1 | import * as tape from 'tape' 2 | import { getBasicBlocks, getBasicBlockIndices, merkelizeCode } from '../src/relayer/lib' 3 | import { keccak256 } from 'ethereumjs-util' 4 | const { promisify } = require('util') 5 | const { prove, verifyProof } = require('merkle-patricia-tree/proof') 6 | const proveP = promisify(prove) 7 | const verifyProofP = promisify(verifyProof) 8 | 9 | tape('get evm basic blocks', async t => { 10 | t.test('add11 bytecode, one block', (st: tape.Test) => { 11 | const codeHex = '600160010160005500' 12 | const code = Buffer.from(codeHex, 'hex') 13 | const blocks = getBasicBlocks(code) 14 | t.equal(blocks.length, 1, 'bytecode should have one block') 15 | t.equal(blocks[0].toString('hex'), codeHex) 16 | st.end() 17 | }) 18 | 19 | t.test('two blocks separated by JUMPDEST', (st: tape.Test) => { 20 | const code = Buffer.from('60005b600000', 'hex') 21 | const blocks = getBasicBlocks(code) 22 | t.equal(blocks.length, 2, 'bytecode should have two block') 23 | t.equal(blocks[0].toString('hex'), '6000') 24 | t.equal(blocks[1].toString('hex'), '5b600000') 25 | st.end() 26 | }) 27 | }) 28 | 29 | tape('merkelize evm bytecode', async t => { 30 | t.test('merkelize basic code', async (st: tape.Test) => { 31 | const code 
= Buffer.from('60005b600000', 'hex') 32 | const blocks = getBasicBlockIndices(code) 33 | const trie = await merkelizeCode(code) 34 | const key = keccak256(Buffer.from('02', 'hex')) 35 | const p = await proveP(trie, key) 36 | const v = await verifyProofP(trie.root, key, p) 37 | st.equal(v.toString('hex'), '5b600000') 38 | st.end() 39 | }) 40 | }) 41 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@ethereumjs/config-tsc", 3 | "include": ["src/**/*.ts"] 4 | } 5 | -------------------------------------------------------------------------------- /tsconfig.prod.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@ethereumjs/config-tsc", 3 | "compilerOptions": { 4 | "outDir": "./dist" 5 | }, 6 | "include": ["src/**/*.ts"] 7 | } 8 | -------------------------------------------------------------------------------- /tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@ethereumjs/config-tslint" 3 | } 4 | --------------------------------------------------------------------------------