├── .gitattributes ├── .gitignore ├── .npmignore ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── LICENSE ├── README.md ├── bin ├── README.md ├── index.js ├── lib │ ├── MerkleTree.js │ ├── assembly │ │ ├── blake2s.wasm │ │ └── index.js │ ├── hash │ │ ├── JsHash.js │ │ ├── index.js │ │ └── wasmBlake2s.js │ └── vectors │ │ ├── JsVector.js │ │ └── WasmVector.js ├── merkle.d.ts ├── package-lock.json └── package.json ├── gulpfile.js ├── index.ts ├── lib ├── MerkleTree.ts ├── assembly │ ├── blake2s.as.ts │ └── index.ts ├── hash │ ├── JsHash.ts │ ├── index.ts │ └── wasmBlake2s.ts └── vectors │ ├── JsVector.ts │ └── WasmVector.ts ├── merkle.d.ts ├── package-lock.json ├── package.json ├── tests ├── MerkleTree.spec.ts └── run.ts └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | #TypeScript Map Files 2 | *.map 3 | 4 | # Installed modules 5 | node_modules/ 6 | 7 | # Compiled Tests 8 | /bin/tests/ 9 | 10 | # .npmignore in bin folder 11 | /bin/.npmignore 12 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | tests/ 2 | *.js.map 3 | *.wasm.map 4 | .npmignore -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Launch", 6 | "type": "node", 7 | "request": "launch", 8 | "program": "${workspaceRoot}/bin/tests/run.js", 9 | "cwd": "${workspaceRoot}/bin", 10 | "preLaunchTask": "build", 11 | "env": { 12 | "NODE_ENV": 
"development" 13 | }, 14 | "sourceMaps": true, 15 | "outFiles": ["${workspaceRoot}/bin/**/*.js"], 16 | "console": "internalConsole" 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "files.exclude": { 3 | "**/.git": true, 4 | ".gitignore": true, 5 | ".gitattributes": true, 6 | ".npmignore": true, 7 | "package-lock.json": true, 8 | "**/*.js.map": true 9 | }, 10 | "search.exclude": { 11 | "**/node_modules": true, 12 | "**/*.js": true, 13 | "**/bin": true, 14 | "package-lock.json": true 15 | }, 16 | "cSpell.words": [ 17 | "changetype", 18 | "usize" 19 | ] 20 | } 21 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "setup", 6 | "type": "shell", 7 | "command": "npm install", 8 | "presentation": { 9 | "reveal": "always" 10 | } 11 | }, 12 | { 13 | "label": "build", 14 | "type": "gulp", 15 | "task": "build", 16 | "group": { 17 | "kind": "build", 18 | "isDefault": true 19 | }, 20 | "presentation": { 21 | "reveal": "silent" 22 | }, 23 | "problemMatcher": [ "$tsc" ] 24 | }, 25 | { 26 | "label": "test", 27 | "type": "gulp", 28 | "task": "test", 29 | "group": { 30 | "kind": "test", 31 | "isDefault": true 32 | }, 33 | "presentation": { 34 | "reveal": "always" 35 | }, 36 | "problemMatcher": [ "$tsc" ] 37 | }, 38 | { 39 | "label": "publish", 40 | "type": "gulp", 41 | "task": "publish", 42 | "presentation": { 43 | "reveal": "always" 44 | }, 45 | "problemMatcher": [ "$tsc" ] 46 | } 47 | ] 48 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Guild of 
Weavers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Merkle 2 | Merkle tree and other data structures. 
3 | 4 | ## Install 5 | ```bash 6 | $ npm install @guildofweavers/merkle --save 7 | ``` 8 | 9 | ## Example 10 | ```TypeScript 11 | import { MerkleTree, createHash } from '@guildofweavers/merkle'; 12 | 13 | // create an array of values to put into a tree 14 | const values = [ 15 | Buffer.from('a'), 16 | Buffer.from('b'), 17 | Buffer.from('c'), 18 | Buffer.from('d') 19 | ]; 20 | 21 | // create a Merkle tree 22 | const hash = createHash('sha256'); 23 | const tree = MerkleTree.create(values, hash); 24 | 25 | // create a proof for the second position in the tree (value 'b') 26 | const proof = tree.prove(1); 27 | console.log(proof[0].toString()); // 'b' 28 | 29 | // verify the proof 30 | const result = MerkleTree.verify(tree.root, 1, proof, hash); 31 | console.log(result); // true 32 | ``` 33 | 34 | ## API 35 | You can find complete API definitions in [merkle.d.ts](/merkle.d.ts). Here is a quick overview of the provided functionality: 36 | 37 | ### Creating Merkle trees 38 | You can create a Merkle Tree from a list of values: 39 | 40 | * static **create**(values: `Buffer[]` | `Vector`, hash: `Hash`): `MerkleTree` 41 | * static **createAsync**(values: `Buffer[]` | `Vector`, hash: `Hash`): `Promise` 42 | 43 | The meaning of the parameters is as follows: 44 | 45 | | Parameter | Description | 46 | | --------- | ----------- | 47 | | values | Values that will form the leaves of the Merkle tree. If provided as an array of `Buffer` objects, all buffers are assumed to have the same length (otherwise, bad things will happen). Can also be provided as an object that complies with `Vector` interface, or as a single `Buffer` object. | 48 | | valueSize | If `values` are provided as a single `Buffer`, this parameter specifies length of a single value (in bytes). | 49 | | hash | A [hash](#Hash) object that will be used to hash values and internal nodes. | 50 | 51 | **Note:** async method is currently just a placeholder. All it does is call the sync version and returns the result. 
52 | 53 | ### Creating Merkle proofs 54 | Once you have a tree, you can use it to prove that a value is located at a certain index like so: 55 | 56 | * **prove**(index: `number`): `Buffer[]`
57 | The returned proof is an array which has the values as the first element, and nodes comprising the proof as all other elements. 58 | 59 | You can also create a proof for many indexes at the same time: 60 | 61 | * **proveBatch**(indexes: `number[]`): `BatchMerkleProof`
62 | The resulting proof is [compressed](#Batch-proof-compression). So, if you need to prove membership of multiple values, this is a much more efficient approach. 63 | 64 | Batch proof has the following form: 65 | 66 | ```TypeScript 67 | interface BatchMerkleProof { 68 | values : Buffer[]; 69 | nodes : Buffer[][]; 70 | depth : number; 71 | } 72 | ``` 73 | where, `values` are the leaves located at the indexes covered by the proof, `nodes` are the internal nodes that form the actual proof, and `depth` is the depth of the source tree. 74 | 75 | ### Verifying Merkle proofs 76 | Once you have a proof, you can verify it against a tree root like so: 77 | 78 | * static **verify**(root: `Buffer`, index: `number`, proof: `Buffer[]`, hash: `Hash`): `boolean`
79 | This will return `true` if the value located at the first position in the `proof` array is indeed located at the specified `index` in the tree. 80 | 81 | For the batched version use: 82 | 83 | * static **verifyBatch**(root: `Buffer`, indexes: `number[]`, proof: `BatchMerkleProof`, hash: `Hash`): `boolean`
84 | Similarly to single-index version, this will return `true` if the values in `proof.values` are indeed located at the specified `indexes` in the tree. 85 | 86 | ### Hash 87 | A `Hash` object is required when creating Merkle trees and when verifying Merkle proofs. Internally, it is used for hashing of all values and tree nodes. To create a Hash object, you can use `createHash()` function: 88 | 89 | * **createHash**(algorithm: `string`, useWasm?: `boolean`): `Hash`
90 | Creates a Hash object for the specified `algorithm`. If `useWasm` is set to true, will try to instantiate a WebAssembly-optimized version of the algorithm. If WASM optimization is not available for the specified algorithm, Node's native implementation will be used. 91 | 92 | * **createHash**(algorithm: `string`, wasmOptions: `WasmOptions`): `Hash`
93 | Tries to create a WebAssembly-optimized Hash object for the specified `algorithm` and pass the provided options to it. If WASM optimization is not available for the specified algorithm, Node's native implementation will be used. 94 | 95 | Currently, the following hash algorithms are supported: 96 | 97 | | Algorithm | WASM-optimized | 98 | | ---------- | :------------: | 99 | | sha256 | no | 100 | | blake2s256 | yes | 101 | 102 | Hash objects returned from `createHash()` function will have the following form: 103 | ```TypeScript 104 | interface Hash { 105 | readonly algorithm : HashAlgorithm; 106 | readonly digestSize : number; 107 | 108 | digest(value: Buffer): Buffer; 109 | merge(a: Buffer, b: Buffer): Buffer; 110 | } 111 | ``` 112 | where, `digest(value)` hashes the provided value, and `merge(a,b)` hashes a concatenation of values `a` and `b`. 113 | 114 | ## Performance 115 | Some very informal benchmarks run on Intel Core i5-7300U @ 2.60GHz (single thread) for generating a tree out of 220 32-byte values: 116 | 117 | | Hash Algorithm | Native JS | WASM (external) | WASM (internal) | 118 | | -------------- | --------- | --------------- | ---------------- | 119 | | sha256 | 3.5 sec | N/A | N/A | 120 | | blake2s256 | 3.2 sec | 750 ms | 650 ms | 121 | 122 | The difference between _external_ and _internal_ cases for WASM is that in the internal case, values from which the tree is to be built are already in WASM memory, while in the external case, they need to be copied into WASM memory. 123 | 124 | **Note:** while WebAssembly-optimized version of Blake2s algorithm is much faster at hashing small values (i.e. 32-256 bytes), it is slower at hashing large values. For example, when hashing 1KB values, Node's native implementation is about 50% faster. 125 | 126 | ### Batch proof compression 127 | When you generate batch proofs, the proofs are compressed by removing redundant nodes. 
The table below shows an approximate size of batch proof for a given number of indexes against trees of a given size. 128 | 129 | | Tree leaves | 32 indexes | 64 indexes | 128 indexes | 256 indexes | 130 | | :------------: | ------------: | ------------: | ------------: | ------------: | 131 | | 210 | 5.2 KB (47%) | 8.6 KB (39%) | 13.4 KB (30%) | 20.1 KB (23%) | 132 | | 212 | 7.0 KB (54%) | 12.4 KB (48%) | 20.6 KB (40%) | 34.0 KB (33%) | 133 | | 214 | 9.2 KB (61%) | 16.2 KB (54%) | 28.6 KB (48%) | 49.3 KB (41%) | 134 | | 216 | 11.0 KB (65%) | 20.3 KB (60%) | 36.5 KB (54%) | 65.2 KB (48%) | 135 | | 218 | 13.1 KB (69%) | 24.5 KB (63%) | 44.6 KB (59%) | 81.0 KB (53%) | 136 | | 220 | 15.1 KB (72%) | 28.4 KB (68%) | 52.5 KB (63%) | 96.8 KB (58%) | 137 | 138 | The percentages next to proof sizes are ratios of the batch proof size to a naive proof size. For example, if you generate a batch proof for 32 indexes against a tree of 210 leaves, your proof will be about 5.2 KB, and that will be 47% of 32 individual proofs against the same tree. 139 | 140 | ## References 141 | 142 | * Wikipedia article on [Merkle trees](https://en.wikipedia.org/wiki/Merkle_tree). 143 | * Batch proof/verification use a variation of the Octopus algorithm from [this paper](https://eprint.iacr.org/2017/933.pdf). 144 | 145 | # License 146 | [MIT](/LICENSE) © 2019 Guild of Weavers -------------------------------------------------------------------------------- /bin/README.md: -------------------------------------------------------------------------------- 1 | # Merkle 2 | Merkle tree and other data structures. 
3 | 4 | ## Install 5 | ```bash 6 | $ npm install @guildofweavers/merkle --save 7 | ``` 8 | 9 | ## Example 10 | ```TypeScript 11 | import { MerkleTree, createHash } from '@guildofweavers/merkle'; 12 | 13 | // create an array of values to put into a tree 14 | const values = [ 15 | Buffer.from('a'), 16 | Buffer.from('b'), 17 | Buffer.from('c'), 18 | Buffer.from('d') 19 | ]; 20 | 21 | // create a Merkle tree 22 | const hash = createHash('sha256'); 23 | const tree = MerkleTree.create(values, hash); 24 | 25 | // create a proof for the second position in the tree (value 'b') 26 | const proof = tree.prove(1); 27 | console.log(proof[0].toString()); // 'b' 28 | 29 | // verify the proof 30 | const result = MerkleTree.verify(tree.root, 1, proof, hash); 31 | console.log(result); // true 32 | ``` 33 | 34 | ## API 35 | You can find complete API definitions in [merkle.d.ts](/merkle.d.ts). Here is a quick overview of the provided functionality: 36 | 37 | ### Creating Merkle trees 38 | You can create a Merkle Tree from a list of values: 39 | 40 | * static **create**(values: `Buffer[]` | `Vector`, hash: `Hash`): `MerkleTree` 41 | * static **createAsync**(values: `Buffer[]` | `Vector`, hash: `Hash`): `Promise` 42 | 43 | The meaning of the parameters is as follows: 44 | 45 | | Parameter | Description | 46 | | --------- | ----------- | 47 | | values | Values that will form the leaves of the Merkle tree. If provided as an array of `Buffer` objects, all buffers are assumed to have the same length (otherwise, bad things will happen). Can also be provided as an object that complies with `Vector` interface, or as a single `Buffer` object. | 48 | | valueSize | If `values` are provided as a single `Buffer`, this parameter specifies length of a single value (in bytes). | 49 | | hash | A [hash](#Hash) object that will be used to hash values and internal nodes. | 50 | 51 | **Note:** async method is currently just a placeholder. All it does is call the sync version and returns the result. 
52 | 53 | ### Creating Merkle proofs 54 | Once you have a tree, you can use it to prove that a value is located at a certain index like so: 55 | 56 | * **prove**(index: `number`): `Buffer[]`
57 | The returned proof is an array which has the values as the first element, and nodes comprising the proof as all other elements. 58 | 59 | You can also create a proof for many indexes at the same time: 60 | 61 | * **proveBatch**(indexes: `number[]`): `BatchMerkleProof`
62 | The resulting proof is [compressed](#Batch-proof-compression). So, if you need to prove membership of multiple values, this is a much more efficient approach. 63 | 64 | Batch proof has the following form: 65 | 66 | ```TypeScript 67 | interface BatchMerkleProof { 68 | values : Buffer[]; 69 | nodes : Buffer[][]; 70 | depth : number; 71 | } 72 | ``` 73 | where, `values` are the leaves located at the indexes covered by the proof, `nodes` are the internal nodes that form the actual proof, and `depth` is the depth of the source tree. 74 | 75 | ### Verifying Merkle proofs 76 | Once you have a proof, you can verify it against a tree root like so: 77 | 78 | * static **verify**(root: `Buffer`, index: `number`, proof: `Buffer[]`, hash: `Hash`): `boolean`
79 | This will return `true` if the value located at the first position in the `proof` array is indeed located at the specified `index` in the tree. 80 | 81 | For the batched version use: 82 | 83 | * static **verifyBatch**(root: `Buffer`, indexes: `number[]`, proof: `BatchMerkleProof`, hash: `Hash`): `boolean`
84 | Similarly to single-index version, this will return `true` if the values in `proof.values` are indeed located at the specified `indexes` in the tree. 85 | 86 | ### Hash 87 | A `Hash` object is required when creating Merkle trees and when verifying Merkle proofs. Internally, it is used for hashing of all values and tree nodes. To create a Hash object, you can use `createHash()` function: 88 | 89 | * **createHash**(algorithm: `string`, useWasm?: `boolean`): `Hash`
90 | Creates a Hash object for the specified `algorithm`. If `useWasm` is set to true, will try to instantiate a WebAssembly-optimized version of the algorithm. If WASM optimization is not available for the specified algorithm, Node's native implementation will be used. 91 | 92 | * **createHash**(algorithm: `string`, wasmOptions: `WasmOptions`): `Hash`
93 | Tries to create a WebAssembly-optimized Hash object for the specified `algorithm` and pass the provided options to it. If WASM optimization is not available for the specified algorithm, Node's native implementation will be used. 94 | 95 | Currently, the following hash algorithms are supported: 96 | 97 | | Algorithm | WASM-optimized | 98 | | ---------- | :------------: | 99 | | sha256 | no | 100 | | blake2s256 | yes | 101 | 102 | Hash objects returned from `createHash()` function will have the following form: 103 | ```TypeScript 104 | interface Hash { 105 | readonly algorithm : HashAlgorithm; 106 | readonly digestSize : number; 107 | 108 | digest(value: Buffer): Buffer; 109 | merge(a: Buffer, b: Buffer): Buffer; 110 | } 111 | ``` 112 | where, `digest(value)` hashes the provided value, and `merge(a,b)` hashes a concatenation of values `a` and `b`. 113 | 114 | ## Performance 115 | Some very informal benchmarks run on Intel Core i5-7300U @ 2.60GHz (single thread) for generating a tree out of 220 32-byte values: 116 | 117 | | Hash Algorithm | Native JS | WASM (external) | WASM (internal) | 118 | | -------------- | --------- | --------------- | ---------------- | 119 | | sha256 | 3.5 sec | N/A | N/A | 120 | | blake2s256 | 3.2 sec | 750 ms | 650 ms | 121 | 122 | The difference between _external_ and _internal_ cases for WASM is that in the internal case, values from which the tree is to be built are already in WASM memory, while in the external case, they need to be copied into WASM memory. 123 | 124 | **Note:** while WebAssembly-optimized version of Blake2s algorithm is much faster at hashing small values (i.e. 32-256 bytes), it is slower at hashing large values. For example, when hashing 1KB values, Node's native implementation is about 50% faster. 125 | 126 | ### Batch proof compression 127 | When you generate batch proofs, the proofs are compressed by removing redundant nodes. 
The table below shows an approximate size of batch proof for a given number of indexes against trees of a given size. 128 | 129 | | Tree leaves | 32 indexes | 64 indexes | 128 indexes | 256 indexes | 130 | | :------------: | ------------: | ------------: | ------------: | ------------: | 131 | | 210 | 5.2 KB (47%) | 8.6 KB (39%) | 13.4 KB (30%) | 20.1 KB (23%) | 132 | | 212 | 7.0 KB (54%) | 12.4 KB (48%) | 20.6 KB (40%) | 34.0 KB (33%) | 133 | | 214 | 9.2 KB (61%) | 16.2 KB (54%) | 28.6 KB (48%) | 49.3 KB (41%) | 134 | | 216 | 11.0 KB (65%) | 20.3 KB (60%) | 36.5 KB (54%) | 65.2 KB (48%) | 135 | | 218 | 13.1 KB (69%) | 24.5 KB (63%) | 44.6 KB (59%) | 81.0 KB (53%) | 136 | | 220 | 15.1 KB (72%) | 28.4 KB (68%) | 52.5 KB (63%) | 96.8 KB (58%) | 137 | 138 | The percentages next to proof sizes are ratios of the batch proof size to a naive proof size. For example, if you generate a batch proof for 32 indexes against a tree of 210 leaves, your proof will be about 5.2 KB, and that will be 47% of 32 individual proofs against the same tree. 139 | 140 | ## References 141 | 142 | * Wikipedia article on [Merkle trees](https://en.wikipedia.org/wiki/Merkle_tree). 143 | * Batch proof/verification use a variation of the Octopus algorithm from [this paper](https://eprint.iacr.org/2017/933.pdf). 
144 | 145 | # License 146 | [MIT](/LICENSE) © 2019 Guild of Weavers -------------------------------------------------------------------------------- /bin/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // RE-EXPORTS 4 | // ================================================================================================ 5 | var hash_1 = require("./lib/hash"); 6 | exports.createHash = hash_1.createHash; 7 | exports.isWasmOptimized = hash_1.isWasmOptimized; 8 | var MerkleTree_1 = require("./lib/MerkleTree"); 9 | exports.MerkleTree = MerkleTree_1.MerkleTree; 10 | //# sourceMappingURL=index.js.map -------------------------------------------------------------------------------- /bin/lib/MerkleTree.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const JsVector_1 = require("./vectors/JsVector"); 4 | // CLASS DEFINITION 5 | // ================================================================================================ 6 | class MerkleTree { 7 | constructor(nodes, leaves, depth, nodeSize) { 8 | this.depth = depth; 9 | this.nodes = Buffer.from(nodes); 10 | this.nodeSize = nodeSize; 11 | this.values = leaves; 12 | } 13 | // CONSTRUCTORS 14 | // -------------------------------------------------------------------------------------------- 15 | static async createAsync(values, hash) { 16 | // FUTURE: implement asynchronous instantiation 17 | return MerkleTree.create(values, hash); 18 | } 19 | static create(values, hash) { 20 | const leaves = Array.isArray(values) ? 
new JsVector_1.JsVector(values) : values; 21 | const depth = Math.ceil(Math.log2(leaves.length)); 22 | const nodes = hash.buildMerkleNodes(depth, leaves); 23 | return new MerkleTree(nodes, leaves, depth, hash.digestSize); 24 | } 25 | // PUBLIC ACCESSORS 26 | // -------------------------------------------------------------------------------------------- 27 | get root() { 28 | // makes a copy of a node at position 1 29 | return this.nodes.slice(this.nodeSize, this.nodeSize + this.nodeSize); 30 | } 31 | getLeaf(index) { 32 | // makes a copy of the leaf value 33 | return Buffer.from(this.values.toBuffer(index, 1)); 34 | } 35 | getLeaves() { 36 | // makes a deep copy of all leaves 37 | const leaves = new Array(this.values.length); 38 | for (let i = 0; i < leaves.length; i++) { 39 | leaves[i] = Buffer.from(this.values.toBuffer(i, 1)); 40 | } 41 | return leaves; 42 | } 43 | // PUBLIC METHODS 44 | // -------------------------------------------------------------------------------------------- 45 | prove(index) { 46 | if (index < 0) 47 | throw new TypeError(`Invalid index: ${index}`); 48 | if (index > this.values.length) 49 | throw new TypeError(`Invalid index: ${index}`); 50 | if (!Number.isInteger(index)) 51 | throw new TypeError(`Invalid index: ${index}`); 52 | const nodeSize = this.nodeSize; 53 | const nodeCount = this.nodes.byteLength / nodeSize; 54 | const value1 = this.getLeaf(index); 55 | const value2 = this.getLeaf(index ^ 1); 56 | const proof = [value1, value2]; 57 | index = (index + nodeCount) >> 1; 58 | while (index > 1) { 59 | let siblingOffset = (index ^ 1) * nodeSize; 60 | let sibling = this.nodes.slice(siblingOffset, siblingOffset + nodeSize); 61 | proof.push(sibling); 62 | index = index >> 1; 63 | } 64 | return proof; 65 | } 66 | proveBatch(indexes) { 67 | const nodeSize = this.nodeSize; 68 | const nodeCount = this.nodes.byteLength / nodeSize; 69 | const indexMap = mapIndexes(indexes, this.values.length - 1); 70 | indexes = normalizeIndexes(indexes); 71 | 
const proof = { 72 | values: new Array(indexMap.size), 73 | nodes: new Array(indexes.length), 74 | depth: this.depth 75 | }; 76 | // populate the proof with leaf node values 77 | let nextIndexes = []; 78 | for (let i = 0; i < indexes.length; i++) { 79 | let index = indexes[i]; 80 | let v1 = this.getLeaf(index); 81 | let v2 = this.getLeaf(index + 1); 82 | // only values for indexes that were explicitly requested are included in values array 83 | const inputIndex1 = indexMap.get(index); 84 | const inputIndex2 = indexMap.get(index + 1); 85 | if (inputIndex1 !== undefined) { 86 | if (inputIndex2 !== undefined) { 87 | proof.values[inputIndex1] = v1; 88 | proof.values[inputIndex2] = v2; 89 | proof.nodes[i] = []; 90 | } 91 | else { 92 | proof.values[inputIndex1] = v1; 93 | proof.nodes[i] = [v2]; 94 | } 95 | } 96 | else { 97 | proof.values[inputIndex2] = v2; 98 | proof.nodes[i] = [v1]; 99 | } 100 | nextIndexes.push((index + nodeCount) >> 1); 101 | } 102 | // add required internal nodes to the proof, skipping redundancies 103 | for (let d = this.depth - 1; d > 0; d--) { 104 | indexes = nextIndexes; 105 | nextIndexes = []; 106 | for (let i = 0; i < indexes.length; i++) { 107 | let siblingIndex = indexes[i] ^ 1; 108 | if (i + 1 < indexes.length && indexes[i + 1] === siblingIndex) { 109 | i++; 110 | } 111 | else { 112 | let siblingOffset = siblingIndex * nodeSize; 113 | let sibling = this.nodes.slice(siblingOffset, siblingOffset + nodeSize); 114 | proof.nodes[i].push(sibling); 115 | } 116 | // add parent index to the set of next indexes 117 | nextIndexes.push(siblingIndex >> 1); 118 | } 119 | } 120 | return proof; 121 | } 122 | // STATIC METHODS 123 | // -------------------------------------------------------------------------------------------- 124 | static verify(root, index, proof, hash) { 125 | const r = index & 1; 126 | const value1 = proof[r]; 127 | const value2 = proof[1 - r]; 128 | let v = hash.merge(value1, value2); 129 | index = (index + 2 ** (proof.length - 1)) >> 
1; 130 | for (let i = 2; i < proof.length; i++) { 131 | if (index & 1) { 132 | v = hash.merge(proof[i], v); 133 | } 134 | else { 135 | v = hash.merge(v, proof[i]); 136 | } 137 | index = index >> 1; 138 | } 139 | return root.equals(v); 140 | } 141 | static verifyBatch(root, indexes, proof, hash) { 142 | const v = new Map(); 143 | // replace odd indexes, offset, and sort in ascending order 144 | const offset = 2 ** proof.depth; 145 | const indexMap = mapIndexes(indexes, offset - 1); 146 | indexes = normalizeIndexes(indexes); 147 | if (indexes.length !== proof.nodes.length) 148 | return false; 149 | // for each index use values to compute parent nodes 150 | let nextIndexes = []; 151 | const proofPointers = new Array(indexes.length); 152 | for (let i = 0; i < indexes.length; i++) { 153 | let index = indexes[i]; 154 | let v1, v2; 155 | const inputIndex1 = indexMap.get(index); 156 | const inputIndex2 = indexMap.get(index + 1); 157 | if (inputIndex1 !== undefined) { 158 | if (inputIndex2 !== undefined) { 159 | v1 = proof.values[inputIndex1]; 160 | v2 = proof.values[inputIndex2]; 161 | proofPointers[i] = 0; 162 | } 163 | else { 164 | v1 = proof.values[inputIndex1]; 165 | v2 = proof.nodes[i][0]; 166 | proofPointers[i] = 1; 167 | } 168 | } 169 | else { 170 | v1 = proof.nodes[i][0]; 171 | v2 = proof.values[inputIndex2]; 172 | proofPointers[i] = 1; 173 | } 174 | // if either value wasn't found, proof fails 175 | if (v1 === undefined || v2 === undefined) 176 | return false; 177 | let parent = hash.merge(v1, v2); 178 | let parentIndex = (offset + index >> 1); 179 | v.set(parentIndex, parent); 180 | nextIndexes.push(parentIndex); 181 | } 182 | // iteratively move up, until we get to the root 183 | for (let d = proof.depth - 1; d > 0; d--) { 184 | indexes = nextIndexes; 185 | nextIndexes = []; 186 | for (let i = 0; i < indexes.length; i++) { 187 | let nodeIndex = indexes[i]; 188 | let siblingIndex = nodeIndex ^ 1; 189 | // determine the sibling 190 | let sibling; 191 | if (i + 1 < 
indexes.length && indexes[i + 1] === siblingIndex) { 192 | sibling = v.get(siblingIndex); 193 | i++; 194 | } 195 | else { 196 | let pointer = proofPointers[i]; 197 | sibling = proof.nodes[i][pointer]; 198 | proofPointers[i] = pointer + 1; 199 | } 200 | let node = v.get(nodeIndex); 201 | // if either node wasn't found, proof fails 202 | if (node === undefined || sibling === undefined) 203 | return false; 204 | // calculate parent node and add it to the next set of nodes 205 | let parent = (nodeIndex & 1) ? hash.merge(sibling, node) : hash.merge(node, sibling); 206 | let parentIndex = nodeIndex >> 1; 207 | v.set(parentIndex, parent); 208 | nextIndexes.push(parentIndex); 209 | } 210 | } 211 | return root.equals(v.get(1)); 212 | } 213 | } 214 | exports.MerkleTree = MerkleTree; 215 | // HELPER FUNCTIONS 216 | // ================================================================================================ 217 | function normalizeIndexes(input) { 218 | input = input.slice().sort(compareNumbers); 219 | const output = new Set(); 220 | for (let index of input) { 221 | output.add(index - (index & 1)); 222 | } 223 | return Array.from(output); 224 | } 225 | function mapIndexes(input, maxValid) { 226 | const output = new Map(); 227 | for (let i = 0; i < input.length; i++) { 228 | let index = input[i]; 229 | output.set(index, i); 230 | if (index < 0) 231 | throw new TypeError(`Invalid index: ${index}`); 232 | if (index > maxValid) 233 | throw new TypeError(`Invalid index: ${index}`); 234 | if (!Number.isInteger(index)) 235 | throw new TypeError(`Invalid index: ${index}`); 236 | } 237 | if (input.length !== output.size) 238 | throw new Error('Repeating indexes detected'); 239 | return output; 240 | } 241 | function compareNumbers(a, b) { 242 | return a - b; 243 | } 244 | //# sourceMappingURL=MerkleTree.js.map -------------------------------------------------------------------------------- /bin/lib/assembly/blake2s.wasm: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/GuildOfWeavers/merkle/0235f48f6603153c0d6f05fdbc5f0def8a31c3d0/bin/lib/assembly/blake2s.wasm -------------------------------------------------------------------------------- /bin/lib/assembly/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // IMPORTS 4 | // ================================================================================================ 5 | const fs = require("fs"); 6 | const loader = require("@assemblyscript/loader"); 7 | // CONSTANTS 8 | // ================================================================================================ 9 | const BLAKE2S_WASM = `${__dirname}/blake2s.wasm`; 10 | const IV = [ 11 | 0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 12 | 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19 13 | ]; 14 | const SIGMA = [ 15 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 | 14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3, 17 | 11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4, 18 | 7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8, 19 | 9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13, 20 | 2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9, 21 | 12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11, 22 | 13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10, 23 | 6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5, 24 | 10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 25 | ]; 26 | // PUBLIC MODULE 27 | // ================================================================================================ 28 | function instantiateBlake2s(memory) { 29 | if (memory === undefined) { 30 | memory = new WebAssembly.Memory({ initial: 10 }); 31 | } 32 | const wasm = loader.instantiateSync(fs.readFileSync(BLAKE2S_WASM), { 33 | env: { memory } 34 | }); 35 | const memU8 = 
new Uint8Array(wasm.memory.buffer); 36 | let sIdx = wasm.getSigmaRef(); 37 | for (let sigma of SIGMA) { 38 | memU8[sIdx] = sigma * 4; 39 | sIdx++; 40 | } 41 | const memU32 = new Uint32Array(wasm.memory.buffer); 42 | let iIdx = wasm.getIvRef() >> 2; 43 | for (let iv of IV) { 44 | memU32[iIdx] = iv; 45 | iIdx++; 46 | } 47 | return wasm; 48 | } 49 | exports.instantiateBlake2s = instantiateBlake2s; 50 | //# sourceMappingURL=index.js.map -------------------------------------------------------------------------------- /bin/lib/hash/JsHash.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // IMPORTS 4 | // ================================================================================================ 5 | const crypto = require("crypto"); 6 | const JsVector_1 = require("../vectors/JsVector"); 7 | // MODULE VARIABLES 8 | // ================================================================================================ 9 | const DIGEST_SIZE = 32; // 32 bytes 10 | const DOUBLE_INPUT_LENGTH = 2 * DIGEST_SIZE; 11 | const NULL_BUFFER = Buffer.alloc(DIGEST_SIZE); 12 | // CLASS DEFINITION 13 | // ================================================================================================ 14 | class JsHash { 15 | // CONSTRUCTOR 16 | // -------------------------------------------------------------------------------------------- 17 | constructor(algorithm) { 18 | this.algorithm = algorithm; 19 | } 20 | // PROPERTY ACCESSORS 21 | // -------------------------------------------------------------------------------------------- 22 | get digestSize() { 23 | return DIGEST_SIZE; 24 | } 25 | get isOptimized() { 26 | return false; 27 | } 28 | // PUBLIC METHODS 29 | // -------------------------------------------------------------------------------------------- 30 | digest(value) { 31 | const hash = crypto.createHash(this.algorithm); 32 | hash.update(value); 33 | 
return hash.digest(); 34 | } 35 | merge(a, b) { 36 | const hash = crypto.createHash(this.algorithm); 37 | hash.update(a); 38 | hash.update(b); 39 | return hash.digest(); 40 | } 41 | buildMerkleNodes(depth, leaves) { 42 | // allocate memory for tree nodes 43 | const nodeCount = 2 ** depth; 44 | const nodes = new ArrayBuffer(nodeCount * DIGEST_SIZE); 45 | const nodeBuffer = Buffer.from(nodes); 46 | // build first row of internal nodes (parents of leaves) 47 | const parentCount = nodeCount / 2; 48 | const evenLeafCount = (leaves.length & 1) ? leaves.length - 1 : leaves.length; 49 | let tOffset = parentCount * DIGEST_SIZE; 50 | const lBuffer = leaves.toBuffer(); 51 | const doubleElementSize = leaves.elementSize * 2; 52 | let sOffset = 0; 53 | for (let i = 0; i < evenLeafCount; i += 2, sOffset += doubleElementSize, tOffset += DIGEST_SIZE) { 54 | let hash = crypto.createHash(this.algorithm); 55 | hash.update(lBuffer.slice(sOffset, sOffset + doubleElementSize)); 56 | hash.digest().copy(nodeBuffer, tOffset); 57 | } 58 | // if the number of leaves was odd, process the last leaf 59 | if (evenLeafCount !== leaves.length) { 60 | let hash = crypto.createHash(this.algorithm); 61 | hash.update(lBuffer.slice(sOffset)); 62 | hash.update(NULL_BUFFER); 63 | hash.digest().copy(nodeBuffer, tOffset); 64 | tOffset += DIGEST_SIZE; 65 | } 66 | // if number of leaves was not a power of 2, assume all other leaves are NULL 67 | if (leaves.length < nodeCount) { 68 | const nullParent = this.merge(NULL_BUFFER, NULL_BUFFER); 69 | while (tOffset < nodes.byteLength) { 70 | nullParent.copy(nodeBuffer, tOffset); 71 | tOffset += DIGEST_SIZE; 72 | } 73 | } 74 | // calculate all other tree nodes 75 | for (let i = parentCount - 1; i > 0; i--) { 76 | let tIndex = i * DIGEST_SIZE; 77 | let sIndex = tIndex << 1; 78 | let hash = crypto.createHash(this.algorithm); 79 | hash.update(nodeBuffer.slice(sIndex, sIndex + DOUBLE_INPUT_LENGTH)); 80 | hash.digest().copy(nodeBuffer, tIndex); 81 | } 82 | return nodes; 83 
| } 84 | mergeVectorRows(vectors) { 85 | const elementCount = vectors[0].length; 86 | const elementSize = vectors[0].elementSize; 87 | const result = new Array(elementCount); 88 | const buffer = Buffer.allocUnsafe(vectors.length * elementSize); 89 | for (let i = 0; i < elementCount; i++) { 90 | let offset = 0; 91 | for (let j = 0; j < vectors.length; j++) { 92 | offset += vectors[j].copyValue(i, buffer, offset); 93 | } 94 | result[i] = this.digest(buffer); 95 | } 96 | return new JsVector_1.JsVector(result); 97 | } 98 | digestValues(values, valueSize) { 99 | const elementCount = values.byteLength / valueSize; 100 | if (!Number.isInteger(elementCount)) { 101 | throw new Error('Values buffer cannot contain partial number of elements'); 102 | } 103 | const result = new Array(elementCount); 104 | for (let i = 0, offset = 0; i < elementCount; i++, offset += valueSize) { 105 | result[i] = this.digest(values.slice(offset, offset + valueSize)); 106 | } 107 | return new JsVector_1.JsVector(result); 108 | } 109 | } 110 | exports.JsHash = JsHash; 111 | //# sourceMappingURL=JsHash.js.map -------------------------------------------------------------------------------- /bin/lib/hash/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const WasmBlake2s_1 = require("./WasmBlake2s"); 4 | const JsHash_1 = require("./JsHash"); 5 | function createHash(algorithm, useWasmOrOptions) { 6 | if (!useWasmOrOptions) { 7 | return new JsHash_1.JsHash(algorithm); 8 | } 9 | const HashCtr = getHashConstructor(algorithm); 10 | if (!HashCtr) { 11 | return new JsHash_1.JsHash(algorithm); 12 | } 13 | const wasmOptions = normalizeWasmOptions(useWasmOrOptions); 14 | return new HashCtr(wasmOptions); 15 | } 16 | exports.createHash = createHash; 17 | function isWasmOptimized(algorithm) { 18 | switch (algorithm) { 19 | case 'blake2s256': { 20 | return true; 21 | } 22 | default: { 23 
| return false; 24 | } 25 | } 26 | } 27 | exports.isWasmOptimized = isWasmOptimized; 28 | // HELPER FUNCTIONS 29 | // ================================================================================================ 30 | function getHashConstructor(algorithm) { 31 | switch (algorithm) { 32 | case 'blake2s256': { 33 | return WasmBlake2s_1.WasmBlake2s; 34 | } 35 | default: { 36 | return undefined; 37 | } 38 | } 39 | } 40 | function normalizeWasmOptions(useWasmOrOptions) { 41 | if (typeof useWasmOrOptions === 'boolean') { 42 | return { memory: new WebAssembly.Memory({ initial: 10 }) }; 43 | } 44 | const memory = useWasmOrOptions.memory || new WebAssembly.Memory({ initial: 10 }); 45 | return { memory }; 46 | } 47 | //# sourceMappingURL=index.js.map -------------------------------------------------------------------------------- /bin/lib/hash/wasmBlake2s.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const assembly_1 = require("../assembly"); 4 | const WasmVector_1 = require("../vectors/WasmVector"); 5 | // MODULE VARIABLES 6 | // ================================================================================================ 7 | const DIGEST_SIZE = 32; // 32 bytes 8 | const NULL_BUFFER = Buffer.alloc(DIGEST_SIZE); 9 | // CLASS DEFINITION 10 | // ================================================================================================ 11 | class WasmBlake2s { 12 | // CONSTRUCTOR 13 | // -------------------------------------------------------------------------------------------- 14 | constructor(options) { 15 | this.wasm = assembly_1.instantiateBlake2s(options.memory); 16 | this.iRef = this.wasm.getInputsRef(); 17 | this.oRef = this.wasm.getOutputRef(); 18 | this.oEnd = this.oRef + DIGEST_SIZE; 19 | } 20 | // PROPERTY ACCESSORS 21 | // -------------------------------------------------------------------------------------------- 22 | get 
algorithm() { 23 | return "blake2s256"; 24 | } 25 | get digestSize() { 26 | return DIGEST_SIZE; 27 | } 28 | get isOptimized() { 29 | return true; 30 | } 31 | // PUBLIC METHODS 32 | // -------------------------------------------------------------------------------------------- 33 | digest(value) { 34 | let memU8 = new Uint8Array(this.wasm.memory.buffer); 35 | // TODO: investigate checking if the buffer comes from shared memory 36 | if (value.byteLength < 4096) { 37 | memU8.set(value, this.iRef); 38 | this.wasm.hash(this.iRef, value.byteLength, this.oRef); 39 | } 40 | else { 41 | const vRef = this.wasm.newArray(value.byteLength); 42 | if (memU8.buffer !== this.wasm.memory.buffer) { 43 | memU8 = new Uint8Array(this.wasm.memory.buffer); 44 | } 45 | memU8.set(value, vRef); 46 | this.wasm.hash(vRef, value.byteLength, this.oRef); 47 | this.wasm.__release(vRef); 48 | } 49 | return Buffer.from(memU8.subarray(this.oRef, this.oEnd)); 50 | } 51 | merge(a, b) { 52 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 53 | memU8.set(a, this.iRef); 54 | memU8.set(b, this.iRef + a.byteLength); 55 | this.wasm.hash(this.iRef, a.byteLength + b.byteLength, this.oRef); 56 | return Buffer.from(memU8.subarray(this.oRef, this.oEnd)); 57 | } 58 | buildMerkleNodes(depth, leaves) { 59 | const wasm = this.wasm, iRef = this.iRef; 60 | // allocate memory for tree nodes 61 | const nodeCount = 1 << depth; 62 | const bufferLength = nodeCount * DIGEST_SIZE; 63 | const nRef = this.wasm.newArray(bufferLength); 64 | // build first row of internal nodes (parents of leaves) 65 | const parentCount = nodeCount >>> 1; // nodeCount / 2 66 | const evenLeafCount = (leaves.length & 1) ? 
leaves.length - 1 : leaves.length; 67 | let resRef = nRef + parentCount * DIGEST_SIZE; 68 | let lBuffer = leaves.toBuffer(), lRef = lBuffer.byteOffset, releaseLeaves = false; 69 | if (lBuffer.buffer !== wasm.memory.buffer) { 70 | // if the leaves buffer belongs to some other WASM memory, copy it into local memory 71 | lRef = wasm.newArray(lBuffer.byteLength); 72 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 73 | memU8.set(lBuffer, lRef); 74 | releaseLeaves = true; 75 | } 76 | resRef = wasm.hashValues1(lRef, resRef, leaves.elementSize << 1, evenLeafCount >>> 1); 77 | // if the leaves were copied into local memory, free that memory 78 | if (releaseLeaves) { 79 | wasm.__release(lRef); 80 | } 81 | // if the number of leaves was odd, process the last leaf 82 | if (evenLeafCount !== leaves.length) { 83 | const lastLeaf = Buffer.from(lBuffer.slice(lBuffer.byteLength - leaves.elementSize)); 84 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 85 | memU8.set(lastLeaf, iRef); 86 | memU8.set(NULL_BUFFER, iRef + lastLeaf.length); 87 | wasm.hash(iRef, lastLeaf.length + DIGEST_SIZE, resRef); 88 | resRef += DIGEST_SIZE; 89 | } 90 | // if number of leaves was not a power of 2, assume all other leaves are NULL 91 | if (leaves.length < nodeCount) { 92 | const nullParent = this.merge(NULL_BUFFER, NULL_BUFFER); 93 | const resEnd = nRef + bufferLength; 94 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 95 | while (resRef < resEnd) { 96 | memU8.set(nullParent, resRef); 97 | resRef += DIGEST_SIZE; 98 | } 99 | } 100 | // calculate all other tree nodes 101 | let tIndex = (parentCount - 1) * DIGEST_SIZE; 102 | let sIndex = tIndex << 1; 103 | wasm.hashValues2(nRef + sIndex, nRef + tIndex, DIGEST_SIZE << 1, parentCount); 104 | // copy the buffer out of WASM memory, free the memory, and return the buffer 105 | const nBuffer = this.wasm.memory.buffer.slice(nRef, nRef + bufferLength); 106 | this.wasm.__release(nRef); 107 | return nBuffer; 108 | } 109 | 
mergeVectorRows(vectors) { 110 | const elementCount = vectors[0].length; 111 | const elementSize = vectors[0].elementSize; 112 | if (elementSize > 64) { 113 | throw new Error(`Cannot merge vector rows: vector element size must be smaller than 64 bytes`); 114 | } 115 | else if (64 % elementSize !== 0) { 116 | throw new Error(`Cannot merge vector rows: vector element size must be a divisor of 64`); 117 | } 118 | const vRefs = this.wasm.newArray(vectors.length * 8); 119 | const vIdx = vRefs >>> 3; 120 | const refsToRelease = new Set(); 121 | // build array of references to vectors 122 | let vRef; 123 | let memU8 = new Uint8Array(this.wasm.memory.buffer); 124 | for (let i = 0; i < vectors.length; i++) { 125 | let buffer = vectors[i].toBuffer(); 126 | if (buffer.buffer === this.wasm.memory.buffer) { 127 | // if the vector is already in WASM memory, just cache the reference to it 128 | vRef = buffer.byteOffset; 129 | } 130 | else { 131 | // otherwise, copy the vector into WASM memory 132 | vRef = this.wasm.newArray(buffer.byteLength); 133 | if (memU8.buffer !== this.wasm.memory.buffer) { 134 | memU8 = new Uint8Array(this.wasm.memory.buffer); 135 | } 136 | memU8.set(vectors[i].toBuffer(), vRef); 137 | refsToRelease.add(vRef); 138 | } 139 | const memU64 = new BigUint64Array(this.wasm.memory.buffer); 140 | memU64[vIdx + i] = BigInt(vRef); 141 | } 142 | const resRef = this.wasm.newArray(elementCount * DIGEST_SIZE); 143 | this.wasm.mergeArrayElements(vRefs, resRef, vectors.length, elementCount, elementSize); 144 | // release all memory that was used up during the operation 145 | this.wasm.__release(vRefs); 146 | for (let vRef of refsToRelease) { 147 | this.wasm.__release(vRef); 148 | } 149 | // build and return a vector with hashes 150 | return new WasmVector_1.WasmVector(this.wasm.memory, resRef, elementCount, DIGEST_SIZE); 151 | } 152 | digestValues(values, valueSize) { 153 | const elementCount = values.byteLength / valueSize; 154 | if (!Number.isInteger(elementCount)) { 
155 | throw new Error('Values buffer cannot contain partial number of elements'); 156 | } 157 | let vRef, releaseValues; 158 | if (this.wasm.memory.buffer === values.buffer) { 159 | // if the vector is already in WASM memory, just cache the reference to it 160 | vRef = values.byteOffset; 161 | releaseValues = false; 162 | } 163 | else { 164 | // otherwise, copy the vector into WASM memory 165 | vRef = this.wasm.newArray(values.byteLength); 166 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 167 | memU8.set(values, vRef); 168 | releaseValues = true; 169 | } 170 | // allocate memory to hold the results and hash the values 171 | const resRef = this.wasm.newArray(elementCount * DIGEST_SIZE); 172 | this.wasm.hashValues1(vRef, resRef, valueSize, elementCount); 173 | // if the values were copied into WASM memory during the operation, free the memory 174 | if (releaseValues) { 175 | this.wasm.__release(vRef); 176 | } 177 | return new WasmVector_1.WasmVector(this.wasm.memory, resRef, elementCount, DIGEST_SIZE); 178 | } 179 | } 180 | exports.WasmBlake2s = WasmBlake2s; 181 | //# sourceMappingURL=WasmBlake2s.js.map -------------------------------------------------------------------------------- /bin/lib/vectors/JsVector.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // CLASS DEFINITION 4 | // ================================================================================================ 5 | class JsVector { 6 | constructor(values) { 7 | this.values = values; 8 | this.elementSize = values[0].byteLength; 9 | } 10 | get byteLength() { 11 | return this.values.length * this.elementSize; 12 | } 13 | get length() { 14 | return this.values.length; 15 | } 16 | copyValue(index, destination, offset) { 17 | const value = this.values[index]; 18 | value.copy(destination, offset); 19 | return this.elementSize; 20 | } 21 | toBuffer(startIdx = 0, 
elementCount) { 22 | if (elementCount === undefined) { 23 | elementCount = this.values.length - startIdx; 24 | } 25 | if (elementCount === 1) { 26 | return this.values[startIdx]; 27 | } 28 | const result = Buffer.alloc(elementCount * this.elementSize); 29 | const endIdx = startIdx + elementCount; 30 | let offset = 0; 31 | for (let i = startIdx; i < endIdx; i++, offset += this.elementSize) { 32 | this.values[i].copy(result, offset); 33 | } 34 | return result; 35 | } 36 | } 37 | exports.JsVector = JsVector; 38 | //# sourceMappingURL=JsVector.js.map -------------------------------------------------------------------------------- /bin/lib/vectors/WasmVector.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | // CLASS DEFINITION 4 | // ================================================================================================ 5 | class WasmVector { 6 | constructor(memory, base, length, elementSize) { 7 | this.memory = memory; 8 | this.base = base; 9 | this.length = length; 10 | this.elementSize = elementSize; 11 | } 12 | get byteLength() { 13 | return this.length * this.elementSize; 14 | } 15 | copyValue(index, destination, offset) { 16 | const value = Buffer.from(this.memory.buffer, this.base + index * this.elementSize, this.elementSize); 17 | value.copy(destination, offset); 18 | return this.elementSize; 19 | } 20 | toBuffer(startIdx = 0, elementCount) { 21 | const offset = this.base + startIdx * this.elementSize; 22 | let length; 23 | if (elementCount === undefined) { 24 | length = (this.base + this.byteLength) - offset; 25 | } 26 | else { 27 | length = elementCount * this.elementSize; 28 | } 29 | return Buffer.from(this.memory.buffer, offset, length); 30 | } 31 | } 32 | exports.WasmVector = WasmVector; 33 | //# sourceMappingURL=WasmVector.js.map -------------------------------------------------------------------------------- 
/bin/merkle.d.ts: -------------------------------------------------------------------------------- 1 | declare module '@guildofweavers/merkle' { 2 | 3 | // HASHING 4 | // -------------------------------------------------------------------------------------------- 5 | export type HashAlgorithm = 'sha256' | 'blake2s256'; 6 | 7 | /** 8 | * Creates a Hash object for the specified algorithm. If useWasm is set to true, will try to 9 | * instantiate a WebAssembly-optimized version of the algorithm. If WASM optimization is not 10 | * available for the specified algorithm, Node's native implementation will be used. 11 | */ 12 | export function createHash(algorithm: HashAlgorithm, useWasm?: boolean): Hash; 13 | 14 | /** 15 | * Tries to create a WebAssembly-optimized Hash object for the specified algorithm and pass 16 | * the provided options to it. If WASM optimization is not available for the specified algorithm, 17 | * Node's native implementation will be used. 18 | */ 19 | export function createHash(algorithm: HashAlgorithm, options: Partial): Hash; 20 | 21 | export interface WasmOptions { 22 | readonly memory: WebAssembly.Memory; 23 | } 24 | 25 | export interface Hash { 26 | readonly algorithm : HashAlgorithm; 27 | readonly digestSize : number; 28 | readonly isOptimized: boolean; 29 | 30 | /** Hashes the provided value */ 31 | digest(value: Buffer): Buffer; 32 | 33 | /** Hashes a concatenation of a and b */ 34 | merge(a: Buffer, b: Buffer): Buffer; 35 | 36 | buildMerkleNodes(depth: number, leaves: Vector): ArrayBuffer; 37 | 38 | mergeVectorRows(vectors: Vector[]): Vector; 39 | 40 | digestValues(values: Buffer, valueSize: number): Vector; 41 | } 42 | 43 | /** Returns true if WebAssembly optimization is available for the provided algorithm */ 44 | export function isWasmOptimized(hashAlgorithm: HashAlgorithm): boolean; 45 | 46 | // MERKLE TREE 47 | // -------------------------------------------------------------------------------------------- 48 | export class MerkleTree { 
49 | 50 | /** 51 | * Returns a Merkle tree created from the specified values 52 | * @param values Values that form the leaves of the tree 53 | * @param hash Hash object to use for hashing of internal nodes 54 | */ 55 | static create(values: Buffer[] | Vector, hash: Hash): MerkleTree; 56 | 57 | /** 58 | * Returns a Promise for a Merkle tree created from the specified values 59 | * @param values Values that form the leaves of the tree 60 | * @param hash Hash object to use for hashing of internal nodes 61 | */ 62 | static createAsync(values: Buffer[] | Vector, hash: Hash): Promise; 63 | 64 | /** Root of the tree */ 65 | readonly root: Buffer; 66 | 67 | /** Returns a leaf node located at the specified index */ 68 | getLeaf(index: number): Buffer; 69 | 70 | /** Returns all leaf nodes of the tree */ 71 | getLeaves(): Buffer[]; 72 | 73 | /** Returns a Merkle proof for a single leaf at the specified index */ 74 | prove(index: number): Buffer[]; 75 | 76 | /** Returns a compressed Merkle proof for leaves at the specified indexes */ 77 | proveBatch(indexes: number[]): BatchMerkleProof; 78 | 79 | /** 80 | * Verifies Merkle proof for a single index 81 | * @param root Root of the Merkle tree 82 | * @param index Index of a leaf to verify 83 | * @param proof Merkle proof for the leaf at the specified index 84 | * @param hash Hash object to use for hashing of internal nodes 85 | */ 86 | static verify(root: Buffer, index: number, proof: Buffer[], hash: Hash): boolean; 87 | 88 | /** 89 | * Verifies Merkle proof for a list of indexes 90 | * @param root Root of the Merkle tree 91 | * @param index Indexes of leaves to verify 92 | * @param proof Compressed Merkle proof for the leaves at the specified indexes 93 | * @param hash Hash object to use for hashing of internal nodes 94 | */ 95 | static verifyBatch(root: Buffer, indexes: number[], proof: BatchMerkleProof, hash: Hash): boolean; 96 | } 97 | 98 | export interface BatchMerkleProof { 99 | /** leaf nodes located at the indexes covered 
by the proof */ 100 | values: Buffer[]; 101 | 102 | /** Internal nodes that form the actual proof */ 103 | nodes: Buffer[][]; 104 | 105 | /** Depth of the source Merkle tree */ 106 | depth: number; 107 | } 108 | 109 | // INTERNAL DATA STRUCTURES 110 | // -------------------------------------------------------------------------------------------- 111 | export interface Vector { 112 | readonly length : number; 113 | readonly byteLength : number; 114 | readonly elementSize : number; 115 | 116 | copyValue(index: number, destination: Buffer, offset: number): number; 117 | toBuffer(startIdx?: number, elementCount?: number): Buffer; 118 | } 119 | 120 | } -------------------------------------------------------------------------------- /bin/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@guildofweavers/merkle", 3 | "version": "0.3.12", 4 | "description": "Merkle tree and other data structures", 5 | "main": "index.js", 6 | "typings": "merkle.d.ts", 7 | "license": "MIT", 8 | "author": { 9 | "name": "Bobbin Threadbare", 10 | "email": "bobbinth@protonmail.com" 11 | }, 12 | "keywords": [ 13 | "merkle tree", 14 | "merkle proof", 15 | "cryptography" 16 | ], 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/GuildOfWeavers/merkle.git" 20 | }, 21 | "engines": { 22 | "node": ">=12.7.x" 23 | }, 24 | "dependencies": { 25 | "@assemblyscript/loader": "0.8.x" 26 | }, 27 | "devDependencies": { 28 | "@types/chai": "4.2.x", 29 | "@types/mocha": "5.2.x", 30 | "@types/node": "12.7.x", 31 | "assemblyscript": "0.8.x", 32 | "chai": "4.2.x", 33 | "del": "5.0.x", 34 | "gulp": "4.0.x", 35 | "gulp-mocha": "6.0.x", 36 | "mocha": "6.2.x" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /gulpfile.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | // IMPORTS 3 | // 
================================================================================================ 4 | const gulp = require('gulp'); 5 | const del = require('del'); 6 | const exec = require('child_process').exec; 7 | const mocha = require('gulp-mocha'); 8 | 9 | // TASKS 10 | // ================================================================================================ 11 | function clean(cb) { 12 | del(['bin']).then(() => { cb(); }); 13 | } 14 | 15 | function compile(cb) { 16 | exec('tsc -p .', function (err, stdout, stderr) { 17 | if (stdout.length > 0) console.log(stdout); 18 | if (stderr.length > 0) console.error(stderr); 19 | cb(err); 20 | }); 21 | } 22 | 23 | function asbuild(cb) { 24 | const source = 'lib/assembly/blake2s.as'; 25 | const target = '/bin/lib/assembly/blake2s.wasm'; 26 | exec(`npx asc ${source} -b ${target} --sourceMap --validate --importMemory --enable bulk-memory -O3`, function (err, stdout, stderr) { 27 | if (stdout.length > 0) console.log(stdout); 28 | if (stderr.length > 0) console.error(stderr); 29 | cb(err); 30 | }); 31 | } 32 | 33 | function copyFiles(cb) { 34 | gulp.src('./package.json').pipe(gulp.dest('./bin')); 35 | gulp.src('./package-lock.json').pipe(gulp.dest('./bin')); 36 | gulp.src('./merkle.d.ts').pipe(gulp.dest('./bin')); 37 | gulp.src('./.npmignore').pipe(gulp.dest('./bin')); 38 | gulp.src('./README.md').pipe(gulp.dest('./bin')); 39 | cb(); 40 | } 41 | 42 | function publish(cb) { 43 | exec('npm publish bin --access=public', function (err, stdout, stderr) { 44 | if (stdout.length > 0) console.log(stdout); 45 | if (stderr.length > 0) console.error(stderr); 46 | cb(err); 47 | }); 48 | } 49 | 50 | function runTests(cb) { 51 | gulp.src('./bin/tests/**/*.spec.js') 52 | .pipe( mocha({reporter: 'spec', bail: false})) 53 | .on('error', err => { 54 | if (err && (!err.message || err.message !== 'There were test failures')) { 55 | console.error(JSON.stringify(err, null, 2)); 56 | } 57 | } ) 58 | .once('error', () => process.exit(1)) 59 
| .on('end', () => process.exit(0)); 60 | cb(); 61 | } 62 | 63 | const build = gulp.series(clean, asbuild, compile, copyFiles); 64 | 65 | // EXPORTS 66 | // ================================================================================================ 67 | exports.build = build; 68 | exports.publish = gulp.series(build, publish); 69 | exports.test = gulp.series(build, runTests); 70 | exports.default = build; 71 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | // RE-EXPORTS 2 | // ================================================================================================ 3 | export { createHash, isWasmOptimized } from './lib/hash'; 4 | export { MerkleTree } from './lib/MerkleTree'; -------------------------------------------------------------------------------- /lib/MerkleTree.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | import { BatchMerkleProof, Hash, Vector } from '@guildofweavers/merkle'; 4 | import { JsVector } from './vectors/JsVector'; 5 | 6 | // CLASS DEFINITION 7 | // ================================================================================================ 8 | export class MerkleTree { 9 | 10 | readonly depth : number; 11 | readonly nodes : Buffer; 12 | readonly values : Vector; 13 | readonly nodeSize : number; 14 | 15 | // CONSTRUCTORS 16 | // -------------------------------------------------------------------------------------------- 17 | static async createAsync(values: Buffer[] | Vector, hash: Hash) { 18 | // FUTURE: implement asynchronous instantiation 19 | return MerkleTree.create(values, hash); 20 | } 21 | 22 | static create(values: Buffer[] | Vector, hash: Hash) { 23 | const leaves = Array.isArray(values) ? 
new JsVector(values) : values; 24 | const depth = Math.ceil(Math.log2(leaves.length)); 25 | const nodes = hash.buildMerkleNodes(depth, leaves) 26 | return new MerkleTree(nodes, leaves, depth, hash.digestSize); 27 | } 28 | 29 | private constructor(nodes: ArrayBuffer, leaves: Vector, depth: number, nodeSize: number) { 30 | this.depth = depth; 31 | this.nodes = Buffer.from(nodes); 32 | this.nodeSize = nodeSize; 33 | this.values = leaves; 34 | } 35 | 36 | // PUBLIC ACCESSORS 37 | // -------------------------------------------------------------------------------------------- 38 | get root(): Buffer { 39 | // makes a copy of a node at position 1 40 | return this.nodes.slice(this.nodeSize, this.nodeSize + this.nodeSize); 41 | } 42 | 43 | getLeaf(index: number): Buffer { 44 | // makes a copy of the leaf value 45 | return Buffer.from(this.values.toBuffer(index, 1)); 46 | } 47 | 48 | getLeaves(): Buffer[] { 49 | // makes a deep copy of all leaves 50 | const leaves = new Array(this.values.length); 51 | for (let i = 0; i < leaves.length; i++) { 52 | leaves[i] = Buffer.from(this.values.toBuffer(i, 1)); 53 | } 54 | return leaves; 55 | } 56 | 57 | // PUBLIC METHODS 58 | // -------------------------------------------------------------------------------------------- 59 | prove(index: number): Buffer[] { 60 | if (index < 0) throw new TypeError(`Invalid index: ${index}`); 61 | if (index > this.values.length) throw new TypeError(`Invalid index: ${index}`); 62 | if (!Number.isInteger(index)) throw new TypeError(`Invalid index: ${index}`); 63 | 64 | const nodeSize = this.nodeSize; 65 | const nodeCount = this.nodes.byteLength / nodeSize; 66 | 67 | const value1 = this.getLeaf(index); 68 | const value2 = this.getLeaf(index ^ 1); 69 | const proof = [value1, value2]; 70 | 71 | index = (index + nodeCount) >> 1; 72 | while (index > 1) { 73 | let siblingOffset = (index ^ 1) * nodeSize; 74 | let sibling = this.nodes.slice(siblingOffset, siblingOffset + nodeSize); 75 | proof.push(sibling); 76 | 
index = index >> 1; 77 | } 78 | 79 | return proof; 80 | } 81 | 82 | proveBatch(indexes: number[]): BatchMerkleProof { 83 | const nodeSize = this.nodeSize; 84 | const nodeCount = this.nodes.byteLength / nodeSize; 85 | 86 | const indexMap = mapIndexes(indexes, this.values.length - 1); 87 | indexes = normalizeIndexes(indexes); 88 | const proof: BatchMerkleProof = { 89 | values : new Array(indexMap.size), 90 | nodes : new Array(indexes.length), 91 | depth : this.depth 92 | }; 93 | 94 | // populate the proof with leaf node values 95 | let nextIndexes = []; 96 | for (let i = 0; i < indexes.length; i++) { 97 | let index = indexes[i]; 98 | let v1 = this.getLeaf(index); 99 | let v2 = this.getLeaf(index + 1); 100 | 101 | // only values for indexes that were explicitly requested are included in values array 102 | const inputIndex1 = indexMap.get(index); 103 | const inputIndex2 = indexMap.get(index + 1); 104 | if (inputIndex1 !== undefined) { 105 | if (inputIndex2 !== undefined) { 106 | proof.values[inputIndex1] = v1; 107 | proof.values[inputIndex2] = v2; 108 | proof.nodes[i] = []; 109 | } 110 | else { 111 | proof.values[inputIndex1] = v1; 112 | proof.nodes[i] = [v2]; 113 | } 114 | } 115 | else { 116 | proof.values[inputIndex2!] 
= v2; 117 | proof.nodes[i] = [v1]; 118 | } 119 | 120 | nextIndexes.push((index + nodeCount) >> 1); 121 | } 122 | 123 | // add required internal nodes to the proof, skipping redundancies 124 | for (let d = this.depth - 1; d > 0; d--) { 125 | indexes = nextIndexes; 126 | nextIndexes = []; 127 | 128 | for (let i = 0; i < indexes.length; i++) { 129 | let siblingIndex = indexes[i] ^ 1; 130 | if (i + 1 < indexes.length && indexes[i + 1] === siblingIndex) { 131 | i++; 132 | } 133 | else { 134 | let siblingOffset = siblingIndex * nodeSize; 135 | let sibling = this.nodes.slice(siblingOffset, siblingOffset + nodeSize) 136 | proof.nodes[i].push(sibling); 137 | } 138 | 139 | // add parent index to the set of next indexes 140 | nextIndexes.push(siblingIndex >> 1); 141 | } 142 | } 143 | 144 | return proof; 145 | } 146 | 147 | // STATIC METHODS 148 | // -------------------------------------------------------------------------------------------- 149 | static verify(root: Buffer, index: number, proof: Buffer[], hash: Hash): boolean { 150 | 151 | const r = index & 1; 152 | const value1 = proof[r]; 153 | const value2 = proof[1 - r]; 154 | let v = hash.merge(value1, value2); 155 | 156 | index = (index + 2 ** (proof.length - 1)) >> 1; 157 | for ( let i = 2; i < proof.length; i++) { 158 | if (index & 1) { 159 | v = hash.merge(proof[i], v); 160 | } 161 | else { 162 | v = hash.merge(v, proof[i]); 163 | } 164 | index = index >> 1; 165 | } 166 | 167 | return root.equals(v); 168 | } 169 | 170 | static verifyBatch(root: Buffer, indexes: number[], proof: BatchMerkleProof, hash: Hash): boolean { 171 | const v = new Map(); 172 | 173 | // replace odd indexes, offset, and sort in ascending order 174 | const offset = 2 ** proof.depth; 175 | const indexMap = mapIndexes(indexes, offset - 1); 176 | indexes = normalizeIndexes(indexes); 177 | if (indexes.length !== proof.nodes.length) return false; 178 | 179 | // for each index use values to compute parent nodes 180 | let nextIndexes: number[] = []; 181 
| const proofPointers = new Array(indexes.length); 182 | for (let i = 0; i < indexes.length; i++) { 183 | let index = indexes[i]; 184 | 185 | let v1: Buffer, v2: Buffer; 186 | const inputIndex1 = indexMap.get(index); 187 | const inputIndex2 = indexMap.get(index + 1); 188 | 189 | if (inputIndex1 !== undefined) { 190 | if (inputIndex2 !== undefined) { 191 | v1 = proof.values[inputIndex1]; 192 | v2 = proof.values[inputIndex2]; 193 | proofPointers[i] = 0; 194 | } 195 | else { 196 | v1 = proof.values[inputIndex1]; 197 | v2 = proof.nodes[i][0]; 198 | proofPointers[i] = 1; 199 | } 200 | } 201 | else { 202 | v1 = proof.nodes[i][0]; 203 | v2 = proof.values[inputIndex2!]; 204 | 205 | proofPointers[i] = 1; 206 | } 207 | 208 | // if either value wasn't found, proof fails 209 | if (v1 === undefined || v2 === undefined) return false; 210 | 211 | let parent = hash.merge(v1, v2); 212 | let parentIndex = (offset + index >> 1); 213 | 214 | v.set(parentIndex, parent); 215 | nextIndexes.push(parentIndex); 216 | } 217 | 218 | // iteratively move up, until we get to the root 219 | for (let d = proof.depth - 1; d > 0; d--) { 220 | indexes = nextIndexes; 221 | nextIndexes = []; 222 | 223 | for (let i = 0; i < indexes.length; i++) { 224 | let nodeIndex = indexes[i]; 225 | let siblingIndex = nodeIndex ^ 1; 226 | 227 | // determine the sibling 228 | let sibling: Buffer; 229 | if (i + 1 < indexes.length && indexes[i + 1] === siblingIndex) { 230 | sibling = v.get(siblingIndex)!; 231 | i++; 232 | } 233 | else { 234 | let pointer = proofPointers[i]; 235 | sibling = proof.nodes[i][pointer]; 236 | proofPointers[i] = pointer + 1; 237 | } 238 | 239 | let node = v.get(nodeIndex)!; 240 | 241 | // if either node wasn't found, proof fails 242 | if (node === undefined || sibling === undefined) return false; 243 | 244 | // calculate parent node and add it to the next set of nodes 245 | let parent = (nodeIndex & 1) ? 
hash.merge(sibling, node) : hash.merge(node, sibling); 246 | let parentIndex = nodeIndex >> 1; 247 | v.set(parentIndex, parent); 248 | nextIndexes.push(parentIndex); 249 | } 250 | } 251 | 252 | return root.equals(v.get(1)!); 253 | } 254 | } 255 | 256 | // HELPER FUNCTIONS 257 | // ================================================================================================ 258 | function normalizeIndexes(input: number[]) { 259 | input = input.slice().sort(compareNumbers); 260 | const output = new Set(); 261 | for (let index of input) { 262 | output.add(index - (index & 1)); 263 | } 264 | return Array.from(output); 265 | } 266 | 267 | function mapIndexes(input: number[], maxValid: number) { 268 | const output = new Map(); 269 | for (let i = 0; i < input.length; i++) { 270 | let index = input[i]; 271 | output.set(index, i); 272 | 273 | if (index < 0) throw new TypeError(`Invalid index: ${index}`); 274 | if (index > maxValid) throw new TypeError(`Invalid index: ${index}`); 275 | if (!Number.isInteger(index)) throw new TypeError(`Invalid index: ${index}`); 276 | } 277 | 278 | if (input.length !== output.size) throw new Error('Repeating indexes detected'); 279 | 280 | return output; 281 | } 282 | 283 | function compareNumbers(a: number, b: number) { 284 | return a - b; 285 | } -------------------------------------------------------------------------------- /lib/assembly/blake2s.as.ts: -------------------------------------------------------------------------------- 1 | // 2 | /// 3 | 4 | // CONSTANTS 5 | // ================================================================================================ 6 | const SIGMA = new ArrayBuffer(160); 7 | const IV = new ArrayBuffer(32); 8 | 9 | export function getSigmaRef(): usize { 10 | return changetype(SIGMA); 11 | } 12 | 13 | export function getIvRef(): usize { 14 | return changetype(IV); 15 | } 16 | 17 | // MODULE VARIABLES 18 | // 
================================================================================================ 19 | let v = new ArrayBuffer(64); 20 | let m = new ArrayBuffer(64); 21 | let t: u64 = 0; 22 | 23 | // INPUTS / OUTPUTS 24 | // ================================================================================================ 25 | let _output = new ArrayBuffer(32); 26 | let _inputs = new ArrayBuffer(1024); 27 | 28 | export function getInputsRef(): usize { 29 | return changetype(_inputs); 30 | } 31 | 32 | export function getOutputRef(): usize { 33 | return changetype(_output); 34 | } 35 | 36 | export function newArray(length: i32): ArrayBuffer { 37 | return new ArrayBuffer(length); 38 | } 39 | 40 | // PUBLIC FUNCTIONS 41 | // ================================================================================================ 42 | export function hash(vRef: usize, vLength: i32, resRef: usize): void { 43 | 44 | // initialize the context 45 | store(resRef, 0x6b08e647); // h[0] = IV[0] ^ 0x01010000 ^ 0 ^ 32; 46 | memory.copy(resRef + 4, changetype(IV) + 4, 28); 47 | t = 0; 48 | 49 | // run intermediate compressions 50 | let mRef = changetype(m); 51 | while (vLength > 64) { 52 | memory.copy(mRef, vRef + t, 64); 53 | t += 64; 54 | compress(resRef, false); 55 | vLength -= 64; 56 | } 57 | 58 | // run final compression 59 | if (vLength > 0) { 60 | memory.copy(mRef, vRef + t, vLength); 61 | } 62 | memory.fill(mRef + vLength, 0, 64 - vLength); 63 | t += vLength; 64 | compress(resRef, true); 65 | } 66 | 67 | export function hashValues1(vRef: usize, resRef: usize, vElementSize: i32, vElementCount: i32): usize { 68 | for (let i = 0; i < vElementCount; i++) { 69 | hash(vRef, vElementSize, resRef); 70 | vRef += vElementSize; 71 | resRef += 32; 72 | } 73 | return resRef; 74 | } 75 | 76 | export function hashValues2(vRef: usize, resRef: usize, vElementSize: i32, vElementCount: i32): usize { 77 | for (let i = vElementCount - 1; i > 0; i--) { 78 | hash(vRef, vElementSize, resRef); 79 | vRef -= 
vElementSize; 80 | resRef -= 32; 81 | } 82 | return resRef; 83 | } 84 | 85 | export function mergeArrayElements(vRefs: usize, resRef: usize, vCount: i32, vElementCount: i32, vElementSize: i32): void { 86 | 87 | let mRef = changetype(m); 88 | let vLength = vCount * vElementSize; 89 | let vOffset = 0; 90 | let resEnd = resRef + vElementCount * 32; 91 | 92 | while (resRef < resEnd) { 93 | 94 | // initialize the context 95 | store(resRef, 0x6b08e647); // h[0] = IV[0] ^ 0x01010000 ^ 0 ^ 32; 96 | memory.copy(resRef + 4, changetype(IV) + 4, 28); 97 | t = 0; 98 | let c = 0; 99 | 100 | // run intermediate compressions 101 | for (let j = 0; j < vCount; j++) { 102 | let vRef = load(vRefs + (j << 3)); 103 | memory.copy(mRef + c, vRef + vOffset, vElementSize); 104 | c += vElementSize; 105 | t += vElementSize; 106 | 107 | if (c == 64 && t != vLength) { 108 | c = 0; 109 | compress(resRef, false); 110 | } 111 | } 112 | 113 | // run final compression 114 | memory.fill(mRef + c, 0, 64 - c); 115 | compress(resRef, true); 116 | 117 | // update references for the next loop iteration 118 | resRef += 32; 119 | vOffset += vElementSize; 120 | } 121 | } 122 | 123 | // INTERNAL FUNCTIONS 124 | // ================================================================================================ 125 | function compress(hRef: usize, last: boolean): void { 126 | 127 | let vRef = changetype(v); 128 | let iRef = changetype(IV); 129 | 130 | memory.copy(vRef, hRef, 32); // v[0-31] = h[0-31] 131 | memory.copy(vRef + 32, iRef, 32); // v[32-63] = IV[0-31] 132 | 133 | let v12 = load(vRef, 12 * 4); // v[12] = v[12] ^ t 134 | store(vRef, v12 ^ t, 12 * 4); 135 | 136 | let v13 = load(vRef, 13 * 4); // v[13] = v[13] ^ (t >> 32) 137 | store(vRef, v13 ^ (t >> 32), 13 * 4); 138 | 139 | if (last) { 140 | let v14 = load(vRef, 14 * 4); 141 | store(vRef, ~v14, 14 * 4); 142 | } 143 | 144 | let sRef = changetype(SIGMA), sRefEnd = sRef + 160, si: u64; 145 | while (sRef < sRefEnd) { 146 | si = load(sRef, 0); 147 | mix( 
0, 16, 32, 48, (si), (si >> 8)); 148 | mix( 4, 20, 36, 52, (si >> 16), (si >> 24)); 149 | mix( 8, 24, 40, 56, (si >> 32), (si >> 40)); 150 | mix(12, 28, 44, 60, (si >> 48), (si >> 56)); 151 | 152 | si = load(sRef, 8); 153 | mix( 0, 20, 40, 60, (si), (si >> 8)); 154 | mix( 4, 24, 44, 48, (si >> 16), (si >> 24)); 155 | mix( 8, 28, 32, 52, (si >> 32), (si >> 40)); 156 | mix(12, 16, 36, 56, (si >> 48), (si >> 56)); 157 | 158 | sRef += 16; 159 | } 160 | 161 | let v1: u32, v2: u32, hv: u32; 162 | for (let i = 0; i < 8; i++, vRef += 4, hRef += 4) { 163 | v1 = load(vRef); 164 | v2 = load(vRef, 32); 165 | hv = load(hRef); 166 | store(hRef, hv ^ v1 ^ v2); 167 | } 168 | } 169 | 170 | // @ts-ignore 171 | @inline 172 | function mix(a: u8, b: u8, c: u8, d: u8, xi: u8, yi: u8): void { 173 | 174 | let mRef = changetype(m); 175 | let x = load(mRef + xi); 176 | let y = load(mRef + yi); 177 | 178 | let vRef = changetype(v); 179 | let vaRef = vRef + a; 180 | let vbRef = vRef + b; 181 | let vcRef = vRef + c; 182 | let vdRef = vRef + d; 183 | 184 | let va = load(vaRef); // v[a] = v[a] + v[b] + x 185 | let vb = load(vbRef); 186 | va = va + vb + x; 187 | let vd = load(vdRef); // v[d] = rotr(v[d] ^ v[a], 16) 188 | vd = rotr(vd ^ va, 16); 189 | let vc = load(vcRef); // v[c] = v[c] + v[d] 190 | vc = vc + vd; 191 | vb = rotr(vb ^ vc, 12); // v[b] = rotr(v[b] ^ v[c], 12) 192 | va = va + vb + y; // v[a] = v[a] + v[b] + y 193 | vd = rotr(vd ^ va, 8); // v[d] = rotr(v[d] ^ v[a], 8) 194 | vc = vc + vd; // v[c] = v[c] + v[d] 195 | vb = rotr(vb ^ vc, 7); // v[b] = rotr(v[b] ^ v[c], 7) 196 | 197 | store(vaRef, va); 198 | store(vbRef, vb); 199 | store(vcRef, vc); 200 | store(vdRef, vd); 201 | } -------------------------------------------------------------------------------- /lib/assembly/index.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | 
import * as fs from 'fs';
import * as loader from '@assemblyscript/loader';

// CONSTANTS
// ================================================================================================
const BLAKE2S_WASM = `${__dirname}/blake2s.wasm`;
// BLAKE2s initialization vector (RFC 7693)
const IV = [
    0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A,
    0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19];
// BLAKE2s message schedule: 10 rounds x 16 word indexes
const SIGMA = [
     0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15,
    14, 10,  4,  8,  9, 15, 13,  6,  1, 12,  0,  2, 11,  7,  5,  3,
    11,  8, 12,  0,  5,  2, 15, 13, 10, 14,  3,  6,  7,  1,  9,  4,
     7,  9,  3,  1, 13, 12, 11, 14,  2,  6,  5, 10,  4,  0, 15,  8,
     9,  0,  5,  7,  2,  4, 10, 15, 14,  1, 11, 12,  6,  8,  3, 13,
     2, 12,  6, 10,  0, 11,  8,  3,  4, 13,  7,  5, 15, 14,  1,  9,
    12,  5,  1, 15, 14, 13,  4, 10,  0,  7,  6,  3,  9,  2,  8, 11,
    13, 11,  7, 14, 12,  1,  3,  9,  5,  0, 15,  4,  8,  6,  2, 10,
     6, 15, 14,  9, 11,  3,  0,  8, 12,  2, 13,  7,  1,  4, 10,  5,
    10,  2,  8,  4,  7,  6,  1,  5, 15, 11,  9, 14,  3, 12, 13,  0];

// INTERFACES
// ================================================================================================
// Typed surface of the compiled blake2s.wasm module
export type WasmBlake2s = loader.ASUtil & {
    getSigmaRef(): number;
    getIvRef(): number;
    getInputsRef(): number;
    getOutputRef(): number;
    newArray(length: number): number;

    hash(vRef: number, vLength: number, resRef: number): void;

    hashValues1(vRef: number, resRef: number, vElementSize: number, vElementCount: number): number;
    hashValues2(vRef: number, resRef: number, vElementSize: number, vElementCount: number): number;

    mergeArrayElements(vRefs: number, resRef: number, vCount: number, vElementCount: number, vElementSize: number): void;

    memory: WebAssembly.Memory;
}

// PUBLIC MODULE
// ================================================================================================
// Instantiates the blake2s WASM module and seeds its SIGMA and IV constant buffers.
export function instantiateBlake2s(memory?: WebAssembly.Memory): WasmBlake2s {
    if (memory === undefined) {
        memory = new WebAssembly.Memory({ initial: 10 });
    }

    const wasm: WasmBlake2s = loader.instantiateSync(fs.readFileSync(BLAKE2S_WASM), {
        env: { memory }
    });

    const memU8 = new Uint8Array(wasm.memory.buffer);

    // SIGMA entries are pre-multiplied by 4 so the module can use them as byte offsets
    let sIdx = wasm.getSigmaRef();
    for (let sigma of SIGMA) {
        memU8[sIdx] = sigma * 4;
        sIdx++;
    }

    const memU32 = new Uint32Array(wasm.memory.buffer);
    let iIdx = wasm.getIvRef() >> 2;
    for (let iv of IV) {
        memU32[iIdx] = iv;
        iIdx++;
    }

    return wasm;
}
-------------------------------------------------------------------------------- /lib/hash/JsHash.ts: --------------------------------------------------------------------------------
// IMPORTS
// ================================================================================================
import * as crypto from 'crypto';
import { Hash, HashAlgorithm, Vector } from "@guildofweavers/merkle";
import { JsVector } from '../vectors/JsVector';

// MODULE VARIABLES
// ================================================================================================
const DIGEST_SIZE = 32;     // 32 bytes
const DOUBLE_INPUT_LENGTH = 2 * DIGEST_SIZE;
const NULL_BUFFER = Buffer.alloc(DIGEST_SIZE);

// CLASS DEFINITION
// ================================================================================================
// Pure-JS Hash implementation backed by Node's crypto module.
export class JsHash implements Hash {

    readonly algorithm: HashAlgorithm

    // CONSTRUCTOR
    // --------------------------------------------------------------------------------------------
    constructor(algorithm: HashAlgorithm) {
        this.algorithm = algorithm;
    }

    // PROPERTY ACCESSORS
    // --------------------------------------------------------------------------------------------
    get digestSize(): number {
        return DIGEST_SIZE;
    }

    get isOptimized(): boolean {
        return false;
    }

    // PUBLIC METHODS
    // --------------------------------------------------------------------------------------------
    // Hashes the provided value.
    digest(value: Buffer): Buffer {
        const hash = crypto.createHash(this.algorithm);
        hash.update(value)
        return hash.digest();
    }

    // Hashes a concatenation of a and b.
    merge(a: Buffer, b: Buffer): Buffer {
        const hash = crypto.createHash(this.algorithm);
        hash.update(a);
        hash.update(b);
        return hash.digest();
    }

    // Builds the full array of internal Merkle tree nodes for the given leaves;
    // node 1 is the root, the first half of the array holds upper levels.
    buildMerkleNodes(depth: number, leaves: Vector): ArrayBuffer {

        // allocate memory for tree nodes
        const nodeCount = 2**depth;
        const nodes = new ArrayBuffer(nodeCount * DIGEST_SIZE);
        const nodeBuffer = Buffer.from(nodes);

        // build first row of internal nodes (parents of leaves)
        const parentCount = nodeCount / 2;
        const evenLeafCount = (leaves.length & 1) ? leaves.length - 1 : leaves.length;
        let tOffset = parentCount * DIGEST_SIZE;

        const lBuffer = leaves.toBuffer();
        const doubleElementSize = leaves.elementSize * 2;
        let sOffset = 0;
        for (let i = 0; i < evenLeafCount; i += 2, sOffset += doubleElementSize, tOffset += DIGEST_SIZE) {
            let hash = crypto.createHash(this.algorithm);
            hash.update(lBuffer.slice(sOffset, sOffset + doubleElementSize));
            hash.digest().copy(nodeBuffer, tOffset);
        }

        // if the number of leaves was odd, process the last leaf
        if (evenLeafCount !== leaves.length) {
            let hash = crypto.createHash(this.algorithm);
            hash.update(lBuffer.slice(sOffset));
            hash.update(NULL_BUFFER);
            hash.digest().copy(nodeBuffer, tOffset);
            tOffset += DIGEST_SIZE;
        }

        // if number of leaves was not a power of 2, assume all other leaves are NULL
        if (leaves.length < nodeCount) {
            const nullParent = this.merge(NULL_BUFFER, NULL_BUFFER);
            while (tOffset < nodes.byteLength) {
                nullParent.copy(nodeBuffer, tOffset);
                tOffset += DIGEST_SIZE;
} 87 | } 88 | 89 | // calculate all other tree nodes 90 | for (let i = parentCount - 1; i > 0; i--) { 91 | let tIndex = i * DIGEST_SIZE; 92 | let sIndex = tIndex << 1; 93 | let hash = crypto.createHash(this.algorithm); 94 | hash.update(nodeBuffer.slice(sIndex, sIndex + DOUBLE_INPUT_LENGTH)); 95 | hash.digest().copy(nodeBuffer, tIndex); 96 | } 97 | 98 | return nodes; 99 | } 100 | 101 | mergeVectorRows(vectors: Vector[]): Vector { 102 | const elementCount = vectors[0].length; 103 | const elementSize = vectors[0].elementSize; 104 | 105 | const result = new Array(elementCount); 106 | const buffer = Buffer.allocUnsafe(vectors.length * elementSize); 107 | for (let i = 0; i < elementCount; i++) { 108 | let offset = 0; 109 | for (let j = 0; j < vectors.length; j++) { 110 | offset += vectors[j].copyValue(i, buffer, offset); 111 | } 112 | result[i] = this.digest(buffer); 113 | } 114 | return new JsVector(result); 115 | } 116 | 117 | digestValues(values: Buffer, valueSize: number): Vector { 118 | const elementCount = values.byteLength / valueSize; 119 | if (!Number.isInteger(elementCount)) { 120 | throw new Error('Values buffer cannot contain partial number of elements'); 121 | } 122 | 123 | const result = new Array(elementCount); 124 | for (let i = 0, offset = 0; i < elementCount; i++, offset += valueSize) { 125 | result[i] = this.digest(values.slice(offset, offset + valueSize)); 126 | } 127 | 128 | return new JsVector(result); 129 | } 130 | } -------------------------------------------------------------------------------- /lib/hash/index.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | import { HashAlgorithm, Hash, WasmOptions } from '@guildofweavers/merkle'; 4 | import { WasmBlake2s } from './WasmBlake2s'; 5 | import { JsHash } from './JsHash'; 6 | 7 | // PUBLIC FUNCTIONS 8 | // 
================================================================================================ 9 | export function createHash(algorithm: HashAlgorithm, useWasm?: boolean): Hash 10 | export function createHash(algorithm: HashAlgorithm, options: Partial): Hash 11 | export function createHash(algorithm: HashAlgorithm, useWasmOrOptions?: boolean | Partial): Hash { 12 | if (!useWasmOrOptions) { 13 | return new JsHash(algorithm); 14 | } 15 | 16 | const HashCtr = getHashConstructor(algorithm); 17 | if (!HashCtr) { 18 | return new JsHash(algorithm); 19 | } 20 | 21 | const wasmOptions = normalizeWasmOptions(useWasmOrOptions); 22 | return new HashCtr(wasmOptions); 23 | } 24 | 25 | export function isWasmOptimized(algorithm: HashAlgorithm): boolean { 26 | switch (algorithm) { 27 | case 'blake2s256': { 28 | return true; 29 | } 30 | default: { 31 | return false; 32 | } 33 | } 34 | } 35 | 36 | // HELPER FUNCTIONS 37 | // ================================================================================================ 38 | function getHashConstructor(algorithm: HashAlgorithm) { 39 | switch (algorithm) { 40 | case 'blake2s256': { 41 | return WasmBlake2s; 42 | } 43 | default: { 44 | return undefined; 45 | } 46 | } 47 | } 48 | 49 | function normalizeWasmOptions(useWasmOrOptions: boolean | Partial): WasmOptions { 50 | if (typeof useWasmOrOptions === 'boolean') { 51 | return { memory: new WebAssembly.Memory({ initial: 10 }) }; 52 | } 53 | 54 | const memory = useWasmOrOptions.memory || new WebAssembly.Memory({ initial: 10 }); 55 | return { memory }; 56 | } -------------------------------------------------------------------------------- /lib/hash/wasmBlake2s.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | import { Hash, HashAlgorithm, Vector, WasmOptions } from "@guildofweavers/merkle"; 4 | import { instantiateBlake2s, 
WasmBlake2s as Blake2sWasm } from '../assembly'; 5 | import { WasmVector } from "../vectors/WasmVector"; 6 | 7 | // MODULE VARIABLES 8 | // ================================================================================================ 9 | const DIGEST_SIZE = 32; // 32 bytes 10 | const NULL_BUFFER = Buffer.alloc(DIGEST_SIZE); 11 | 12 | // CLASS DEFINITION 13 | // ================================================================================================ 14 | export class WasmBlake2s implements Hash { 15 | 16 | readonly wasm : Blake2sWasm; 17 | readonly iRef : number; 18 | readonly oRef : number; 19 | readonly oEnd : number; 20 | 21 | // CONSTRUCTOR 22 | // -------------------------------------------------------------------------------------------- 23 | constructor(options: WasmOptions) { 24 | this.wasm = instantiateBlake2s(options.memory); 25 | this.iRef = this.wasm.getInputsRef(); 26 | this.oRef = this.wasm.getOutputRef(); 27 | this.oEnd = this.oRef + DIGEST_SIZE; 28 | } 29 | 30 | // PROPERTY ACCESSORS 31 | // -------------------------------------------------------------------------------------------- 32 | get algorithm(): HashAlgorithm { 33 | return "blake2s256"; 34 | } 35 | 36 | get digestSize(): number { 37 | return DIGEST_SIZE; 38 | } 39 | 40 | get isOptimized(): boolean { 41 | return true; 42 | } 43 | 44 | // PUBLIC METHODS 45 | // -------------------------------------------------------------------------------------------- 46 | digest(value: Buffer): Buffer { 47 | let memU8 = new Uint8Array(this.wasm.memory.buffer); 48 | // TODO: investigate checking if the buffer comes from shared memory 49 | if (value.byteLength < 4096) { 50 | memU8.set(value, this.iRef); 51 | this.wasm.hash(this.iRef, value.byteLength, this.oRef); 52 | } 53 | else { 54 | const vRef = this.wasm.newArray(value.byteLength); 55 | if (memU8.buffer !== this.wasm.memory.buffer) { 56 | memU8 = new Uint8Array(this.wasm.memory.buffer); 57 | } 58 | memU8.set(value, vRef); 59 | 
this.wasm.hash(vRef, value.byteLength, this.oRef); 60 | this.wasm.__release(vRef); 61 | } 62 | return Buffer.from(memU8.subarray(this.oRef, this.oEnd)); 63 | } 64 | 65 | merge(a: Buffer, b: Buffer): Buffer { 66 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 67 | memU8.set(a, this.iRef); 68 | memU8.set(b, this.iRef + a.byteLength); 69 | this.wasm.hash(this.iRef, a.byteLength + b.byteLength, this.oRef); 70 | return Buffer.from(memU8.subarray(this.oRef, this.oEnd)); 71 | } 72 | 73 | buildMerkleNodes(depth: number, leaves: Vector): ArrayBuffer { 74 | 75 | const wasm = this.wasm, iRef = this.iRef; 76 | 77 | // allocate memory for tree nodes 78 | const nodeCount = 1 << depth; 79 | const bufferLength = nodeCount * DIGEST_SIZE; 80 | const nRef = this.wasm.newArray(bufferLength); 81 | 82 | // build first row of internal nodes (parents of leaves) 83 | const parentCount = nodeCount >>> 1; // nodeCount / 2 84 | const evenLeafCount = (leaves.length & 1) ? leaves.length - 1 : leaves.length; 85 | let resRef = nRef + parentCount * DIGEST_SIZE; 86 | 87 | let lBuffer = leaves.toBuffer(), lRef = lBuffer.byteOffset, releaseLeaves = false; 88 | if (lBuffer.buffer !== wasm.memory.buffer) { 89 | // if the leaves buffer belongs to some other WASM memory, copy it into local memory 90 | lRef = wasm.newArray(lBuffer.byteLength); 91 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 92 | memU8.set(lBuffer, lRef); 93 | releaseLeaves = true; 94 | } 95 | resRef = wasm.hashValues1(lRef, resRef, leaves.elementSize << 1, evenLeafCount >>> 1); 96 | 97 | // if the leaves were copied into local memory, free that memory 98 | if (releaseLeaves) { 99 | wasm.__release(lRef); 100 | } 101 | 102 | // if the number of leaves was odd, process the last leaf 103 | if (evenLeafCount !== leaves.length) { 104 | const lastLeaf = Buffer.from(lBuffer.slice(lBuffer.byteLength - leaves.elementSize)); 105 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 106 | memU8.set(lastLeaf, iRef); 107 | 
memU8.set(NULL_BUFFER, iRef + lastLeaf.length); 108 | wasm.hash(iRef, lastLeaf.length + DIGEST_SIZE, resRef); 109 | resRef += DIGEST_SIZE; 110 | } 111 | 112 | // if number of leaves was not a power of 2, assume all other leaves are NULL 113 | if (leaves.length < nodeCount) { 114 | const nullParent = this.merge(NULL_BUFFER, NULL_BUFFER); 115 | const resEnd = nRef + bufferLength; 116 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 117 | while (resRef < resEnd) { 118 | memU8.set(nullParent, resRef); 119 | resRef += DIGEST_SIZE; 120 | } 121 | } 122 | 123 | // calculate all other tree nodes 124 | let tIndex = (parentCount - 1) * DIGEST_SIZE 125 | let sIndex = tIndex << 1; 126 | wasm.hashValues2(nRef + sIndex, nRef + tIndex, DIGEST_SIZE << 1, parentCount); 127 | 128 | // copy the buffer out of WASM memory, free the memory, and return the buffer 129 | const nBuffer = this.wasm.memory.buffer.slice(nRef, nRef + bufferLength); 130 | this.wasm.__release(nRef); 131 | return nBuffer; 132 | } 133 | 134 | mergeVectorRows(vectors: Vector[]): Vector { 135 | const elementCount = vectors[0].length; 136 | const elementSize = vectors[0].elementSize; 137 | 138 | if (elementSize > 64) { 139 | throw new Error(`Cannot merge vector rows: vector element size must be smaller than 64 bytes`); 140 | } 141 | else if (64 % elementSize !== 0) { 142 | throw new Error(`Cannot merge vector rows: vector element size must be a divisor of 64`); 143 | } 144 | 145 | const vRefs = this.wasm.newArray(vectors.length * 8); 146 | const vIdx = vRefs >>> 3; 147 | const refsToRelease = new Set(); 148 | 149 | // build array of references to vectors 150 | let vRef: number; 151 | let memU8 = new Uint8Array(this.wasm.memory.buffer); 152 | for (let i = 0; i < vectors.length; i++) { 153 | let buffer = vectors[i].toBuffer(); 154 | if (buffer.buffer === this.wasm.memory.buffer) { 155 | // if the vector is already in WASM memory, just cache the reference to it 156 | vRef = buffer.byteOffset; 157 | } 158 | else { 
159 | // otherwise, copy the vector into WASM memory 160 | vRef = this.wasm.newArray(buffer.byteLength); 161 | if (memU8.buffer !== this.wasm.memory.buffer) { 162 | memU8 = new Uint8Array(this.wasm.memory.buffer); 163 | } 164 | memU8.set(vectors[i].toBuffer(), vRef); 165 | refsToRelease.add(vRef); 166 | } 167 | const memU64 = new BigUint64Array(this.wasm.memory.buffer); 168 | memU64[vIdx + i] = BigInt(vRef); 169 | } 170 | 171 | const resRef = this.wasm.newArray(elementCount * DIGEST_SIZE); 172 | this.wasm.mergeArrayElements(vRefs, resRef, vectors.length, elementCount, elementSize); 173 | 174 | // release all memory that was used up during the operation 175 | this.wasm.__release(vRefs); 176 | for (let vRef of refsToRelease) { 177 | this.wasm.__release(vRef); 178 | } 179 | 180 | // build and return a vector with hashes 181 | return new WasmVector(this.wasm.memory, resRef, elementCount, DIGEST_SIZE); 182 | } 183 | 184 | digestValues(values: Buffer, valueSize: number): Vector { 185 | const elementCount = values.byteLength / valueSize; 186 | if (!Number.isInteger(elementCount)) { 187 | throw new Error('Values buffer cannot contain partial number of elements'); 188 | } 189 | 190 | let vRef: number, releaseValues: boolean; 191 | if (this.wasm.memory.buffer === values.buffer) { 192 | // if the vector is already in WASM memory, just cache the reference to it 193 | vRef = values.byteOffset; 194 | releaseValues = false; 195 | } 196 | else { 197 | // otherwise, copy the vector into WASM memory 198 | vRef = this.wasm.newArray(values.byteLength); 199 | const memU8 = new Uint8Array(this.wasm.memory.buffer); 200 | memU8.set(values, vRef); 201 | releaseValues = true; 202 | } 203 | 204 | // allocate memory to hold the results and hash the values 205 | const resRef = this.wasm.newArray(elementCount * DIGEST_SIZE); 206 | this.wasm.hashValues1(vRef, resRef, valueSize, elementCount); 207 | 208 | // if the values were copied into WASM memory during the operation, free the memory 209 | if 
(releaseValues) { 210 | this.wasm.__release(vRef); 211 | } 212 | 213 | return new WasmVector(this.wasm.memory, resRef, elementCount, DIGEST_SIZE); 214 | } 215 | } -------------------------------------------------------------------------------- /lib/vectors/JsVector.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | import { Vector } from "@guildofweavers/merkle"; 4 | 5 | // CLASS DEFINITION 6 | // ================================================================================================ 7 | export class JsVector implements Vector { 8 | 9 | readonly values : Buffer[]; 10 | readonly elementSize : number; 11 | 12 | constructor(values: Buffer[]) { 13 | this.values = values; 14 | this.elementSize = values[0].byteLength; 15 | } 16 | 17 | get byteLength(): number { 18 | return this.values.length * this.elementSize; 19 | } 20 | 21 | get length(): number { 22 | return this.values.length; 23 | } 24 | 25 | copyValue(index: number, destination: Buffer, offset: number): number { 26 | const value = this.values[index]; 27 | value.copy(destination, offset); 28 | return this.elementSize; 29 | } 30 | 31 | toBuffer(startIdx = 0, elementCount?: number): Buffer { 32 | if (elementCount === undefined) { 33 | elementCount = this.values.length - startIdx; 34 | } 35 | 36 | if (elementCount === 1) { 37 | return this.values[startIdx]; 38 | } 39 | 40 | const result = Buffer.alloc(elementCount * this.elementSize); 41 | const endIdx = startIdx + elementCount; 42 | let offset = 0; 43 | for (let i = startIdx; i < endIdx; i++, offset += this.elementSize) { 44 | this.values[i].copy(result, offset); 45 | } 46 | 47 | return result; 48 | } 49 | } -------------------------------------------------------------------------------- /lib/vectors/WasmVector.ts: -------------------------------------------------------------------------------- 1 | // 
IMPORTS 2 | // ================================================================================================ 3 | import { Vector } from '@guildofweavers/merkle'; 4 | 5 | // CLASS DEFINITION 6 | // ================================================================================================ 7 | export class WasmVector implements Vector { 8 | 9 | readonly memory : WebAssembly.Memory; 10 | readonly base : number; 11 | readonly length : number; 12 | readonly elementSize: number; 13 | 14 | constructor(memory: WebAssembly.Memory, base: number, length: number, elementSize: number) { 15 | this.memory = memory; 16 | this.base = base; 17 | this.length = length; 18 | this.elementSize = elementSize; 19 | } 20 | 21 | get byteLength(): number { 22 | return this.length * this.elementSize; 23 | } 24 | 25 | copyValue(index: number, destination: Buffer, offset: number): number { 26 | const value = Buffer.from(this.memory.buffer, this.base + index * this.elementSize, this.elementSize); 27 | value.copy(destination, offset); 28 | return this.elementSize; 29 | } 30 | 31 | toBuffer(startIdx = 0, elementCount?: number): Buffer { 32 | const offset = this.base + startIdx * this.elementSize; 33 | let length: number; 34 | if (elementCount === undefined) { 35 | length = (this.base + this.byteLength) - offset; 36 | } 37 | else { 38 | length = elementCount * this.elementSize; 39 | } 40 | return Buffer.from(this.memory.buffer, offset, length); 41 | } 42 | } -------------------------------------------------------------------------------- /merkle.d.ts: -------------------------------------------------------------------------------- 1 | declare module '@guildofweavers/merkle' { 2 | 3 | // HASHING 4 | // -------------------------------------------------------------------------------------------- 5 | export type HashAlgorithm = 'sha256' | 'blake2s256'; 6 | 7 | /** 8 | * Creates a Hash object for the specified algorithm. 
If useWasm is set to true, will try to 9 | * instantiate a WebAssembly-optimized version of the algorithm. If WASM optimization is not 10 | * available for the specified algorithm, Node's native implementation will be used. 11 | */ 12 | export function createHash(algorithm: HashAlgorithm, useWasm?: boolean): Hash; 13 | 14 | /** 15 | * Tries to create a WebAssembly-optimized Hash object for the specified algorithm and pass 16 | * the provided options to it. If WASM optimization is not available for the specified algorithm, 17 | * Node's native implementation will be used. 18 | */ 19 | export function createHash(algorithm: HashAlgorithm, options: Partial): Hash; 20 | 21 | export interface WasmOptions { 22 | readonly memory: WebAssembly.Memory; 23 | } 24 | 25 | export interface Hash { 26 | readonly algorithm : HashAlgorithm; 27 | readonly digestSize : number; 28 | readonly isOptimized: boolean; 29 | 30 | /** Hashes the provided value */ 31 | digest(value: Buffer): Buffer; 32 | 33 | /** Hashes a concatenation of a and b */ 34 | merge(a: Buffer, b: Buffer): Buffer; 35 | 36 | buildMerkleNodes(depth: number, leaves: Vector): ArrayBuffer; 37 | 38 | mergeVectorRows(vectors: Vector[]): Vector; 39 | 40 | digestValues(values: Buffer, valueSize: number): Vector; 41 | } 42 | 43 | /** Returns true if WebAssembly optimization is available for the provided algorithm */ 44 | export function isWasmOptimized(hashAlgorithm: HashAlgorithm): boolean; 45 | 46 | // MERKLE TREE 47 | // -------------------------------------------------------------------------------------------- 48 | export class MerkleTree { 49 | 50 | /** 51 | * Returns a Merkle tree created from the specified values 52 | * @param values Values that form the leaves of the tree 53 | * @param hash Hash object to use for hashing of internal nodes 54 | */ 55 | static create(values: Buffer[] | Vector, hash: Hash): MerkleTree; 56 | 57 | /** 58 | * Returns a Promise for a Merkle tree created from the specified values 59 | * @param 
values Values that form the leaves of the tree 60 | * @param hash Hash object to use for hashing of internal nodes 61 | */ 62 | static createAsync(values: Buffer[] | Vector, hash: Hash): Promise<MerkleTree>; 63 | 64 | /** Root of the tree */ 65 | readonly root: Buffer; 66 | 67 | /** Returns a leaf node located at the specified index */ 68 | getLeaf(index: number): Buffer; 69 | 70 | /** Returns all leaf nodes of the tree */ 71 | getLeaves(): Buffer[]; 72 | 73 | /** Returns a Merkle proof for a single leaf at the specified index */ 74 | prove(index: number): Buffer[]; 75 | 76 | /** Returns a compressed Merkle proof for leaves at the specified indexes */ 77 | proveBatch(indexes: number[]): BatchMerkleProof; 78 | 79 | /** 80 | * Verifies Merkle proof for a single index 81 | * @param root Root of the Merkle tree 82 | * @param index Index of a leaf to verify 83 | * @param proof Merkle proof for the leaf at the specified index 84 | * @param hash Hash object to use for hashing of internal nodes 85 | */ 86 | static verify(root: Buffer, index: number, proof: Buffer[], hash: Hash): boolean; 87 | 88 | /** 89 | * Verifies Merkle proof for a list of indexes 90 | * @param root Root of the Merkle tree 91 | * @param indexes Indexes of leaves to verify 92 | * @param proof Compressed Merkle proof for the leaves at the specified indexes 93 | * @param hash Hash object to use for hashing of internal nodes 94 | */ 95 | static verifyBatch(root: Buffer, indexes: number[], proof: BatchMerkleProof, hash: Hash): boolean; 96 | } 97 | 98 | export interface BatchMerkleProof { 99 | /** leaf nodes located at the indexes covered by the proof */ 100 | values: Buffer[]; 101 | 102 | /** Internal nodes that form the actual proof */ 103 | nodes: Buffer[][]; 104 | 105 | /** Depth of the source Merkle tree */ 106 | depth: number; 107 | } 108 | 109 | // INTERNAL DATA STRUCTURES 110 | // -------------------------------------------------------------------------------------------- 111 | export interface Vector { 112 | 
readonly length : number; 113 | readonly byteLength : number; 114 | readonly elementSize : number; 115 | 116 | copyValue(index: number, destination: Buffer, offset: number): number; 117 | toBuffer(startIdx?: number, elementCount?: number): Buffer; 118 | } 119 | 120 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@guildofweavers/merkle", 3 | "version": "0.3.12", 4 | "description": "Merkle tree and other data structures", 5 | "main": "index.js", 6 | "typings": "merkle.d.ts", 7 | "license": "MIT", 8 | "author": { 9 | "name": "Bobbin Threadbare", 10 | "email": "bobbinth@protonmail.com" 11 | }, 12 | "keywords": [ 13 | "merkle tree", 14 | "merkle proof", 15 | "cryptography" 16 | ], 17 | "repository": { 18 | "type": "git", 19 | "url": "https://github.com/GuildOfWeavers/merkle.git" 20 | }, 21 | "engines": { 22 | "node": ">=12.7.x" 23 | }, 24 | "dependencies": { 25 | "@assemblyscript/loader": "0.8.x" 26 | }, 27 | "devDependencies": { 28 | "@types/chai": "4.2.x", 29 | "@types/mocha": "5.2.x", 30 | "@types/node": "12.7.x", 31 | "assemblyscript": "0.8.x", 32 | "chai": "4.2.x", 33 | "del": "5.0.x", 34 | "gulp": "4.0.x", 35 | "gulp-mocha": "6.0.x", 36 | "mocha": "6.2.x" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tests/MerkleTree.spec.ts: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | 3 | import { MerkleTree } from '../lib/MerkleTree'; 4 | import { createHash } from '../lib/hash'; 5 | 6 | const hash = createHash('sha256'); 7 | 8 | const leafCount = 8; 9 | const elements = [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h' ]; 10 | const bElements = elements.map(e => Buffer.from(e)); 11 | 12 | const h12 = hash.digest(Buffer.from(elements[0] + elements[1])); 13 | const h34 = hash.digest(Buffer.from(elements[2] + 
elements[3])); 14 | const h56 = hash.digest(Buffer.from(elements[4] + elements[5])); 15 | const h78 = hash.digest(Buffer.from(elements[6] + elements[7])); 16 | 17 | const h1234 = hash.merge(h12, h34); 18 | const h5678 = hash.merge(h56, h78); 19 | 20 | const hRoot = hash.merge(h1234, h5678); 21 | 22 | const root = hRoot.toString(); 23 | 24 | let tree: MerkleTree; 25 | 26 | describe('MerkleTree;', () => { 27 | describe('create();', () => { 28 | it('should create new tree', () => { 29 | const tree = MerkleTree.create(bElements, hash); 30 | 31 | expect(tree.depth).to.equal(3); 32 | expect(tree.values).to.have.length(elements.length); 33 | expect(tree.root.toString()).to.equal(root); 34 | }); 35 | }); 36 | 37 | describe('prove();', () => { 38 | beforeEach(() => { 39 | tree = MerkleTree.create(bElements, hash); 40 | }); 41 | 42 | describe('should return correct proof for element with index', () => { 43 | [ 44 | { 45 | index: 0, 46 | result: [ elements[0], elements[1], h34, h5678 ] 47 | }, 48 | { 49 | index: 1, 50 | result: [ elements[1], elements[0], h34, h5678 ] 51 | }, 52 | { 53 | index: 3, 54 | result: [ elements[3], elements[2], h12, h5678 ] 55 | }, 56 | { 57 | index: 6, 58 | result: [ elements[6], elements[7], h56, h1234 ] 59 | }, 60 | { 61 | index: 7, 62 | result: [ elements[7], elements[6], h56, h1234 ] 63 | } 64 | ].forEach(({ index, result }) => { 65 | it(String(index), () => { 66 | const proof = tree.prove(index); 67 | 68 | expect(proof[0].toString()).to.equal(result[0]); 69 | expect(proof[1].toString()).to.equal(result[1]); 70 | expect(proof[2].toString('hex')).to.equal(result[2].toString('hex')); 71 | expect(proof[3].toString('hex')).to.equal(result[3].toString('hex')); 72 | }); 73 | }); 74 | }); 75 | 76 | describe('should return error for nonexistent index', () => { 77 | [ -1, 20, 1000 ].forEach(index => { 78 | it(String(index), () => { 79 | expect(() => tree.prove(index)).to.throw(`Invalid index: ${index}`); 80 | }); 81 | }); 82 | }); 83 | }); 84 | 85 | 
describe('verify();', () => { 86 | beforeEach(() => { 87 | tree = MerkleTree.create(bElements, hash); 88 | }); 89 | 90 | describe('should verify proof for index', () => { 91 | [ 0, 1, 3, 6, 7 ].forEach(index => { 92 | it(String(index), () => { 93 | const proof = tree.prove(index); 94 | 95 | expect(MerkleTree.verify(tree.root, index, proof, hash)).to.be.true; 96 | }); 97 | }); 98 | }); 99 | 100 | describe('should not verify proof', () => { 101 | [ 102 | [0, 1], [6, 7], 103 | [1, -1], [1, 8] 104 | ].forEach(([index, fIndex]) => { 105 | it(`created for index ${index} but verified for ${fIndex}`, () => { 106 | const proof = tree.prove(index); 107 | 108 | expect(MerkleTree.verify(tree.root, fIndex, proof, hash)).to.be.false; 109 | }); 110 | }); 111 | }); 112 | }); 113 | 114 | describe('proveBatch();', () => { 115 | beforeEach(() => { 116 | tree = MerkleTree.create(bElements, hash); 117 | }); 118 | 119 | describe('should return correct proof for indexes', () => { 120 | [ 121 | { 122 | indexes: [3], 123 | nodes : [[bElements[2], h12, h5678]] 124 | }, 125 | { 126 | indexes: [0, 1], 127 | nodes : [[h34, h5678]] 128 | }, 129 | { 130 | indexes: [0, 1, 7], 131 | nodes : [[h34], [bElements[6], h56]] 132 | }, 133 | { 134 | indexes: [0, 1, 6, 7], 135 | nodes : [[h34], [h56]] 136 | }, 137 | { 138 | indexes: [0, 1, 6, 7, 3], 139 | nodes : [[], [bElements[2]], [h56]] 140 | }, 141 | { 142 | indexes: [0, 1, 2, 3, 6, 7], 143 | nodes : [[], [], [h56]] 144 | }, 145 | { 146 | indexes: [0, 1, 6, 7, 4, 5, 2, 3], 147 | nodes : [[], [], [], []] 148 | } 149 | ].forEach(({ indexes, nodes }) => { 150 | it(String(indexes), () => { 151 | const proof = tree.proveBatch(indexes); 152 | 153 | indexes.forEach((index, vIndex) => { 154 | expect(proof.values[vIndex].toString()).to.equal(elements[index]); 155 | }); 156 | 157 | expect(proof.nodes).to.deep.equal(nodes); 158 | 159 | expect(proof.depth).to.equal(3); 160 | }); 161 | }); 162 | }); 163 | 164 | describe('should return error for indexes', () => { 
165 | [ 166 | [-1], 167 | [8], 168 | [0, 8], 169 | [8, 0, 1], 170 | [0, 10, 1], 171 | [0, 3, -1] 172 | ].forEach((indexes) => { 173 | it(String(indexes), () => { 174 | expect(() => tree.proveBatch(indexes)).to.throw('Invalid index'); 175 | }); 176 | }); 177 | }); 178 | }); 179 | 180 | describe('verifyBatch();', () => { 181 | beforeEach(() => { 182 | tree = MerkleTree.create(bElements, hash); 183 | }); 184 | 185 | describe('should verify proof for indexes', () => { 186 | [ 187 | [3], 188 | [0, 1], 189 | [0, 1, 7], 190 | [0, 1, 6, 7], 191 | [0, 1, 6, 7, 3], 192 | [0, 1, 2, 3, 6, 7], 193 | [0, 1, 6, 7, 4, 5, 2, 3] 194 | ].forEach(indexes => { 195 | it(`[${indexes}]`, () => { 196 | const proof = tree.proveBatch(indexes); 197 | 198 | expect(MerkleTree.verifyBatch(tree.root, indexes, proof, hash)).to.be.true; 199 | }); 200 | }); 201 | }); 202 | 203 | describe('should not verify proof', () => { 204 | [ 205 | [[3], []], 206 | [[3], [2]], 207 | [[0, 1], [1]], 208 | [[0, 1, 7], [0, 7, 1]], 209 | [[0, 1, 6, 7], [0, 1, 2]], 210 | [[0, 1, 6, 7, 3], [0, 1, 7, 3]], 211 | [[0, 1, 2, 3, 6, 7], [2, 3, 6, 7]], 212 | [[0, 1, 6, 7, 4, 5, 2, 3], [1, 2, 3, 4]] 213 | ].forEach(([indexes, fIndexes]) => { 214 | it(`created for indexes [${indexes}] but verified for [${fIndexes}]`, () => { 215 | const proof = tree.proveBatch(indexes); 216 | 217 | expect(MerkleTree.verifyBatch(tree.root, fIndexes, proof, hash)).to.be.false; 218 | }); 219 | }); 220 | }); 221 | }); 222 | }); 223 | -------------------------------------------------------------------------------- /tests/run.ts: -------------------------------------------------------------------------------- 1 | // IMPORTS 2 | // ================================================================================================ 3 | import * as assert from 'assert'; 4 | import * as crypto from 'crypto'; 5 | import { Vector } from '@guildofweavers/merkle'; 6 | import { MerkleTree, createHash } from '../index'; 7 | import { JsVector } from 
'../lib/vectors/JsVector'; 8 | import { WasmVector } from '../lib/vectors/WasmVector'; 9 | import { instantiateBlake2s } from '../lib/assembly'; 10 | 11 | // MODULE VARIABLES 12 | // ================================================================================================ 13 | const iterations = 10; 14 | const treeDepth = 18; 15 | const branchCount = 128; 16 | const elementSize = 32; 17 | 18 | const memory = new WebAssembly.Memory({ initial: 300 }); 19 | const wasm = instantiateBlake2s(memory); 20 | 21 | const dataInWasm = true; 22 | const hash = createHash('blake2s256', { memory }); 23 | 24 | // TESTS 25 | // ================================================================================================ 26 | (function runTest() { 27 | 28 | let t0 = 0, t1 = 0, s1 = 0; 29 | let leafCount = 2**treeDepth; 30 | 31 | for (let i = 0; i < iterations; i++) { 32 | let leaves = generateLeaves(leafCount); 33 | 34 | let indexSet = new Set<number>(); 35 | while (indexSet.size < branchCount) { 36 | indexSet.add(Math.floor(Math.random() * leafCount)); 37 | } 38 | let indexes = Array.from(indexSet); 39 | 40 | let controlSet = new Set<number>(); 41 | while (controlSet.size < branchCount) { 42 | controlSet.add(Math.floor(Math.random() * leafCount)); 43 | } 44 | let controls = Array.from(controlSet); 45 | 46 | let start = Date.now(); 47 | const tree = MerkleTree.create(leaves, hash); 48 | t0 += Date.now() - start; 49 | 50 | start = Date.now(); 51 | const mp = tree.proveBatch(indexes); 52 | t1 += Date.now() - start; 53 | for (let i = 0; i < mp.nodes.length; i++) { 54 | s1 += mp.nodes[i].length; 55 | } 56 | s1 += mp.values.length; 57 | 58 | assert.equal(MerkleTree.verifyBatch(tree.root, indexes, mp, hash), true); 59 | assert.equal(MerkleTree.verifyBatch(tree.root, controls, mp, hash), false); 60 | 61 | if (leaves instanceof WasmVector) { 62 | wasm.__release(leaves.base); 63 | } 64 | } 65 | 66 | const proofTime = Math.round(t1 / iterations * 100) / 100; 67 | const proofSize = (Math.round(s1 / 
iterations) * hash.digestSize / 1024); 68 | const naiveProofSize = ((treeDepth + 1) * hash.digestSize * branchCount) / 1024; 69 | const compressionRatio = Math.round(proofSize / naiveProofSize * 100); 70 | 71 | console.log(`tree built in ${Math.round(t0 / iterations)} ms`); 72 | console.log(`proof size: ${proofSize} KB (${compressionRatio}%), time: ${proofTime} ms`); 73 | })(); 74 | 75 | function generateLeaves(leafCount: number): Vector { 76 | let elements: Buffer[] = [], bRef: number; 77 | if (dataInWasm) { 78 | bRef = wasm.newArray(leafCount * elementSize); 79 | } 80 | 81 | const memU8 = new Uint8Array(wasm.memory.buffer); 82 | for (let i = 0; i < leafCount; i++) { 83 | let value = crypto.randomBytes(elementSize); 84 | if (dataInWasm) { 85 | memU8.set(value, bRef! + i * elementSize); 86 | } 87 | else { 88 | elements.push(value); 89 | } 90 | } 91 | 92 | if (dataInWasm) { 93 | return new WasmVector(wasm.memory, bRef!, leafCount, elementSize) 94 | } 95 | else { 96 | return new JsVector(elements); 97 | } 98 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "esnext", 4 | "module": "commonjs", 5 | "strict": true, 6 | "noEmitOnError": true, 7 | "sourceMap": true, 8 | "rootDir": ".", 9 | "outDir": "bin" 10 | }, 11 | "exclude": [ 12 | "bin/*", 13 | "lib/assembly/*.as.ts" 14 | ] 15 | } --------------------------------------------------------------------------------