├── .gitignore ├── .travis.yml ├── Cargo.toml ├── Makefile ├── README.md ├── assets ├── ark.bin └── mds.bin ├── benches ├── big_merkle.rs ├── hash.rs └── merkle.rs ├── build.rs └── src ├── big_merkle ├── merkle_coord.rs ├── merkle_range.rs ├── mod.rs └── proof.rs ├── constants.rs ├── error.rs ├── lib.rs ├── merkle.rs ├── poseidon.rs └── proof.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: rust 2 | 3 | branches: 4 | only: 5 | - master 6 | 7 | os: 8 | - linux 9 | 10 | matrix: 11 | include: 12 | - rust: nightly 13 | after_script: 14 | # Send a notification to the Dusk build Status Telegram channel once the CI build completes 15 | - bash <(curl -s https://raw.githubusercontent.com/dusk-network/tools/master/bash/telegram_ci_notifications.sh) 16 | 17 | before_install: 18 | - sudo apt-get update 19 | 20 | before_script: 21 | - make dep 22 | 23 | script: 24 | - make test 25 | - make inttest 26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "dusk-poseidon-merkle" 3 | version = "0.1.0" 4 | authors = ["Victor Lopez "] 5 | edition = "2018" 6 | build = "build.rs" 7 | 8 | [features] 9 | big-merkle = ["rocksdb", "bincode", "serde", "num_cpus"] 10 | 11 | [dependencies] 12 | lazy_static = "1.4.0" 13 | rocksdb = { version = "0.12", optional = true } 14 | bincode = { version = "1.2", optional = true } 15 | serde = { version = "1.0", features = ["derive"], optional = true } 16 | num_cpus = { version = "1.10", optional = true } 17 | 18 | [dependencies.curve25519-dalek] 19 | branch = "feature/compressed-try-from" 20 | git = "https://github.com/dusk-network/curve25519-dalek.git" 21 | features = ["serde"] 22 | 23 | [dev-dependencies] 24 | criterion = "0.3" 25 | rand = "0.7.0" 26 | sha2 = "0.8" 27 | tempdir = "0.3" 28 | 29 | [build-dependencies.curve25519-dalek] 30 | branch = "feature/compressed-try-from" 31 | git = "https://github.com/dusk-network/curve25519-dalek.git" 32 | 33 | [[bench]] 34 | name = "hash" 35 | harness = false 36 | 37 | [[bench]] 38 | name = "merkle" 39 | harness = false 40 | 41 | [[bench]] 42 | name = "big_merkle" 43 | harness = false 44 | required-features = ["big-merkle"] 45 | 46 | [profile.bench] 47 | lto = true 48 | incremental = false 49 | codegen-units = 1 50 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | RS_FILES := $(shell find . 
-name '*.rs') 2 | .PHONY: all dep lintdep lint fmt inttest test clean build release bench publishdoc 3 | all: test inttest build release ## Main sequence 4 | dep: ## Install the dependencies 5 | @rustup toolchain install beta 6 | @rustup toolchain install nightly 7 | @rustup component add rustfmt --toolchain=beta 8 | lintdep: dep ## Install the lint deps 9 | @rustup component add clippy --toolchain=nightly 10 | lint: ## Perform the clippy lints 11 | @cargo +nightly clippy 12 | fmt: ## Format the Rust files 13 | @cargo +beta fmt -- ${RS_FILES} 14 | inttest: ## Run the integration tests 15 | @cargo +nightly test --release -- --ignored --test-threads=1 16 | test: ## Run the unit tests 17 | @cargo +nightly check && \ 18 | cargo +beta fmt --all -- --check && \ 19 | cargo +nightly test 20 | clean: ## Remove previous build 21 | @cargo +nightly clean 22 | build: ## Build with debug symbols 23 | @cargo +nightly build 24 | release: ## Build with optimization and without debug symbols 25 | @cargo +nightly build --release 26 | bench: dep ## Perform the benchmark tests 27 | @for a in 2 4 8 ; \ 28 | do export POSEIDON_MERKLE_ARITY=$$a ; \ 29 | export POSEIDON_MERKLE_WIDTH=64 ; \ 30 | echo "POSEIDON_MERKLE_ARITY" $$POSEIDON_MERKLE_ARITY "POSEIDON_MERKLE_WIDTH" $$POSEIDON_MERKLE_WIDTH ; \ 31 | cargo +nightly bench ; \ 32 | done 33 | publishdoc: ## Generate and publish the GitHub Pages docs 34 | @cargo +nightly doc && \ 35 | echo "" > target/doc/index.html && \ 36 | curl -o 'target/doc/badge.svg' 'https://img.shields.io/badge/docs-latest-blue?logo=rust' && \ 37 | curl -o 'target/doc/repo-badge.svg' 'https://img.shields.io/badge/github-dusk--poseidon-brightgreen?logo=github' && \ 38 | ghp-import -n target/doc && \ 39 | git push -f https://github.com/dusk-network/dusk-poseidon-merkle gh-pages 40 | help: ## Display this help screen 41 | @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' 42 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Poseidon Merkle Tree 2 | 3 | [![Build Status](https://travis-ci.com/dusk-network/dusk-poseidon-merkle.svg?branch=master)](https://travis-ci.com/dusk-network/dusk-poseidon-merkle) 4 | [![Repository](https://dusk-network.github.io/dusk-poseidon-merkle/repo-badge.svg)](https://github.com/dusk-network/dusk-poseidon-merkle) 5 | [![Documentation](https://dusk-network.github.io/dusk-poseidon-merkle/badge.svg)](https://dusk-network.github.io/dusk-poseidon-merkle/dusk_poseidon_merkle/index.html) 6 | 7 | Reference implementation of the Poseidon hash function and of a Merkle tree built on top of it. 8 | 9 | The `Poseidon` structure will accept a number of inputs equal to the arity of the tree. 10 | 11 | ## Build 12 | 13 | A few environment variables are read during the build process. 14 | 15 | * `POSEIDON_MERKLE_ARITY` 16 | 17 | Represents the arity of the merkle tree. This is also the maximum number of elements the poseidon hash will accept. Defaults to `4`. 18 | 19 | 20 | * `POSEIDON_MERKLE_WIDTH` 21 | 22 | Represents the merkle tree width, i.e. the number of leaves. Defaults to `64`. 23 | 24 | * `POSEIDON_FULL_ROUNDS` 25 | 26 | Represents the number of full rounds performed during the permutation. Defaults to `8`. 27 | 28 | * `POSEIDON_PARTIAL_ROUNDS` 29 | 30 | Represents the number of partial rounds performed during the permutation. Defaults to `59`.
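With the default values above, the tree has height `3`, since `4^3 = 64`. The chosen configuration is exported by the crate as the `MERKLE_ARITY` and `MERKLE_WIDTH` constants, so a build can be sanity-checked against it. A minimal sketch, assuming the default values:

```rust
use dusk_poseidon_merkle::{MERKLE_ARITY, MERKLE_WIDTH};

// With a default build these hold: arity 4, width 64, hence height 3.
assert_eq!(MERKLE_ARITY, 4);
assert_eq!(MERKLE_WIDTH, 64);
assert_eq!(MERKLE_ARITY.pow(3), MERKLE_WIDTH);
```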
31 | 32 | ## Example 33 | 34 | ```rust 35 | use dusk_poseidon_merkle::{MERKLE_ARITY, Poseidon, Scalar}; 36 | 37 | let mut h = Poseidon::default(); 38 | for i in 0..MERKLE_ARITY { 39 | h.push(Scalar::from(i as u64)).unwrap(); 40 | } 41 | 42 | let hash = h.hash(); 43 | println!("{:x?}", hash.as_bytes()); 44 | ``` 45 | 46 | ## Reference 47 | 48 | [Starkad and Poseidon: New Hash Functions for Zero Knowledge Proof Systems](https://eprint.iacr.org/2019/458.pdf) 49 | -------------------------------------------------------------------------------- /assets/ark.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dusk-network/dusk-poseidon-merkle/e10987678f05ad952df9bf2718daaa95f0e83a02/assets/ark.bin -------------------------------------------------------------------------------- /assets/mds.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dusk-network/dusk-poseidon-merkle/e10987678f05ad952df9bf2718daaa95f0e83a02/assets/mds.bin -------------------------------------------------------------------------------- /benches/big_merkle.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, Criterion}; 2 | use dusk_poseidon_merkle::*; 3 | use lazy_static::*; 4 | use std::env; 5 | use std::time::Duration; 6 | 7 | // 2^36 8 | const WIDTH: usize = 68719476736; 9 | 10 | lazy_static! { 11 | static ref POSEIDON_BENCH_BIG_MERKLE_SAMPLE_SIZE: usize = { 12 | env::var("POSEIDON_BENCH_BIG_MERKLE_SAMPLE_SIZE") 13 | .map(|s| { 14 | s.parse() 15 | .expect("Failed to parse POSEIDON_BENCH_BIG_MERKLE_SAMPLE_SIZE") 16 | }) 17 | .unwrap_or(10) 18 | }; 19 | static ref POSEIDON_BENCH_BIG_MERKLE_MEASUREMENT_TIME: Duration = { 20 | Duration::from_secs( 21 | env::var("POSEIDON_BENCH_BIG_MERKLE_MEASUREMENT_TIME") 22 | .map(|s| { 23 | s.parse() 24 | .expect("Failed to parse POSEIDON_BENCH_BIG_MERKLE_MEASUREMENT_TIME") 25 | }) 26 | .unwrap_or(60), 27 | ) 28 | }; 29 | } 30 | 31 | fn bench_big_merkle(c: &mut Criterion) { 32 | let mut group = c.benchmark_group("big_merkle"); 33 | 34 | let iter = vec![10, 1000]; 35 | for x in iter { 36 | let path = format!("big_merkle_{}", x); 37 | let desc = format!( 38 | "Proof with width {}, arity {}, elements {}", 39 | WIDTH, MERKLE_ARITY, x 40 | ); 41 | let mut tree: BigMerkleTree = BigMerkleTree::new(path.as_str(), WIDTH).unwrap(); 42 | for i in 0..10 { 43 | tree.insert(i, Scalar::from(i as u64)).unwrap(); 44 | } 45 | group.bench_function(desc.as_str(), move |b| b.iter(|| proof(&mut tree))); 46 | } 47 | 48 | group.finish(); 49 | } 50 | 51 | fn proof(tree: &mut BigMerkleTree) { 52 | tree.clear_cache(false).unwrap(); 53 | tree.proof(0).unwrap(); 54 | } 55 | 56 | criterion_group! 
{ 57 | name = big_merkle; 58 | 59 | config = Criterion::default() 60 | .sample_size(*POSEIDON_BENCH_BIG_MERKLE_SAMPLE_SIZE) 61 | .measurement_time(*POSEIDON_BENCH_BIG_MERKLE_MEASUREMENT_TIME); 62 | 63 | targets = bench_big_merkle 64 | } 65 | criterion_main!(big_merkle); 66 | -------------------------------------------------------------------------------- /benches/hash.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 2 | use dusk_poseidon_merkle::*; 3 | use rand::rngs::OsRng; 4 | use rand::seq::SliceRandom; 5 | use sha2::{Digest, Sha256, Sha512}; 6 | 7 | fn bench_hash(c: &mut Criterion) { 8 | let scalars: Vec = std::iter::repeat(()) 9 | .take(1000) 10 | .enumerate() 11 | .map(|(i, _)| Scalar::from(i as u64)) 12 | .collect(); 13 | 14 | let mut group = c.benchmark_group("hash"); 15 | 16 | group.bench_with_input( 17 | BenchmarkId::new("Sha2 256", "Generated scalars"), 18 | &scalars, 19 | |b, s| { 20 | b.iter(|| { 21 | let mut h = Sha256::new(); 22 | 23 | std::iter::repeat(()) 24 | .take(MERKLE_ARITY) 25 | .map(|_| s.choose(&mut OsRng).unwrap()) 26 | .for_each(|scalar| { 27 | h.input(scalar.as_bytes()); 28 | }); 29 | 30 | h.result(); 31 | }) 32 | }, 33 | ); 34 | 35 | group.bench_with_input( 36 | BenchmarkId::new("Sha2 512", "Generated scalars"), 37 | &scalars, 38 | |b, s| { 39 | b.iter(|| { 40 | let mut h = Sha512::new(); 41 | 42 | std::iter::repeat(()) 43 | .take(MERKLE_ARITY) 44 | .map(|_| s.choose(&mut OsRng).unwrap()) 45 | .for_each(|scalar| { 46 | h.input(scalar.as_bytes()); 47 | }); 48 | 49 | h.result(); 50 | }) 51 | }, 52 | ); 53 | 54 | group.bench_with_input( 55 | BenchmarkId::new("Poseidon hash", "Generated scalars"), 56 | &scalars, 57 | |b, s| { 58 | b.iter(|| { 59 | let mut h = Poseidon::default(); 60 | 61 | std::iter::repeat(()) 62 | .take(MERKLE_ARITY) 63 | .map(|_| s.choose(&mut OsRng).unwrap()) 64 | .for_each(|scalar| { 65 | h.push(*scalar).unwrap(); 66 | }); 67 | 68 | h.hash(); 69 | }) 70 | }, 71 | ); 72 | 73 | group.finish(); 74 | } 75 | 76 | criterion_group! { 77 | name = hash; 78 | 79 | config = Criterion::default(); 80 | 81 | targets = bench_hash 82 | } 83 | criterion_main!(hash); 84 | -------------------------------------------------------------------------------- /benches/merkle.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 2 | use dusk_poseidon_merkle::*; 3 | use lazy_static::*; 4 | use rand::rngs::OsRng; 5 | use rand::seq::SliceRandom; 6 | use rand::RngCore; 7 | use std::env; 8 | use std::time::Duration; 9 | 10 | lazy_static! 
{ 11 | static ref POSEIDON_BENCH_MERKLE_SAMPLE_SIZE: usize = { 12 | env::var("POSEIDON_BENCH_MERKLE_SAMPLE_SIZE") 13 | .map(|s| { 14 | s.parse() 15 | .expect("Failed to parse POSEIDON_BENCH_MERKLE_SAMPLE_SIZE") 16 | }) 17 | .unwrap_or(20) 18 | }; 19 | static ref POSEIDON_BENCH_MERKLE_MEASUREMENT_TIME: Duration = { 20 | Duration::from_secs( 21 | env::var("POSEIDON_BENCH_MERKLE_MEASUREMENT_TIME") 22 | .map(|s| { 23 | s.parse() 24 | .expect("Failed to parse POSEIDON_BENCH_MERKLE_MEASUREMENT_TIME") 25 | }) 26 | .unwrap_or(15), 27 | ) 28 | }; 29 | } 30 | 31 | fn bench_merkle(c: &mut Criterion) { 32 | let mut tree = MerkleTree::::default(); 33 | for i in 0..MERKLE_WIDTH { 34 | tree.insert_unchecked(i, Scalar::from(OsRng.next_u64())); 35 | } 36 | 37 | let leaves = *tree.leaves(); 38 | let root = tree.clone().root(); 39 | let mut temp_tree = tree.clone(); 40 | let proofs: Vec<(Scalar, Proof)> = leaves 41 | .iter() 42 | .map(|l| (l.unwrap(), temp_tree.proof(&l.unwrap()).unwrap())) 43 | .collect(); 44 | 45 | let mut group = c.benchmark_group("merkle"); 46 | 47 | group.bench_with_input( 48 | BenchmarkId::new("Proof", "Generated tree"), 49 | &(leaves.clone(), tree.clone()), 50 | |b, (l, t)| { 51 | b.iter(|| { 52 | let leaf = l.choose(&mut OsRng).unwrap().unwrap(); 53 | t.clone().proof(&leaf).unwrap(); 54 | }) 55 | }, 56 | ); 57 | 58 | group.bench_with_input( 59 | BenchmarkId::new("Verify", "Generated proofs"), 60 | &(root.clone(), proofs.clone()), 61 | |b, (r, p)| { 62 | b.iter(|| { 63 | let (leaf, proof) = p.choose(&mut OsRng).unwrap(); 64 | proof.verify(&leaf, r); 65 | }) 66 | }, 67 | ); 68 | 69 | group.finish(); 70 | } 71 | 72 | criterion_group! { 73 | name = merkle; 74 | 75 | config = Criterion::default() 76 | .sample_size(*POSEIDON_BENCH_MERKLE_SAMPLE_SIZE) 77 | .measurement_time(*POSEIDON_BENCH_MERKLE_MEASUREMENT_TIME); 78 | 79 | targets = bench_merkle 80 | } 81 | criterion_main!(merkle); 82 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use curve25519_dalek::scalar::Scalar; 2 | use std::env; 3 | use std::fs::File; 4 | use std::io::Write; 5 | use std::path::Path; 6 | 7 | fn generate_mds(t: usize) -> Vec> { 8 | let mut matrix: Vec> = Vec::with_capacity(t); 9 | let mut xs: Vec = Vec::with_capacity(t); 10 | let mut ys: Vec = Vec::with_capacity(t); 11 | 12 | // Generate x and y values deterministically for the cauchy matrix 13 | // where x[i] != y[i] to allow the values to be inverted 14 | // and there are no duplicates in the x vector or y vector, so that the determinant is always non-zero 15 | // [a b] 16 | // [c d] 17 | // det(M) = (ad - bc) ; if a == b and c == d => det(M) =0 18 | // For an MDS matrix, every possible mxm submatrix, must have det(M) != 0 19 | for i in 0..t { 20 | let x = Scalar::from((i) as u64); 21 | let y = Scalar::from((i + t) as u64); 22 | xs.push(x); 23 | ys.push(y); 24 | } 25 | 26 | for i in 0..t { 27 | let mut row: Vec = Vec::with_capacity(t); 28 | for j in 0..t { 29 | // Generate the entry at (i,j) 30 | let entry = (xs[i] + ys[j]).invert(); 31 | row.insert(j, entry); 32 | } 33 | matrix.push(row); 34 | } 35 | 36 | matrix 37 | } 38 | 39 | fn main() { 40 | let out_dir = env::var("CARGO_MANIFEST_DIR").expect("No out dir"); 41 | let dest_path = Path::new(&out_dir).join("src").join("constants.rs"); 42 | let mut f = File::create(&dest_path).expect("Could not create file"); 43 | 44 | let merkle_arity = env::var("POSEIDON_MERKLE_ARITY") 45 | .map(|s| 
s.parse().expect("Failed to parse POSEIDON_MERKLE_ARITY")) 46 | .unwrap_or(4); 47 | 48 | let merkle_width = env::var("POSEIDON_MERKLE_WIDTH") 49 | .map(|s| s.parse().expect("Failed to parse POSEIDON_MERKLE_WIDTH")) 50 | .unwrap_or(64); 51 | 52 | let full_rounds = env::var("POSEIDON_FULL_ROUNDS") 53 | .map(|s| s.parse().expect("Failed to parse POSEIDON_FULL_ROUNDS")) 54 | .unwrap_or(8); 55 | 56 | let partial_rounds = env::var("POSEIDON_PARTIAL_ROUNDS") 57 | .map(|s| s.parse().expect("Failed to parse POSEIDON_PARTIAL_ROUNDS")) 58 | .unwrap_or(59); 59 | 60 | let width = merkle_arity + 1; 61 | let merkle_height = merkle_width as f64; 62 | let merkle_height = merkle_height.log(merkle_arity as f64) as usize; 63 | 64 | write!( 65 | &mut f, 66 | r#"// Poseidon constants 67 | pub(crate) const WIDTH: usize = {}; 68 | pub(crate) const FULL_ROUNDS: usize = {}; 69 | pub(crate) const PARTIAL_ROUNDS: usize = {}; 70 | 71 | // Merkle constants 72 | /// Arity of the merkle tree 73 | pub const MERKLE_ARITY: usize = {}; 74 | /// Width of the merkle tree 75 | pub const MERKLE_WIDTH: usize = {}; 76 | pub(crate) const MERKLE_HEIGHT: usize = {}; 77 | 78 | "#, 79 | width, full_rounds, partial_rounds, merkle_arity, merkle_width, merkle_height 80 | ) 81 | .expect("Could not write file"); 82 | 83 | let dest_path = Path::new(&out_dir).join("assets").join("mds.bin"); 84 | let mut f = File::create(&dest_path).expect("Could not create file"); 85 | let mds = generate_mds(width) 86 | .into_iter() 87 | .flatten() 88 | .fold(vec![], |mut v, scalars| { 89 | v.extend_from_slice(scalars.as_bytes()); 90 | v 91 | }); 92 | 93 | f.write_all(mds.as_slice()) 94 | .expect("Failed to write MDS matrix bin file."); 95 | } 96 | -------------------------------------------------------------------------------- /src/big_merkle/merkle_coord.rs: -------------------------------------------------------------------------------- 1 | use crate::{Error, MERKLE_ARITY}; 2 | 3 | use std::cmp; 4 | use std::convert::{TryFrom, TryInto}; 5 | 6 | use rocksdb::DB; 7 | use serde::{Deserialize, Serialize}; 8 | 9 | /// Representation of a coordinate inside the tree. 10 | /// 11 | /// No tree consistency is performed in this layer. This implies invalid coordinates are possible 12 | /// inside a tree. 13 | #[derive(Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] 14 | pub struct MerkleCoord { 15 | /// Height position in the tree 16 | pub height: usize, 17 | /// Index for the current row of the tree. 18 | pub idx: usize, 19 | } 20 | 21 | impl MerkleCoord { 22 | /// MerkleCoord constructor 23 | pub fn new(height: usize, idx: usize) -> Self { 24 | MerkleCoord { height, idx } 25 | } 26 | 27 | /// Attempt to fetch a leaf from a DB 28 | pub fn fetch_leaf(self, db: &DB) -> Result, Error> 29 | where 30 | T: for<'a> Deserialize<'a>, 31 | { 32 | let coord: Vec = self.try_into()?; 33 | 34 | db.get(coord.as_slice()) 35 | .map_err(|e| Error::Other(e.to_string()))? 
36 | .map(|b| bincode::deserialize::(b.as_ref()).map_err(|e| Error::Other(e.to_string()))) 37 | .transpose() 38 | } 39 | 40 | /// Attempt to persist a leaf into a DB 41 | pub fn persist_leaf(self, db: &DB, leaf: T) -> Result<(), Error> 42 | where 43 | T: Serialize, 44 | { 45 | let coord: Vec = self.try_into()?; 46 | let leaf = bincode::serialize(&leaf).map_err(|e| Error::Other(e.to_string()))?; 47 | 48 | db.put(coord.as_slice(), leaf.as_slice()) 49 | .map_err(|e| Error::Other(e.to_string())) 50 | } 51 | 52 | /// Descend the tree for a number of provided levels 53 | pub fn descend(&mut self, levels: usize) { 54 | let levels = cmp::min(self.height, levels); 55 | 56 | if levels > 0 { 57 | self.height -= levels; 58 | self.idx /= MERKLE_ARITY.pow(levels as u32); 59 | } 60 | } 61 | } 62 | 63 | impl TryFrom<&[u8]> for MerkleCoord { 64 | type Error = Error; 65 | 66 | fn try_from(buf: &[u8]) -> Result { 67 | bincode::deserialize(&buf).map_err(|e| Error::Other(e.to_string())) 68 | } 69 | } 70 | 71 | impl TryInto> for MerkleCoord { 72 | type Error = Error; 73 | 74 | fn try_into(self) -> Result, Self::Error> { 75 | bincode::serialize(&self).map_err(|e| Error::Other(e.to_string())) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/big_merkle/merkle_range.rs: -------------------------------------------------------------------------------- 1 | use crate::MERKLE_ARITY; 2 | 3 | use std::cmp::Ordering; 4 | use std::ops::Range; 5 | 6 | /// Struct to represent a range in the base of the tree 7 | #[derive(Debug, Eq, Clone)] 8 | pub struct MerkleRange(pub Range); 9 | 10 | impl Ord for MerkleRange { 11 | fn cmp(&self, other: &Self) -> Ordering { 12 | self.0.start.cmp(&other.0.start) 13 | } 14 | } 15 | 16 | impl PartialOrd for MerkleRange { 17 | fn partial_cmp(&self, other: &Self) -> Option { 18 | Some(self.cmp(other)) 19 | } 20 | } 21 | 22 | impl PartialEq for MerkleRange { 23 | /// The equivalence for a merkle range is defined by any provided range that is contained 24 | /// within the current range. 25 | /// 26 | /// Therefore, this is not commutative, and should be used with care. 
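///
/// A small sketch of the asymmetry, assuming the `big-merkle` feature and the default
/// arity of `4`: the range covering the whole base "contains" a single-leaf range, but
/// not the other way around.
///
/// ```
/// use dusk_poseidon_merkle::MerkleRange;
///
/// let whole = MerkleRange::new(3, 0, 0); // 0..64 with arity 4
/// let leaf = MerkleRange::new(3, 3, 15); // 15..16
///
/// assert!(whole == leaf);
/// assert!(leaf != whole);
/// ```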
27 | fn eq(&self, other: &Self) -> bool { 28 | self.0.start <= other.0.start && self.0.end >= other.0.end 29 | } 30 | } 31 | 32 | impl From> for MerkleRange { 33 | fn from(r: Range) -> Self { 34 | MerkleRange(r) 35 | } 36 | } 37 | 38 | impl MerkleRange { 39 | /// Will return a range within the max row for a relative merkle position 40 | pub fn new(max_height: usize, height: usize, idx: usize) -> Self { 41 | let h = max_height - height; 42 | let h = h as u32; 43 | 44 | let from = MERKLE_ARITY.pow(h) * idx; 45 | let to = MERKLE_ARITY.pow(h) * (idx + 1); 46 | 47 | MerkleRange::from(from..to) 48 | } 49 | } 50 | 51 | #[cfg(test)] 52 | mod tests { 53 | use crate::*; 54 | 55 | #[test] 56 | fn big_merkle_range_eq() { 57 | let r1 = MerkleRange::new(3, 0, 0); 58 | let r2 = MerkleRange::new(3, 3, 15); 59 | 60 | assert!(r1 == r2); 61 | assert!(r2 != r1) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/big_merkle/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{Error, Poseidon, PoseidonLeaf, Scalar, MERKLE_ARITY}; 2 | 3 | use std::cmp; 4 | use std::convert::TryInto; 5 | use std::ops; 6 | use std::path::Path; 7 | use std::sync::{mpsc, Arc, Mutex}; 8 | use std::thread; 9 | 10 | use rocksdb::DB; 11 | #[cfg(test)] 12 | use tempdir::TempDir; 13 | 14 | pub use merkle_coord::MerkleCoord; 15 | pub use merkle_range::MerkleRange; 16 | pub use proof::BigProof; 17 | 18 | const CACHE_HEIGHT_INTERVAL: usize = 2; 19 | 20 | mod merkle_coord; 21 | mod merkle_range; 22 | mod proof; 23 | 24 | /// The merkle tree will accept up to `MERKLE_ARITY * MERKLE_WIDTH` leaves. 25 | #[derive(Debug)] 26 | pub struct BigMerkleTree { 27 | width: usize, 28 | height: usize, 29 | max_idx: usize, 30 | /// For most cases, this attribute should hold one element that represents the higher idx to 31 | /// the end of the tree. The usage of the free intervals is, however, non-restricted. 32 | empty_intervals: Vec, 33 | db: Arc, 34 | cache: Arc, 35 | } 36 | 37 | impl Clone for BigMerkleTree { 38 | fn clone(&self) -> Self { 39 | BigMerkleTree { 40 | max_idx: self.max_idx, 41 | db: Arc::clone(&self.db), 42 | cache: Arc::clone(&self.cache), 43 | empty_intervals: self.empty_intervals.clone(), 44 | width: self.width, 45 | height: self.height, 46 | } 47 | } 48 | } 49 | 50 | impl BigMerkleTree { 51 | /// `BigMerkleTree` constructor 52 | pub fn new, E: AsRef>( 53 | db_path: D, 54 | cache_path: E, 55 | width: usize, 56 | ) -> Result { 57 | let max_idx = 0; 58 | let height = width as f64; 59 | let height = height.log(MERKLE_ARITY as f64) as usize; 60 | 61 | let mut empty_intervals = Vec::new(); 62 | 63 | let db = DB::open_default(db_path).map_err(|e| Error::Other(e.to_string()))?; 64 | let db = Arc::new(db); 65 | 66 | let cache = DB::open_default(cache_path).map_err(|e| Error::Other(e.to_string()))?; 67 | let cache = Arc::new(cache); 68 | 69 | // The initial empty interval is the whole input set. Therefore, the relative range for the 70 | // root node. 
71 | empty_intervals.push(MerkleRange::new(height, 0, 0)); 72 | 73 | Ok(BigMerkleTree { 74 | max_idx, 75 | db, 76 | cache, 77 | empty_intervals, 78 | width, 79 | height, 80 | }) 81 | } 82 | 83 | /// Return a reference to the internal path of the DB 84 | pub fn db_path(&self) -> &Path { 85 | self.db.path() 86 | } 87 | 88 | /// Height of the tree 89 | pub fn height(&self) -> usize { 90 | self.height 91 | } 92 | 93 | /// Arity of the tree 94 | pub fn arity(&self) -> usize { 95 | MERKLE_ARITY 96 | } 97 | 98 | /// Width of the tree 99 | pub fn width(&self) -> usize { 100 | self.width 101 | } 102 | 103 | /// Divide the tree into a parallelizable path to the root 104 | pub fn segments(&self) -> Vec { 105 | let mut coords = vec![]; 106 | let mut coord = MerkleCoord::new(self.height, self.max_idx); 107 | 108 | while coord.height > 0 { 109 | coord.descend(CACHE_HEIGHT_INTERVAL); 110 | 111 | for i in 0..coord.idx + 1 { 112 | coords.push(MerkleCoord::new(coord.height, i)); 113 | } 114 | } 115 | 116 | coords 117 | } 118 | 119 | /// Check if the node in the provided height and index belongs to an empty super tree. 120 | pub fn node_is_empty(&self, height: usize, idx: usize) -> bool { 121 | let r = MerkleRange::new(self.height, height, idx); 122 | self.empty_intervals.contains(&r) 123 | } 124 | 125 | /// Insert the provided leaf on the provided index 126 | pub fn insert(&mut self, idx: usize, leaf: T) -> Result<(), Error> { 127 | self.insert_height(self.height, idx, leaf) 128 | } 129 | 130 | /// Insert the provided leaf on the provided index 131 | fn insert_height( 132 | &mut self, 133 | height: usize, 134 | idx: usize, 135 | leaf: T, 136 | ) -> Result<(), Error> { 137 | let coord = MerkleCoord::new(height, idx); 138 | 139 | if height == self.height { 140 | coord 141 | .persist_leaf(&self.db, leaf) 142 | .and_then(|_| self.inserted(idx)) 143 | } else { 144 | coord.persist_leaf(&self.cache, leaf) 145 | } 146 | } 147 | 148 | /// Flag the provided index as inserted in the structure. 149 | /// 150 | /// This will reorganize the empty intervals. 
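///
/// A minimal sketch of the interval bookkeeping (it assumes the `big-merkle` feature, a
/// width of `64` and the `tempdir` dev-dependency for throwaway paths; it is illustrative
/// rather than a doctest):
///
/// ```ignore
/// use dusk_poseidon_merkle::{BigMerkleTree, Scalar};
/// use tempdir::TempDir;
///
/// let db = TempDir::new("bm-db").unwrap();
/// let cache = TempDir::new("bm-cache").unwrap();
/// let mut tree: BigMerkleTree<Scalar> =
///     BigMerkleTree::new(db.path(), cache.path(), 64).unwrap();
///
/// // The whole base starts as a single empty interval...
/// assert!(tree.node_is_empty(tree.height(), 3));
///
/// // ...and flagging an index as inserted splits that interval around it.
/// tree.inserted(3).unwrap();
/// assert!(!tree.node_is_empty(tree.height(), 3));
/// assert!(tree.node_is_empty(tree.height(), 2));
/// assert!(tree.node_is_empty(tree.height(), 4));
/// ```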
151 | pub fn inserted(&mut self, idx: usize) -> Result<(), Error> { 152 | self.max_idx = cmp::max(self.max_idx, idx); 153 | 154 | // Should split the empty interval only if the current idx belongs to an empty base 155 | if self.node_is_empty(self.height, idx) { 156 | // The range for the current idx is always itself + 1, since its possible to insert 157 | // leaves only on the base 158 | let idx_r: MerkleRange = (idx..idx + 1).into(); 159 | 160 | let mut r1 = None; 161 | let mut r2 = None; 162 | let mut empty_idx = None; 163 | 164 | // Find the empty interval that should be split 165 | for (i, r) in self.empty_intervals.iter().enumerate() { 166 | if r == &idx_r { 167 | r1 = Some(r.clone()); 168 | r2 = Some(r.clone()); 169 | empty_idx = Some(i); 170 | } 171 | } 172 | 173 | // If the interval is not found, then we have unreachable code since the 174 | // `node_is_empty` check was performed 175 | let mut r1 = r1.ok_or(Error::IndexOutOfBounds)?; 176 | let mut r2 = r2.ok_or(Error::IndexOutOfBounds)?; 177 | let empty_idx = empty_idx.ok_or(Error::IndexOutOfBounds)?; 178 | 179 | // The rightmost of the interval is always split 180 | r2.0.start = idx + 1; 181 | self.empty_intervals[empty_idx] = r2; 182 | 183 | // The leftmost of the interval is split only if idx is not the first element of the 184 | // provided interval 185 | // 186 | // Since the base should be, but not necessarily is, append only, this should lead to 187 | // performance degradation 188 | if idx > r1.0.start { 189 | r1.0.end = idx; 190 | self.empty_intervals.push(r1); 191 | } 192 | } 193 | 194 | self.modified(idx) 195 | } 196 | 197 | /// Set the provided leaf index as absent for the hash calculation. 198 | pub fn remove(&mut self, idx: usize) -> Result<(), Error> { 199 | let coord: Vec = MerkleCoord::new(self.height, idx).try_into()?; 200 | 201 | self.db 202 | .delete(coord.as_slice()) 203 | .map_err(|e| Error::Other(e.to_string())) 204 | .and_then(|_| self.removed(idx)) 205 | } 206 | 207 | /// Flag the provided index as absent. 208 | /// 209 | /// This will reorganize the empty intervals. 
210 | pub fn removed(&mut self, idx: usize) -> Result<(), Error> { 211 | // Check if there is an adjacent left interval 212 | let left = idx > 0 && self.node_is_empty(self.height, idx - 1); 213 | 214 | // Check if there is an adjacent right interval 215 | let right = idx < self.width - 1 && self.node_is_empty(self.height, idx + 1); 216 | 217 | if left && right { 218 | // Merge the two intervals 219 | 220 | // Fetch the index of the left and right intervals 221 | let r1 = MerkleRange::from(idx - 1..idx); 222 | let r2 = MerkleRange::from(idx + 1..idx + 2); 223 | let mut idx_left = None; 224 | let mut idx_right = None; 225 | for (i, r) in self.empty_intervals.iter().enumerate() { 226 | if r == &r1 { 227 | idx_left = Some(i); 228 | } 229 | 230 | if r == &r2 { 231 | idx_right = Some(i); 232 | } 233 | } 234 | 235 | // If the interval is not found, then we have unreachable code since the 236 | // `node_is_empty` check was performed 237 | let idx_left = idx_left.ok_or(Error::IndexOutOfBounds)?; 238 | let idx_right = idx_right.ok_or(Error::IndexOutOfBounds)?; 239 | 240 | self.empty_intervals[idx_left].0.end = self.empty_intervals[idx_right].0.end; 241 | self.empty_intervals.remove(idx_right); 242 | } else if left { 243 | // Decrement the left interval 244 | let r1 = MerkleRange::from(idx - 1..idx); 245 | let mut r1_idx = None; 246 | 247 | for (i, r) in self.empty_intervals.iter().enumerate() { 248 | if r == &r1 { 249 | r1_idx = Some(i); 250 | } 251 | } 252 | 253 | // If the interval is not found, then we have unreachable code since the 254 | // `node_is_empty` check was performed 255 | let r1_idx = r1_idx.ok_or(Error::IndexOutOfBounds)?; 256 | 257 | // Decrement the range, if the number of elements is greater than 1 258 | if self.empty_intervals[r1_idx].0.end == self.empty_intervals[r1_idx].0.start + 1 { 259 | self.empty_intervals.remove(r1_idx); 260 | } else { 261 | self.empty_intervals[r1_idx].0.end -= 1; 262 | } 263 | } else if right { 264 | // Increment the left start interval 265 | let r1 = MerkleRange::from(idx + 1..idx + 2); 266 | let mut r1_idx = None; 267 | 268 | for (i, r) in self.empty_intervals.iter().enumerate() { 269 | if r == &r1 { 270 | r1_idx = Some(i); 271 | } 272 | } 273 | 274 | // If the interval is not found, then we have unreachable code since the 275 | // `node_is_empty` check was performed 276 | let r1_idx = r1_idx.ok_or(Error::IndexOutOfBounds)?; 277 | self.empty_intervals[r1_idx].0.start += 1; 278 | 279 | // Increments the range, if the number of elements is greater than 1 280 | if self.empty_intervals[r1_idx].0.end == self.empty_intervals[r1_idx].0.start + 1 { 281 | self.empty_intervals.remove(r1_idx); 282 | } else { 283 | self.empty_intervals[r1_idx].0.start += 1; 284 | } 285 | } else { 286 | // If there is no adjacent empty interval, then create an interval of its own 287 | self.empty_intervals.push((idx..idx + 1).into()); 288 | } 289 | 290 | self.modified(idx) 291 | } 292 | 293 | /// Flag the base idx as modified, and delete all sub-trees from the cache 294 | fn modified(&mut self, idx: usize) -> Result<(), Error> { 295 | let mut coord = MerkleCoord::new(self.height, idx); 296 | 297 | loop { 298 | coord.descend(1); 299 | 300 | let c: Vec = coord.try_into()?; 301 | self.cache 302 | .delete(c.as_slice()) 303 | .map_err(|e| Error::Other(e.to_string()))?; 304 | 305 | if coord.height == 0 { 306 | break; 307 | } 308 | } 309 | 310 | Ok(()) 311 | } 312 | 313 | /// Fetch a node of the tree for the provided coordinates 314 | pub fn node(&mut self, height: usize, idx: usize) -> 
Result, Error> 315 | where 316 | Scalar: ops::Mul, 317 | { 318 | if height == self.height { 319 | // Fetch directly from db 320 | MerkleCoord::new(height, idx).fetch_leaf(&self.db) 321 | } else if self.node_is_empty(height, idx) { 322 | // Fetch a precalculated null node 323 | if height == self.height { 324 | Ok(None) 325 | } else { 326 | // TODO Generate a precalculated height for null sub-trees 327 | Ok(Some(T::from(0u64))) 328 | } 329 | } else { 330 | // Calculate the node 331 | let coord = MerkleCoord::new(height, idx); 332 | let should_cache = (height % CACHE_HEIGHT_INTERVAL) == 0; 333 | 334 | let node = if should_cache { 335 | coord.fetch_leaf::(&self.cache)? 336 | } else { 337 | None 338 | }; 339 | 340 | if node.is_some() { 341 | return Ok(node); 342 | } 343 | 344 | let mut h = Poseidon::default(); 345 | 346 | let needle = idx * MERKLE_ARITY; 347 | for i in 0..MERKLE_ARITY { 348 | if let Some(n) = self.node(height + 1, needle + i)? { 349 | h.insert_unchecked(i, n); 350 | } 351 | } 352 | 353 | let node = h.hash(); 354 | if should_cache { 355 | coord.persist_leaf(&self.cache, node)?; 356 | } 357 | 358 | Ok(Some(node)) 359 | } 360 | } 361 | 362 | /// Generate a proof of membership for the provided leaf index 363 | pub fn proof(&mut self, mut needle: usize) -> Result, Error> 364 | where 365 | Scalar: ops::Mul, 366 | { 367 | let mut proof = BigProof::new(); 368 | let mut leaves = [None; MERKLE_ARITY]; 369 | 370 | for row in 0..self.height { 371 | let from = MERKLE_ARITY * (needle / MERKLE_ARITY); 372 | let idx = needle % MERKLE_ARITY; 373 | 374 | for i in 0..MERKLE_ARITY { 375 | leaves[i] = self.node(self.height - row, from + i)?; 376 | } 377 | 378 | proof.push(idx, leaves); 379 | needle /= MERKLE_ARITY; 380 | } 381 | 382 | Ok(proof) 383 | } 384 | 385 | /// Calculate and return the root of the merkle tree. 386 | pub fn root(&mut self) -> Result 387 | where 388 | Scalar: ops::Mul, 389 | { 390 | let (tx, rx) = mpsc::channel(); 391 | let rx = Mutex::new(rx); 392 | let rx = Arc::new(rx); 393 | 394 | let mut handles: Vec>> = vec![]; 395 | 396 | let segments = self.segments(); 397 | for s in segments { 398 | tx.send(s).map_err(|e| Error::Other(e.to_string()))?; 399 | } 400 | 401 | for _ in 0..num_cpus::get() { 402 | let worker = Arc::clone(&rx); 403 | let mut tree = self.clone(); 404 | 405 | handles.push(thread::spawn(move || { 406 | while let Some(c) = worker 407 | .lock() 408 | .map(|r| r.recv().ok()) 409 | .map_err(|e| Error::Other(e.to_string()))? 
410 | { 411 | tree.node(c.height, c.idx)?; 412 | } 413 | 414 | Ok(()) 415 | })); 416 | } 417 | 418 | for h in handles { 419 | h.join().unwrap().unwrap(); 420 | } 421 | 422 | self.node(0, 0).and_then(|n| { 423 | n.ok_or(Error::Other( 424 | "It was not possible to obtain the root node from the merkle tree.".to_owned(), 425 | )) 426 | }) 427 | } 428 | } 429 | 430 | #[cfg(test)] 431 | pub fn big_merkle_default(path: &str) -> BigMerkleTree { 432 | // 2^34 433 | let width = 17179869184; 434 | let db_path = TempDir::new(path).map(|t| t.into_path()).unwrap(); 435 | 436 | let cache_path = format!("{}-cache", path); 437 | let cache_path = TempDir::new(cache_path.as_str()) 438 | .map(|t| t.into_path()) 439 | .unwrap(); 440 | 441 | BigMerkleTree::new(db_path, cache_path, width).unwrap() 442 | } 443 | 444 | #[cfg(test)] 445 | mod tests { 446 | use super::big_merkle_default; 447 | 448 | #[test] 449 | fn big_merkle_empty() { 450 | let mut merkle = big_merkle_default("big_merkle_empty"); 451 | let idx = merkle.width() / 3; 452 | 453 | assert!(merkle.node_is_empty(0, 0)); 454 | assert!(merkle.node_is_empty(merkle.height(), idx)); 455 | 456 | merkle.inserted(idx).unwrap(); 457 | 458 | assert!(!merkle.node_is_empty(0, 0)); 459 | assert!(!merkle.node_is_empty(merkle.height(), idx)); 460 | assert!(merkle.node_is_empty(merkle.height(), idx - 1)); 461 | assert!(merkle.node_is_empty(merkle.height(), idx + 1)); 462 | 463 | merkle.inserted(0).unwrap(); 464 | assert!(!merkle.node_is_empty(merkle.height(), 0)); 465 | } 466 | } 467 | -------------------------------------------------------------------------------- /src/big_merkle/proof.rs: -------------------------------------------------------------------------------- 1 | use crate::{Poseidon, PoseidonLeaf, Scalar, MERKLE_ARITY}; 2 | 3 | use std::ops; 4 | 5 | /// Set of pairs (idx, Hash) to reconstruct the merkle root. 6 | /// For every level of the tree, 7 | /// Required information to reconstruct the merkle root. 8 | /// 9 | /// For every level of the tree, there is an index, and a slice of leaves. 10 | /// 11 | /// The index will be the position in which the previously calculated information should be 12 | /// inserted. 13 | /// 14 | /// The leaves will define the other elements required to perform the hash for that level of the 15 | /// tree. 
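///
/// A minimal sketch (it assumes the `big-merkle` feature and a width of `64`; the paths
/// are hypothetical, and a real caller would use throwaway directories as the tests do):
///
/// ```ignore
/// use dusk_poseidon_merkle::{BigMerkleTree, Scalar};
///
/// let mut tree: BigMerkleTree<Scalar> =
///     BigMerkleTree::new("/tmp/bm-db", "/tmp/bm-cache", 64).unwrap();
/// tree.insert(21, Scalar::from(21u64)).unwrap();
///
/// let root = tree.root().unwrap();
/// let proof = tree.proof(21).unwrap();
/// assert!(proof.verify(&Scalar::from(21u64), &root));
/// ```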
16 | #[derive(Debug, Clone, PartialEq)] 17 | pub struct BigProof { 18 | data: Vec<(usize, [Option; MERKLE_ARITY])>, 19 | } 20 | 21 | impl BigProof { 22 | /// BigProof constructor 23 | pub fn new() -> Self { 24 | BigProof { data: vec![] } 25 | } 26 | 27 | pub(crate) fn push(&mut self, idx: usize, leaves: [Option; MERKLE_ARITY]) { 28 | self.data.push((idx, leaves)) 29 | } 30 | 31 | /// Return the raw proof data 32 | pub fn data(&self) -> &Vec<(usize, [Option; MERKLE_ARITY])> { 33 | &self.data 34 | } 35 | 36 | /// Verify if the provided leaf corresponds to the proof in the merkle construction 37 | pub fn verify(&self, leaf: &T, root: &T) -> bool 38 | where 39 | Scalar: ops::Mul, 40 | { 41 | let mut leaf = *leaf; 42 | let mut h = Poseidon::default(); 43 | 44 | self.data.iter().for_each(|(idx, data)| { 45 | h.replace(&data[0..MERKLE_ARITY]); 46 | h.insert_unchecked(*idx, leaf); 47 | 48 | leaf = h.hash(); 49 | }); 50 | 51 | &leaf == root 52 | } 53 | } 54 | 55 | #[cfg(test)] 56 | mod tests { 57 | use super::super::big_merkle_default; 58 | use crate::*; 59 | 60 | #[test] 61 | fn big_proof_verify() { 62 | let mut t = big_merkle_default("big_proof_verify"); 63 | for i in 0..64 { 64 | t.insert(i, Scalar::from(i as u64)).unwrap(); 65 | } 66 | 67 | let root = t.root().unwrap(); 68 | let i = 21; 69 | 70 | let proof = t.proof(i).unwrap(); 71 | assert!(proof.verify(&Scalar::from(i as u64), &root)); 72 | } 73 | 74 | #[test] 75 | fn big_proof_verify_failure() { 76 | let mut t = big_merkle_default("big_proof_verify_failure"); 77 | for i in 0..64 { 78 | t.insert(i, Scalar::from(i as u64)).unwrap(); 79 | } 80 | 81 | let root = t.root().unwrap(); 82 | let i = 21; 83 | 84 | let proof = t.proof(i + 1).unwrap(); 85 | assert!(!proof.verify(&Scalar::from(i as u64), &root)); 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /src/constants.rs: -------------------------------------------------------------------------------- 1 | // Poseidon constants 2 | pub(crate) const WIDTH: usize = 5; 3 | pub(crate) const FULL_ROUNDS: usize = 8; 4 | pub(crate) const PARTIAL_ROUNDS: usize = 59; 5 | 6 | // Merkle constants 7 | /// Arity of the merkle tree 8 | pub const MERKLE_ARITY: usize = 4; 9 | /// Width of the merkle tree 10 | pub const MERKLE_WIDTH: usize = 64; 11 | pub(crate) const MERKLE_HEIGHT: usize = 3; 12 | 13 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::{error, fmt}; 2 | 3 | #[derive(Debug, Clone)] 4 | /// Possible error states for the hashing. 5 | pub enum Error { 6 | /// The allowed number of leaves cannot be greater than the arity of the tree. 7 | FullBuffer, 8 | /// Attempt to reference an index element that is out of bounds 9 | IndexOutOfBounds, 10 | /// The provided leaf was not found in the tree 11 | LeafNotFound, 12 | /// Other errors 13 | Other(String), 14 | } 15 | 16 | impl error::Error for Error {} 17 | 18 | impl fmt::Display for Error { 19 | fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { 20 | match self { 21 | Error::FullBuffer => write!( 22 | f, 23 | "The size of the buffer cannot be greater than the arity of the merkle tree." 
24 | ), 25 | Error::IndexOutOfBounds => write!(f, "The referenced index is out of bounds."), 26 | Error::LeafNotFound => write!(f, "The provided leaf is not present in the tree."), 27 | Error::Other(s) => write!(f, "{}", s), 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(external_doc)] 2 | #![deny(missing_docs)] 3 | #![doc(include = "../README.md")] 4 | 5 | use std::ops; 6 | 7 | use lazy_static::*; 8 | #[cfg(feature = "big-merkle")] 9 | use serde::{Deserialize, Serialize}; 10 | 11 | pub use crate::poseidon::Poseidon; 12 | pub use curve25519_dalek::scalar::Scalar; 13 | pub use error::Error; 14 | pub use merkle::MerkleTree; 15 | pub use proof::Proof; 16 | 17 | #[cfg(feature = "big-merkle")] 18 | pub use big_merkle::{BigMerkleTree, BigProof, MerkleCoord, MerkleRange}; 19 | 20 | mod error; 21 | mod merkle; 22 | mod poseidon; 23 | mod proof; 24 | 25 | #[cfg(feature = "big-merkle")] 26 | mod big_merkle; 27 | 28 | include!("constants.rs"); 29 | 30 | lazy_static! { 31 | static ref ROUND_CONSTANTS: [Scalar; 960] = { 32 | let bytes = include_bytes!("../assets/ark.bin"); 33 | unsafe { std::ptr::read(bytes.as_ptr() as *const _) } 34 | }; 35 | static ref MDS_MATRIX: [[Scalar; WIDTH]; WIDTH] = { 36 | let bytes = include_bytes!("../assets/mds.bin"); 37 | assert_eq!(bytes.len(), (WIDTH * WIDTH) << 5); 38 | unsafe { std::ptr::read(bytes.as_ptr() as *const _) } 39 | }; 40 | } 41 | 42 | /// The items for the [`MerkleTree`] and [`Poseidon`] must implement this trait 43 | /// 44 | /// The implementation must be serializable for the [`BigMerkleTree`] storage 45 | #[cfg(feature = "big-merkle")] 46 | pub trait PoseidonLeaf: 47 | Copy 48 | + From<u64> 49 | + From<Scalar> 50 | + From<[u8; 32]> 51 | + PartialEq 52 | + ops::MulAssign 53 | + ops::AddAssign 54 | + Serialize 55 | + for<'d> Deserialize<'d> 56 | + Send 57 | + Sync 58 | { 59 | } 60 | 61 | /// The items for the [`MerkleTree`] and [`Poseidon`] must implement this trait 62 | #[cfg(not(feature = "big-merkle"))] 63 | pub trait PoseidonLeaf: 64 | Copy + From<u64> + From<Scalar> + PartialEq + ops::MulAssign + ops::AddAssign 65 | { 66 | } 67 | 68 | impl PoseidonLeaf for Scalar {} 69 | 70 | #[cfg(test)] 71 | mod tests { 72 | use crate::*; 73 | 74 | #[test] 75 | fn constants_consistency() { 76 | // Make sure we have enough constants for the sbox rounds 77 | assert!(WIDTH * (FULL_ROUNDS + PARTIAL_ROUNDS) <= ROUND_CONSTANTS.len()); 78 | 79 | // Sanity check for the arity 80 | assert!(MERKLE_ARITY > 1); 81 | 82 | // Sanity check for the height 83 | assert!(MERKLE_HEIGHT > 1); 84 | 85 | // Enforce a relation between the provided MDS matrix and the arity of the merkle tree 86 | assert_eq!(WIDTH, MERKLE_ARITY + 1); 87 | 88 | // Enforce at least one level for the merkle tree 89 | assert!(MERKLE_WIDTH > MERKLE_ARITY); 90 | 91 | // Make sure the defined arity is consistent with the defined width 92 | assert_eq!( 93 | MERKLE_ARITY.pow(std::cmp::max(2, MERKLE_HEIGHT as u32)), 94 | MERKLE_WIDTH 95 | ); 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/merkle.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Error, Poseidon, PoseidonLeaf, Proof, Scalar, MERKLE_ARITY, MERKLE_HEIGHT, MERKLE_WIDTH, 3 | }; 4 | use std::ops; 5 | 6 | /// The merkle tree will accept up to `MERKLE_WIDTH` leaves.
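///
/// # Example
///
/// A minimal sketch, assuming the default build constants (arity `4`, width `64`):
///
/// ```
/// use dusk_poseidon_merkle::{MerkleTree, Scalar};
///
/// let mut tree = MerkleTree::<Scalar>::default();
/// tree.insert_unchecked(0, Scalar::one());
///
/// let root = tree.root();
/// let proof = tree.proof(&Scalar::one()).unwrap();
/// assert!(proof.verify(&Scalar::one(), &root));
/// ```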
7 | #[derive(Copy, Clone)] 8 | pub struct MerkleTree { 9 | root: Option, 10 | leaves: [Option; MERKLE_WIDTH], 11 | raw: [[Option; MERKLE_WIDTH]; MERKLE_HEIGHT + 1], 12 | } 13 | 14 | impl Default for MerkleTree { 15 | fn default() -> Self { 16 | MerkleTree { 17 | raw: [[None; MERKLE_WIDTH]; MERKLE_HEIGHT + 1], 18 | root: None, 19 | leaves: [None; MERKLE_WIDTH], 20 | } 21 | } 22 | } 23 | 24 | impl MerkleTree { 25 | /// Return a reference to the provided leaves 26 | pub fn leaves(&self) -> &[Option; MERKLE_WIDTH] { 27 | &self.leaves 28 | } 29 | 30 | /// Insert the provided leaf in the defined position. 31 | /// 32 | /// # Panics 33 | /// 34 | /// Panics if `index` is out of bounds. 35 | pub fn insert_unchecked(&mut self, index: usize, leaf: T) { 36 | self.root = None; 37 | self.leaves[index].replace(leaf); 38 | } 39 | 40 | /// Set the provided leaf index as absent for the hash calculation. 41 | /// 42 | /// # Panics 43 | /// 44 | /// Panics if `index` is out of bounds. 45 | pub fn remove_unchecked(&mut self, index: usize) -> Option { 46 | self.root = None; 47 | self.leaves[index].take() 48 | } 49 | 50 | /// Generate a proof of membership for the provided leaf 51 | pub fn proof(&mut self, leaf: &T) -> Result, Error> 52 | where 53 | Scalar: ops::Mul, 54 | { 55 | self.leaves 56 | .iter() 57 | .enumerate() 58 | .fold(None, |mut idx, (i, il)| { 59 | if let Some(l) = il { 60 | if idx.is_none() && l == leaf { 61 | idx.replace(i); 62 | } 63 | } 64 | 65 | idx 66 | }) 67 | .ok_or(Error::LeafNotFound) 68 | .map(|i| self.proof_index(i)) 69 | } 70 | 71 | /// Generate a proof of membership for the provided leaf index 72 | /// 73 | /// # Panics 74 | /// 75 | /// Panics if `index` is out of bounds. 76 | pub fn proof_index(&mut self, mut needle: usize) -> Proof 77 | where 78 | Scalar: ops::Mul, 79 | { 80 | self.root(); 81 | let mut proof = Proof::default(); 82 | 83 | for row in 0..MERKLE_HEIGHT { 84 | let from = MERKLE_ARITY * (needle / MERKLE_ARITY); 85 | let to = from + MERKLE_ARITY; 86 | let idx = needle % MERKLE_ARITY; 87 | 88 | proof.push(idx, &self.raw[row][from..to]); 89 | needle /= MERKLE_ARITY; 90 | } 91 | 92 | proof 93 | } 94 | 95 | /// Calculate and return the root of the merkle tree. 
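///
/// The result is cached: once computed, the same value is returned until a leaf is
/// inserted or removed, which clears the cache. A minimal sketch, assuming the default
/// build constants:
///
/// ```
/// use dusk_poseidon_merkle::{MerkleTree, Scalar};
///
/// let mut tree = MerkleTree::<Scalar>::default();
/// tree.insert_unchecked(0, Scalar::one());
/// let first = tree.root();
///
/// // Mutating the leaves invalidates the cached root.
/// tree.insert_unchecked(1, Scalar::one());
/// assert_ne!(first, tree.root());
/// ```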
96 | pub fn root(&mut self) -> T 97 | where 98 | Scalar: ops::Mul, 99 | { 100 | if let Some(s) = self.root { 101 | return s; 102 | } 103 | 104 | self.raw[0].copy_from_slice(&self.leaves); 105 | for i in 1..self.raw.len() { 106 | self.raw[i].copy_from_slice(&[None; MERKLE_WIDTH]); 107 | } 108 | 109 | let mut merkle = MERKLE_WIDTH; 110 | let mut h = Poseidon::default(); 111 | 112 | for raw_index in 1..MERKLE_HEIGHT + 1 { 113 | for i in (0..merkle).step_by(MERKLE_ARITY) { 114 | let from = i; 115 | let to = i + MERKLE_ARITY; 116 | let idx = to / MERKLE_ARITY - 1; 117 | 118 | h.replace(&self.raw[raw_index - 1][from..to]); 119 | self.raw[raw_index][idx] = Some(h.hash()); 120 | } 121 | 122 | merkle /= MERKLE_ARITY; 123 | } 124 | 125 | self.root = self.raw[MERKLE_HEIGHT][0]; 126 | match self.root { 127 | Some(s) => s, 128 | None => unreachable!(), 129 | } 130 | } 131 | } 132 | 133 | #[cfg(test)] 134 | mod tests { 135 | use crate::*; 136 | 137 | #[test] 138 | fn merkle() { 139 | let mut t = MerkleTree::default(); 140 | t.insert_unchecked(0, Scalar::one()); 141 | let root = t.root(); 142 | assert_ne!(Scalar::zero(), root) 143 | } 144 | 145 | #[test] 146 | fn merkle_pad() { 147 | let mut t = MerkleTree::default(); 148 | t.insert_unchecked(0, Scalar::one()); 149 | let root = t.root(); 150 | 151 | let mut t = MerkleTree::default(); 152 | t.insert_unchecked(0, Scalar::one()); 153 | t.insert_unchecked(1, Scalar::zero()); 154 | 155 | assert_ne!(t.root(), root) 156 | } 157 | 158 | #[test] 159 | fn merkle_det() { 160 | let mut v = vec![]; 161 | for i in 0..MERKLE_ARITY { 162 | v.push(Scalar::from(i as u64)); 163 | } 164 | 165 | let mut t = MerkleTree::default(); 166 | v.iter() 167 | .enumerate() 168 | .for_each(|(i, s)| t.insert_unchecked(i, *s)); 169 | let root = t.root(); 170 | 171 | let mut t = MerkleTree::default(); 172 | v.iter() 173 | .enumerate() 174 | .for_each(|(i, s)| t.insert_unchecked(i, *s)); 175 | 176 | assert_eq!(t.root(), root) 177 | } 178 | 179 | #[test] 180 | fn merkle_sanity_proof() { 181 | let base = Scalar::one(); 182 | let mut t = MerkleTree::default(); 183 | t.insert_unchecked(0, base); 184 | 185 | let root = t.root(); 186 | 187 | let mut h = Poseidon::default(); 188 | h.push(base).unwrap(); 189 | let mut main_path = h.hash(); 190 | 191 | h.reset(); 192 | let mut round_void = h.hash(); 193 | let mut void: Vec> = std::iter::repeat(Some(round_void)) 194 | .take(MERKLE_ARITY) 195 | .collect(); 196 | 197 | for _ in 0..MERKLE_HEIGHT - 1 { 198 | h.replace(void.as_slice()); 199 | round_void = h.hash(); 200 | 201 | void[0] = Some(main_path); 202 | h.replace(void.as_slice()); 203 | main_path = h.hash(); 204 | 205 | void = std::iter::repeat(Some(round_void)) 206 | .take(MERKLE_ARITY) 207 | .collect(); 208 | } 209 | 210 | assert_eq!(root, main_path); 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /src/poseidon.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | Error, PoseidonLeaf, Scalar, FULL_ROUNDS, MDS_MATRIX, MERKLE_ARITY, PARTIAL_ROUNDS, 3 | ROUND_CONSTANTS, WIDTH, 4 | }; 5 | use std::ops; 6 | 7 | /// The `Poseidon` structure will accept a number of inputs equal to the arity. 8 | /// 9 | /// The leaves must implement [`ops::Mul`] against a [`Scalar`], because the MDS matrix and the 10 | /// round constants are set, by default, as scalars. 
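///
/// # Example
///
/// A minimal sketch, assuming the default arity of `4`; the hash is deterministic for a
/// given set of leaves.
///
/// ```
/// use dusk_poseidon_merkle::{Poseidon, Scalar, MERKLE_ARITY};
///
/// let mut a = Poseidon::default();
/// let mut b = Poseidon::default();
///
/// for i in 0..MERKLE_ARITY {
///     a.push(Scalar::from(i as u64)).unwrap();
///     b.push(Scalar::from(i as u64)).unwrap();
/// }
///
/// assert_eq!(a.hash(), b.hash());
/// ```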
11 | #[derive(Debug, Clone, Copy, PartialEq)] 12 | pub struct Poseidon { 13 | constants_offset: usize, 14 | present_elements: u64, 15 | pos: usize, 16 | leaves: [T; WIDTH], 17 | } 18 | 19 | impl Default for Poseidon { 20 | fn default() -> Self { 21 | Poseidon { 22 | present_elements: 0u64, 23 | constants_offset: 0, 24 | pos: 1, 25 | leaves: [T::from(0u64); WIDTH], 26 | } 27 | } 28 | } 29 | 30 | impl Poseidon { 31 | /// The poseidon width will be defined by `arity + 1`, because the first element will be a set of bitflags defining which element is present or absent. The absent elements will be represented by `0`, and the present ones by `1`, considering inverse order. 32 | /// 33 | /// For example: given we have an arity of `8`, and if we have two present elements, three absent, and three present, we will have the first element as `0xe3`, or `(11100011)`. 34 | /// 35 | /// Every time we push an element, we set the related bitflag with the proper state. 36 | /// 37 | /// The returned `usize` represents the leaf position for the insert operation 38 | pub fn push(&mut self, leaf: T) -> Result { 39 | // Cannot input more elements than the defined arity 40 | if self.pos > MERKLE_ARITY { 41 | return Err(Error::FullBuffer); 42 | } 43 | 44 | self.insert_unchecked(self.pos - 1, leaf); 45 | self.pos += 1; 46 | 47 | Ok(self.pos - 2) 48 | } 49 | 50 | /// Insert the provided leaf in the defined position. 51 | /// 52 | /// # Panics 53 | /// 54 | /// Panics if `index` is out of bounds. 55 | pub(crate) fn insert_unchecked(&mut self, index: usize, leaf: T) { 56 | let mut mask = 1u64; 57 | mask <<= index; 58 | self.present_elements |= mask; 59 | 60 | // Set current element, and increase the pointer 61 | self.leaves[index + 1] = leaf; 62 | } 63 | 64 | /// Removes an item that is indexed by `index`. 65 | /// 66 | /// Internally, the buffer is stored in form of `index + 1`, because of the leading bitflags 67 | /// element. 68 | /// 69 | /// # Example 70 | /// ``` 71 | /// use dusk_poseidon_merkle::*; 72 | /// 73 | /// let mut h = Poseidon::default(); 74 | /// 75 | /// assert!(h.remove(0).is_err()); 76 | /// 77 | /// let idx = h.push(Scalar::one()).unwrap(); 78 | /// assert_eq!(0, idx); 79 | /// 80 | /// h.remove(0).unwrap(); 81 | /// ``` 82 | pub fn remove(&mut self, index: usize) -> Result { 83 | let index = index + 1; 84 | if index >= self.pos { 85 | return Err(Error::IndexOutOfBounds); 86 | } 87 | 88 | Ok(self.remove_unchecked(index)) 89 | } 90 | 91 | /// Removes the first equivalence of the item from the leafs set and returns it. 92 | pub fn remove_item(&mut self, item: &T) -> Option { 93 | self.leaves 94 | .iter() 95 | .enumerate() 96 | .fold(None, |mut acc, (i, s)| { 97 | if acc.is_none() && i > 0 && s == item { 98 | acc.replace(i); 99 | } 100 | 101 | acc 102 | }) 103 | .map(|idx| self.remove_unchecked(idx)) 104 | } 105 | 106 | /// Set the provided index as absent for the hash calculation. 107 | /// 108 | /// # Panics 109 | /// 110 | /// Panics if `index` is out of bounds. 111 | pub fn remove_unchecked(&mut self, index: usize) -> T { 112 | let leaf = self.leaves[index]; 113 | self.leaves[index] = T::from(0u64); 114 | 115 | let mut mask = 1u64; 116 | mask <<= index; 117 | self.present_elements &= !mask; 118 | 119 | leaf 120 | } 121 | 122 | /// Replace the leaves with the provided optional items. 123 | /// 124 | /// # Panics 125 | /// 126 | /// Panics if the provided slice is bigger than the arity. 
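///
/// # Example
///
/// A minimal sketch, assuming the default arity of `4`: the buffer is reset first, so only
/// the provided items are present afterwards.
///
/// ```
/// use dusk_poseidon_merkle::{Poseidon, Scalar};
///
/// let mut h = Poseidon::default();
/// h.push(Scalar::one()).unwrap();
///
/// // Index 0 becomes absent and index 1 holds a leaf; the earlier push is discarded.
/// h.replace(&[None, Some(Scalar::from(2u64))]);
/// h.hash();
/// ```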
127 | pub fn replace(&mut self, buf: &[Option]) { 128 | self.reset(); 129 | buf.iter().enumerate().for_each(|(i, scalar)| { 130 | if let Some(s) = scalar { 131 | self.insert_unchecked(i, *s); 132 | } 133 | }); 134 | } 135 | 136 | /// Restore the initial state 137 | pub fn reset(&mut self) { 138 | self.present_elements = 0; 139 | self.constants_offset = 0; 140 | self.pos = 1; 141 | self.leaves.iter_mut().for_each(|l| *l = T::from(0u64)); 142 | } 143 | 144 | /// The absent elements will be considered as zeroes in the permutation. 145 | /// 146 | /// The number of rounds is divided into two equal parts for the full rounds, plus the partial rounds. 147 | /// 148 | /// The returned element is the second poseidon leaf, for the first is initially the bitflags scheme. 149 | pub fn hash(&mut self) -> T 150 | where 151 | Scalar: ops::Mul, 152 | { 153 | // The first element is a set of bitflags to differentiate zeroed leaves from absent 154 | // ones 155 | // 156 | // This avoids collisions 157 | self.leaves[0] = T::from(self.present_elements); 158 | 159 | // This counter is incremented when a round constants is read. Therefore, the round constants never 160 | // repeat 161 | for _ in 0..FULL_ROUNDS / 2 { 162 | self.full_round(); 163 | } 164 | 165 | for _ in 0..PARTIAL_ROUNDS { 166 | self.partial_round(); 167 | } 168 | 169 | for _ in 0..FULL_ROUNDS / 2 { 170 | self.full_round(); 171 | } 172 | 173 | // The first bitflags element is discarded, so we can use the first actual leaf as a result 174 | // of the hash 175 | self.leaves[1] 176 | } 177 | 178 | /// The full round function will add the round constants and apply the S-Box to all poseidon leaves, including the bitflags first element. 179 | /// 180 | /// After that, the poseidon elements will be set to the result of the product between the poseidon leaves and the constant MDS matrix. 181 | pub fn full_round(&mut self) 182 | where 183 | Scalar: ops::Mul, 184 | { 185 | // Every element of the merkle tree, plus the bitflag, is incremented by the round constants 186 | self.add_round_constants(); 187 | 188 | // Apply the quintic S-Box to all elements 189 | self.leaves.iter_mut().for_each(|l| quintic_s_box(l)); 190 | 191 | // Multiply the elements by the constant MDS matrix 192 | self.product_mds(); 193 | } 194 | 195 | /// The partial round is the same as the full round, with the difference that we apply the S-Box only to the first bitflags poseidon leaf. 
196 | pub fn partial_round(&mut self) 197 | where 198 | Scalar: ops::Mul, 199 | { 200 | // Every element of the merkle tree, plus the bitflag, is incremented by the round constants 201 | self.add_round_constants(); 202 | 203 | // Apply the quintic S-Box to the bitflags element 204 | quintic_s_box(&mut self.leaves[0]); 205 | 206 | // Multiply the elements by the constant MDS matrix 207 | self.product_mds(); 208 | } 209 | 210 | /// For every leaf, add the round constants with index defined by the constants offset, and increment the 211 | /// offset 212 | fn add_round_constants(&mut self) { 213 | let mut constants_offset = self.constants_offset; 214 | 215 | self.leaves.iter_mut().for_each(|l| { 216 | *l += T::from(ROUND_CONSTANTS[constants_offset]); 217 | constants_offset += 1; 218 | }); 219 | 220 | self.constants_offset = constants_offset; 221 | } 222 | 223 | /// Set the provided leaves with the result of the product between the leaves and the constant 224 | /// MDS matrix 225 | fn product_mds(&mut self) 226 | where 227 | Scalar: ops::Mul, 228 | { 229 | let mut result = [T::from(0u64); WIDTH]; 230 | 231 | for j in 0..WIDTH { 232 | for k in 0..WIDTH { 233 | result[j] += MDS_MATRIX[j][k] * self.leaves[k]; 234 | } 235 | } 236 | 237 | self.leaves.copy_from_slice(&result); 238 | } 239 | } 240 | 241 | /// Apply the quintic S-Box (s^5) to a given item 242 | fn quintic_s_box(l: &mut T) 243 | where 244 | T: Copy + ops::MulAssign, 245 | { 246 | let c = *l; 247 | for _ in 0..4 { 248 | *l *= c; 249 | } 250 | } 251 | 252 | #[cfg(test)] 253 | mod tests { 254 | use crate::*; 255 | 256 | #[test] 257 | fn reset() { 258 | let mut h = Poseidon::default(); 259 | for _ in 0..MERKLE_ARITY { 260 | h.push(Scalar::one()).unwrap(); 261 | } 262 | h.hash(); 263 | h.reset(); 264 | 265 | assert_eq!(Poseidon::default(), h); 266 | } 267 | 268 | #[test] 269 | fn hash_det() { 270 | let mut h = Poseidon::default(); 271 | h.push(Scalar::one()).unwrap(); 272 | 273 | let mut h2 = h.clone(); 274 | let result = h.hash(); 275 | 276 | assert_eq!(result, h2.hash()); 277 | } 278 | } 279 | -------------------------------------------------------------------------------- /src/proof.rs: -------------------------------------------------------------------------------- 1 | use crate::{Poseidon, PoseidonLeaf, Scalar, MERKLE_ARITY, MERKLE_HEIGHT}; 2 | use std::ops; 3 | 4 | /// Set of pairs (idx, Hash) to reconstruct the merkle root. 5 | /// For every level of the tree, 6 | /// Required information to reconstruct the merkle root. 7 | /// 8 | /// For every level of the tree, there is an index, and a slice of leaves. 9 | /// 10 | /// The index will be the position in which the previously calculated information should be 11 | /// inserted. 12 | /// 13 | /// The leaves will define the other elements required to perform the hash for that level of the 14 | /// tree. 
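///
/// # Example
///
/// A minimal sketch, assuming the default build constants (arity `4`, width `64`, hence
/// height `3`):
///
/// ```
/// use dusk_poseidon_merkle::{MerkleTree, Scalar};
///
/// let mut tree = MerkleTree::<Scalar>::default();
/// tree.insert_unchecked(0, Scalar::from(7u64));
///
/// let root = tree.root();
/// let proof = tree.proof(&Scalar::from(7u64)).unwrap();
///
/// // One (index, siblings) pair per level of the tree.
/// assert_eq!(proof.data().len(), 3);
/// assert!(proof.verify(&Scalar::from(7u64), &root));
/// ```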
15 | #[derive(Debug, Copy, Clone, PartialEq)] 16 | pub struct Proof { 17 | pos: usize, 18 | data: [(usize, [Option; MERKLE_ARITY]); MERKLE_HEIGHT], 19 | } 20 | 21 | impl Default for Proof { 22 | fn default() -> Self { 23 | Proof { 24 | pos: 0, 25 | data: [(0, [None; MERKLE_ARITY]); MERKLE_HEIGHT], 26 | } 27 | } 28 | } 29 | 30 | impl Proof { 31 | pub(crate) fn push(&mut self, idx: usize, leaves: &[Option]) { 32 | let (i, proof) = &mut self.data[self.pos]; 33 | 34 | proof.copy_from_slice(leaves); 35 | *i = idx; 36 | 37 | self.pos += 1; 38 | } 39 | 40 | /// Return the raw proof data 41 | pub fn data(&self) -> &[(usize, [Option; MERKLE_ARITY]); MERKLE_HEIGHT] { 42 | &self.data 43 | } 44 | 45 | /// Verify if the provided leaf corresponds to the proof in the merkle construction 46 | pub fn verify(&self, leaf: &T, root: &T) -> bool 47 | where 48 | Scalar: ops::Mul, 49 | { 50 | let mut leaf = *leaf; 51 | let mut h = Poseidon::default(); 52 | 53 | for i in 0..self.data.len() { 54 | let (idx, data) = self.data[i]; 55 | 56 | h.replace(&data[0..MERKLE_ARITY]); 57 | h.insert_unchecked(idx, leaf); 58 | 59 | leaf = h.hash(); 60 | } 61 | 62 | &leaf == root 63 | } 64 | } 65 | 66 | #[cfg(test)] 67 | mod tests { 68 | use crate::*; 69 | 70 | #[test] 71 | fn proof_verify() { 72 | let mut t = MerkleTree::::default(); 73 | for i in 0..MERKLE_WIDTH { 74 | t.insert_unchecked(i, Scalar::from(i as u64)); 75 | } 76 | 77 | let root = t.root(); 78 | let i = MERKLE_WIDTH / 3; 79 | 80 | let proof = t.proof_index(i); 81 | assert!(proof.verify(&Scalar::from(i as u64), &root)); 82 | } 83 | 84 | #[test] 85 | fn proof_verify_failure() { 86 | let mut t = MerkleTree::::default(); 87 | for i in 0..MERKLE_WIDTH { 88 | t.insert_unchecked(i, Scalar::from(i as u64)); 89 | } 90 | 91 | let root = t.root(); 92 | let i = MERKLE_WIDTH / 3; 93 | 94 | let proof = t.proof_index(i + 1); 95 | assert!(!proof.verify(&Scalar::from(i as u64), &root)); 96 | } 97 | } 98 | --------------------------------------------------------------------------------