├── .github
│   └── workflows
│       ├── build-and-test.yml
│       └── relyance-sci.yml
├── .gitignore
├── Cargo.toml
├── README.md
├── crates
│   ├── ark-circom
│   │   ├── Cargo.toml
│   │   ├── src
│   │   │   ├── circom.rs
│   │   │   ├── ethereum.rs
│   │   │   ├── lib.rs
│   │   │   └── zkey.rs
│   │   └── test-vectors
│   │       ├── test.zkey
│   │       └── verification_key.json
│   ├── ark-zkey
│   │   ├── .gitignore
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   └── src
│   │       ├── lib.rs
│   │       └── semaphore.16.arkzkey
│   ├── circom-witness-rs
│   │   ├── .gitignore
│   │   ├── Cargo.toml
│   │   ├── LICENSE
│   │   ├── README.md
│   │   ├── build.rs
│   │   ├── include
│   │   │   └── witness.h
│   │   ├── script
│   │   │   └── replace.sh
│   │   └── src
│   │       ├── field.rs
│   │       ├── generate.rs
│   │       ├── graph.rs
│   │       └── lib.rs
│   ├── hasher
│   │   ├── Cargo.toml
│   │   └── src
│   │       └── lib.rs
│   ├── js
│   │   ├── .gitignore
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── build.mjs
│   │   ├── example
│   │   │   ├── .gitignore
│   │   │   ├── index.mjs
│   │   │   ├── index.ts
│   │   │   ├── package-lock.json
│   │   │   └── package.json
│   │   └── src
│   │       └── lib.rs
│   ├── keccak
│   │   ├── Cargo.toml
│   │   └── src
│   │       ├── keccak.rs
│   │       ├── lib.rs
│   │       └── sha3.rs
│   ├── poseidon
│   │   ├── Cargo.toml
│   │   └── src
│   │       ├── constants.rs
│   │       ├── lib.rs
│   │       └── poseidon.rs
│   ├── proof
│   │   ├── Cargo.toml
│   │   └── src
│   │       ├── ark.rs
│   │       ├── compression.rs
│   │       ├── lib.rs
│   │       └── packing.rs
│   ├── semaphore-depth-config
│   │   ├── Cargo.toml
│   │   └── src
│   │       └── lib.rs
│   ├── semaphore-depth-macros
│   │   ├── Cargo.toml
│   │   └── src
│   │       └── lib.rs
│   ├── semaphore
│   │   ├── Cargo.toml
│   │   ├── README.md
│   │   ├── benches
│   │   │   ├── cascading_merkle_tree.rs
│   │   │   └── lazy_merkle_tree.rs
│   │   ├── build.rs
│   │   ├── examples
│   │   │   └── abort
│   │   │       └── main.rs
│   │   ├── graphs
│   │   │   ├── 16
│   │   │   │   └── graph.bin
│   │   │   ├── 20
│   │   │   │   └── graph.bin
│   │   │   └── 30
│   │   │       └── graph.bin
│   │   └── src
│   │       ├── circuit.rs
│   │       ├── field.rs
│   │       ├── hash.rs
│   │       ├── identity.rs
│   │       ├── lib.rs
│   │       ├── packed_proof.rs
│   │       ├── poseidon_tree.rs
│   │       ├── protocol
│   │       │   ├── authentication.rs
│   │       │   └── mod.rs
│   │       └── util.rs
│   ├── storage
│   │   ├── Cargo.toml
│   │   └── src
│   │       ├── lib.rs
│   │       └── mmap_vec.rs
│   ├── trees
│   │   ├── Cargo.toml
│   │   ├── src
│   │   │   ├── cascading
│   │   │   │   ├── mod.rs
│   │   │   │   └── storage_ops.rs
│   │   │   ├── imt
│   │   │   │   └── mod.rs
│   │   │   ├── lazy
│   │   │   │   └── mod.rs
│   │   │   ├── lib.rs
│   │   │   └── proof.rs
│   │   └── tests
│   │       └── equivalent.rs
│   └── utils
│       ├── Cargo.toml
│       └── src
│           └── lib.rs
├── cspell.json
├── mit-license.md
├── publish_all.sh
└── supply-chain
    ├── audits.toml
    ├── config.toml
    └── imports.lock
/.github/workflows/build-and-test.yml: -------------------------------------------------------------------------------- 1 | name: Build and Test 2 | on: push 3 | 4 | env: 5 | RUST_VERSION: 1.86.0 6 | 7 | jobs: 8 | dependabot-dependency-review: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: "Checkout Repository" 12 | uses: actions/checkout@v4 13 | - name: "Dependency Review" 14 | uses: actions/dependency-review-action@v4 15 | with: 16 | base-ref: ${{ inputs.base-ref || github.event.pull_request.base.sha || 'main' }} 17 | head-ref: ${{ inputs.head-ref || github.event.pull_request.head.sha || github.ref }} 18 | 19 | test: 20 | name: Test 21 | runs-on: ubuntu-latest 22 | env: 23 | RUSTFLAGS: "-D warnings" 24 | steps: 25 | - name: Checkout Repository 26 | uses: actions/checkout@v4 27 | 28 | - name: Set up Rust 29 | run: | 30 | rustup update ${{ env.RUST_VERSION }} && rustup default ${{ env.RUST_VERSION }} && rustup component add rustfmt --toolchain ${{ env.RUST_VERSION }} && rustup component add clippy --toolchain ${{ env.RUST_VERSION }} 31 | 32 | - name: Cache Cargo registry 33 | uses: actions/cache@v4 34 | with: 35 | path: ~/.cargo/registry 36 | key: ${{ runner.os }}-cargo-registry-${{ 
hashFiles('**/Cargo.toml') }} 37 | restore-keys: | 38 | ${{ runner.os }}-cargo-registry- 39 | 40 | - name: Cache Cargo index 41 | uses: actions/cache@v4 42 | with: 43 | path: ~/.cargo/git 44 | key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.toml') }} 45 | restore-keys: | 46 | ${{ runner.os }}-cargo-index- 47 | 48 | - name: Cache Cargo build 49 | uses: actions/cache@v4 50 | with: 51 | path: target 52 | key: ${{ runner.os }}-cargo-build-${{ hashFiles('**/Cargo.toml') }} 53 | restore-keys: | 54 | ${{ runner.os }}-cargo-build- 55 | 56 | - name: Check 57 | run: cargo check --workspace --tests --benches --features depth_16,depth_20,depth_30 58 | 59 | - name: Check Formatting 60 | run: cargo fmt --all -- --check 61 | 62 | - name: Run Clippy 63 | run: cargo clippy --all-targets --features depth_16,depth_20,depth_30 64 | 65 | - name: Run test 66 | run: cargo test --workspace --features depth_16,depth_20,depth_30 67 | # vet: 68 | # name: Vet Dependencies 69 | # runs-on: ubuntu-latest 70 | # steps: 71 | # - uses: actions/checkout@master 72 | # - name: Install Rust 73 | # uses: actions-rs/toolchain@v1 74 | # with: 75 | # profile: minimal 76 | # toolchain: ${{ env.RUST_VERSION }} 77 | # override: true 78 | # - uses: actions-rs/cargo@v1 79 | # with: 80 | # command: build 81 | # - uses: actions/cache@v3 82 | # with: 83 | # path: | 84 | # ~/.cargo/registry/index/ 85 | # ~/.cargo/registry/cache/ 86 | # ~/.cargo/git/db/ 87 | # target/ 88 | # key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 89 | # - name: Install cargo-vet 90 | # run: cargo install cargo-vet --version ${{ env.CARGO_VET_VERSION }} --git ${{ env.CARGO_VET_REPO }} 91 | # - name: Prune (If some import got updated) 92 | # run: cargo vet prune 93 | # - name: Invoke cargo-vet 94 | # run: cargo vet 95 | -------------------------------------------------------------------------------- /.github/workflows/relyance-sci.yml: -------------------------------------------------------------------------------- 1 | name: Relyance SCI Scan 2 | 3 | on: 4 | schedule: 5 | - cron: "30 0 * * *" 6 | workflow_dispatch: 7 | 8 | jobs: 9 | execute-relyance-sci: 10 | name: Relyance SCI Job 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: read 14 | 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v4 18 | 19 | - name: Pull and run SCI binary 20 | run: |- 21 | docker pull gcr.io/relyance-ext/compliance_inspector:release && \ 22 | docker run --rm -v `pwd`:/repo --env API_KEY='${{ secrets.DPP_SCI_KEY }}' gcr.io/relyance-ext/compliance_inspector:release 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | *.profraw 3 | snarkfiles_tmp 4 | semaphore_files 5 | .idea 6 | lcov.info 7 | 8 | Cargo.lock 9 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["crates/*"] 3 | resolver = "2" 4 | 5 | [workspace.package] 6 | version = "0.5.0" 7 | edition = "2021" 8 | homepage = "https://github.com/worldcoin/semaphore-rs" 9 | license = "MIT" 10 | repository = "https://github.com/worldcoin/semaphore-rs" 11 | authors = [ 12 | "Remco Bloemen ", 13 | "Philipp Sippl ", 14 | ] 15 | description = "Rust support library for Semaphore" 16 | keywords = ["worldcoin", "protocol", "signup"] 17 | categories = ["cryptography"] 18 | 19 | [workspace.dependencies] 20 | # 
Internal 21 | semaphore-rs-utils = { version = "0.5.0", path = "crates/utils" } 22 | semaphore-rs-ark-circom = { version = "0.5.0", path = "crates/ark-circom" } 23 | semaphore-rs-ark-zkey = { version = "0.5.0", path = "crates/ark-zkey" } 24 | semaphore-rs-proof = { version = "0.5.0", path = "crates/proof", default-features = false } 25 | semaphore-rs-poseidon = { version = "0.5.0", path = "crates/poseidon" } 26 | semaphore-rs-hasher = { version = "0.5.0", path = "crates/hasher" } 27 | semaphore-rs-keccak = { version = "0.5.0", path = "crates/keccak" } 28 | semaphore-rs-trees = { version = "0.5.0", path = "crates/trees" } 29 | semaphore-rs-storage = { version = "0.5.0", path = "crates/storage" } 30 | semaphore-rs-depth-config = { version = "0.5.0", path = "crates/semaphore-depth-config" } 31 | semaphore-rs-depth-macros = { version = "0.5.0", path = "crates/semaphore-depth-macros" } 32 | semaphore-rs-witness = { version = "0.5.0", path = "crates/circom-witness-rs" } 33 | 34 | # 3rd Party 35 | alloy-core = { version = "1.0", default-features = false, features = [ 36 | "sol-types", 37 | ] } 38 | bincode = "1.3.3" 39 | bytemuck = "1.18" 40 | byteorder = "1" 41 | color-eyre = "0.6" 42 | criterion = { version = "0.5", features = ["async_tokio", "html_reports"] } 43 | cxx = "1" 44 | cxx-build = "1" 45 | derive-where = "1" 46 | hex = "0.4.0" 47 | hex-literal = "0.4" 48 | itertools = "0.13" 49 | lazy_static = "1" 50 | mmap-rs = "0.6.1" 51 | num-bigint = { version = "0.4", default-features = false, features = ["rand"] } 52 | num-traits = "0.2.19" 53 | once_cell = "1.8" 54 | postcard = "1" 55 | proptest = "1.0" 56 | rand = { version = "0.8.4", features = ["small_rng"] } 57 | rand_chacha = "0.3.1" 58 | rayon = "1.5.1" 59 | reqwest = { version = "0.11", default-features = false, features = [ 60 | "blocking", 61 | "rustls-tls", 62 | ] } 63 | ruint = { version = "1.12.3", features = [ 64 | "rand", 65 | "bytemuck", 66 | "serde", 67 | "num-bigint", 68 | "ark-ff-04", 69 | ] } 70 | serde = { version = "1.0", features = ["derive"] } 71 | serde_json = "1.0.79" 72 | serial_test = "3" 73 | sha2 = "0.10.1" 74 | test-case = "3.3.1" 75 | tempfile = "3.0" 76 | thiserror = "1.0.0" 77 | tiny-keccak = { version = "2.0.2", features = ["sha3", "keccak"] } 78 | tokio = "1" 79 | tracing-test = "0.2" 80 | zeroize = "1.6.0" 81 | memmap2 = "0.9" 82 | flame = "0.2" 83 | flamer = "0.5" 84 | 85 | # WASM 86 | wasm-bindgen = "0.2" 87 | js-sys = "0.3" 88 | # Required if we're compiling to WASM 89 | getrandom = { version = "0.2.15", features = ["js"] } 90 | wasm-bindgen-test = "0.3" 91 | 92 | # Proc macros 93 | syn = { version = "2.0.9", features = ["full", "visit-mut", "extra-traits"] } 94 | proc-macro2 = "1.0.53" 95 | quote = "1.0.26" 96 | 97 | # Ark 98 | ark-bn254 = { version = "=0.4.0" } 99 | ark-ec = { version = "0.4.2", default-features = false, features = [ 100 | "parallel", 101 | ] } 102 | ark-ff = { version = "0.4.2", default-features = false, features = [ 103 | "parallel", 104 | "asm", 105 | ] } 106 | ark-groth16 = { version = "=0.4.0", features = ["parallel"] } 107 | ark-relations = { version = "=0.4.0", default-features = false } 108 | ark-std = { version = "0.4.0", default-features = false, features = [ 109 | "parallel", 110 | ] } 111 | ark-serialize = { version = "0.4.2", features = ["derive"] } 112 | ark-poly = { version = "0.4.2" } 113 | ark-crypto-primitives = { version = "0.4.0" } 114 | 115 | [profile.release] 116 | codegen-units = 1 117 | lto = true 118 | panic = "abort" 119 | opt-level = 3 120 | 121 | # 
Compilation profile for any non-workspace member. 122 | # Dependencies are optimized, even in a dev build. This improves dev performance 123 | # while having negligible impact on incremental build times. 124 | [profile.dev.package."*"] 125 | opt-level = 3 126 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | crates/semaphore/README.md -------------------------------------------------------------------------------- /crates/ark-circom/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-ark-circom" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | ark-ff.workspace = true 15 | ark-relations.workspace = true 16 | ark-serialize.workspace = true 17 | ark-std.workspace = true 18 | ark-bn254.workspace = true 19 | ark-groth16.workspace = true 20 | ark-poly.workspace = true 21 | ark-crypto-primitives.workspace = true 22 | 23 | ruint.workspace = true 24 | serde_json.workspace = true 25 | byteorder.workspace = true 26 | num-traits.workspace = true 27 | num-bigint.workspace = true 28 | 29 | thiserror.workspace = true 30 | 31 | [features] 32 | # This feature does nothing, but ark_std::cfg_into_iter expects it 33 | parallel = [] 34 | -------------------------------------------------------------------------------- /crates/ark-circom/src/circom.rs: -------------------------------------------------------------------------------- 1 | use ark_ff::PrimeField; 2 | use ark_groth16::r1cs_to_qap::{evaluate_constraint, LibsnarkReduction, R1CSToQAP}; 3 | use ark_poly::EvaluationDomain; 4 | use ark_relations::r1cs::{ConstraintMatrices, ConstraintSystemRef, SynthesisError}; 5 | use ark_std::{cfg_into_iter, cfg_iter, cfg_iter_mut, vec}; 6 | 7 | /// Implements the witness map used by snarkjs. The arkworks witness map calculates the 8 | /// coefficients of H through computing (AB-C)/Z in the evaluation domain and going back to the 9 | /// coefficients domain. snarkjs instead precomputes the Lagrange form of the powers of tau bases 10 | /// in a domain twice as large and the witness map is computed as the odd coefficients of (AB-C) 11 | /// in that domain. This serves as HZ when computing the C proof element. 
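/// Concretely, `witness_map_from_matrices` below takes A, B and C to coefficient form with an iFFT, multiplies coefficient i by the 2n-th root of unity raised to the i-th power (`distribute_powers_and_mul_by_const`), and FFTs back, which evaluates each polynomial at the odd powers of that root; the pointwise product AB minus C at those points is the returned vector. 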
12 | pub struct CircomReduction; 13 | 14 | impl R1CSToQAP for CircomReduction { 15 | #[allow(clippy::type_complexity)] 16 | fn instance_map_with_evaluation<F: PrimeField, D: EvaluationDomain<F>>( 17 | cs: ConstraintSystemRef<F>, 18 | t: &F, 19 | ) -> Result<(Vec<F>, Vec<F>, Vec<F>, F, usize, usize), SynthesisError> { 20 | LibsnarkReduction::instance_map_with_evaluation::<F, D>(cs, t) 21 | } 22 | 23 | fn witness_map_from_matrices<F: PrimeField, D: EvaluationDomain<F>>( 24 | matrices: &ConstraintMatrices<F>, 25 | num_inputs: usize, 26 | num_constraints: usize, 27 | full_assignment: &[F], 28 | ) -> Result<Vec<F>, SynthesisError> { 29 | let zero = F::zero(); 30 | let domain = 31 | D::new(num_constraints + num_inputs).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; 32 | let domain_size = domain.size(); 33 | 34 | let mut a = vec![zero; domain_size]; 35 | let mut b = vec![zero; domain_size]; 36 | 37 | cfg_iter_mut!(a[..num_constraints]) 38 | .zip(cfg_iter_mut!(b[..num_constraints])) 39 | .zip(cfg_iter!(&matrices.a)) 40 | .zip(cfg_iter!(&matrices.b)) 41 | .for_each(|(((a, b), at_i), bt_i)| { 42 | *a = evaluate_constraint(at_i, full_assignment); 43 | *b = evaluate_constraint(bt_i, full_assignment); 44 | }); 45 | 46 | { 47 | let start = num_constraints; 48 | let end = start + num_inputs; 49 | a[start..end].clone_from_slice(&full_assignment[..num_inputs]); 50 | } 51 | 52 | let mut c = vec![zero; domain_size]; 53 | cfg_iter_mut!(c[..num_constraints]) 54 | .zip(&a) 55 | .zip(&b) 56 | .for_each(|((c_i, &a), &b)| { 57 | *c_i = a * b; 58 | }); 59 | 60 | domain.ifft_in_place(&mut a); 61 | domain.ifft_in_place(&mut b); 62 | 63 | let root_of_unity = { 64 | let domain_size_double = 2 * domain_size; 65 | let domain_double = 66 | D::new(domain_size_double).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; 67 | domain_double.element(1) 68 | }; 69 | D::distribute_powers_and_mul_by_const(&mut a, root_of_unity, F::one()); 70 | D::distribute_powers_and_mul_by_const(&mut b, root_of_unity, F::one()); 71 | 72 | domain.fft_in_place(&mut a); 73 | domain.fft_in_place(&mut b); 74 | 75 | let mut ab = domain.mul_polynomials_in_evaluation_domain(&a, &b); 76 | drop(a); 77 | drop(b); 78 | 79 | domain.ifft_in_place(&mut c); 80 | D::distribute_powers_and_mul_by_const(&mut c, root_of_unity, F::one()); 81 | domain.fft_in_place(&mut c); 82 | 83 | cfg_iter_mut!(ab) 84 | .zip(c) 85 | .for_each(|(ab_i, c_i)| *ab_i -= &c_i); 86 | 87 | Ok(ab) 88 | } 89 | 90 | fn h_query_scalars<F: PrimeField, D: EvaluationDomain<F>>( 91 | max_power: usize, 92 | t: F, 93 | _: F, 94 | delta_inverse: F, 95 | ) -> Result<Vec<F>, SynthesisError> { 96 | // the usual H query has domain-1 powers. Z has domain powers. So HZ has 2*domain-1 powers. 97 | let mut scalars = cfg_into_iter!(0..2 * max_power + 1) 98 | .map(|i| delta_inverse * t.pow([i as u64])) 99 | .collect::<Vec<_>>(); 100 | let domain_size = scalars.len(); 101 | let domain = D::new(domain_size).ok_or(SynthesisError::PolynomialDegreeTooLarge)?; 102 | // generate the lagrange coefficients 103 | domain.ifft_in_place(&mut scalars); 104 | Ok(cfg_into_iter!(scalars).skip(1).step_by(2).collect()) 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /crates/ark-circom/src/ethereum.rs: -------------------------------------------------------------------------------- 1 | //! Helpers for converting Arkworks types to U256-tuples as expected by the 2 | //! 
Solidity Groth16 Verifier smart contracts 3 | use ark_ff::{BigInteger, PrimeField}; 4 | use num_traits::Zero; 5 | 6 | use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G2Affine}; 7 | use ark_serialize::CanonicalDeserialize; 8 | use ruint::aliases::U256; 9 | use thiserror::Error; 10 | 11 | #[derive(Error, Debug)] 12 | pub enum AffineError { 13 | #[error("point is not on curve")] 14 | NotOnCurve, 15 | #[error("point is not in correct subgroup")] 16 | NotInCorrectSubgroup, 17 | } 18 | 19 | pub struct Inputs(pub Vec<U256>); 20 | 21 | impl From<&[Fr]> for Inputs { 22 | fn from(src: &[Fr]) -> Self { 23 | let els = src.iter().map(|point| point_to_u256(*point)).collect(); 24 | 25 | Self(els) 26 | } 27 | } 28 | 29 | #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] 30 | pub struct G1 { 31 | pub x: U256, 32 | pub y: U256, 33 | } 34 | 35 | impl TryFrom<G1> for G1Affine { 36 | type Error = AffineError; 37 | 38 | fn try_from(value: G1) -> Result<Self, Self::Error> { 39 | let x: Fq = u256_to_point(value.x); 40 | let y: Fq = u256_to_point(value.y); 41 | if x.is_zero() && y.is_zero() { 42 | Ok(G1Affine::identity()) 43 | } else { 44 | let point = G1Affine { 45 | x, 46 | y, 47 | infinity: false, 48 | }; 49 | if !point.is_on_curve() { 50 | return Err(AffineError::NotOnCurve); 51 | } 52 | if !point.is_in_correct_subgroup_assuming_on_curve() { 53 | return Err(AffineError::NotInCorrectSubgroup); 54 | } 55 | Ok(point) 56 | } 57 | } 58 | } 59 | 60 | type G1Tup = (U256, U256); 61 | 62 | impl G1 { 63 | pub fn as_tuple(&self) -> (U256, U256) { 64 | (self.x, self.y) 65 | } 66 | } 67 | 68 | impl From<&G1Affine> for G1 { 69 | fn from(p: &G1Affine) -> Self { 70 | Self { 71 | x: point_to_u256(p.x), 72 | y: point_to_u256(p.y), 73 | } 74 | } 75 | } 76 | 77 | #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] 78 | pub struct G2 { 79 | pub x: [U256; 2], 80 | pub y: [U256; 2], 81 | } 82 | 83 | impl TryFrom<G2> for G2Affine { 84 | type Error = AffineError; 85 | 86 | fn try_from(src: G2) -> Result<Self, Self::Error> { 87 | let c0 = u256_to_point(src.x[0]); 88 | let c1 = u256_to_point(src.x[1]); 89 | let x = Fq2::new(c0, c1); 90 | 91 | let c0 = u256_to_point(src.y[0]); 92 | let c1 = u256_to_point(src.y[1]); 93 | let y = Fq2::new(c0, c1); 94 | 95 | if x.is_zero() && y.is_zero() { 96 | Ok(G2Affine::identity()) 97 | } else { 98 | let point = G2Affine { 99 | x, 100 | y, 101 | infinity: false, 102 | }; 103 | if !point.is_on_curve() { 104 | return Err(AffineError::NotOnCurve); 105 | } 106 | if !point.is_in_correct_subgroup_assuming_on_curve() { 107 | return Err(AffineError::NotInCorrectSubgroup); 108 | } 109 | Ok(point) 110 | } 111 | } 112 | } 113 | 114 | type G2Tup = ([U256; 2], [U256; 2]); 115 | 116 | impl G2 { 117 | // NB: Serialize the c1 limb first. 
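// Ethereum's BN254 pairing precompile (EIP-197) encodes an Fq2 element c0 + c1*u as (c1, c0), i.e. the imaginary coefficient first, which is why the limbs are swapped below. 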
118 | pub fn as_tuple(&self) -> G2Tup { 119 | ([self.x[1], self.x[0]], [self.y[1], self.y[0]]) 120 | } 121 | } 122 | 123 | impl From<&G2Affine> for G2 { 124 | fn from(p: &G2Affine) -> Self { 125 | Self { 126 | x: [point_to_u256(p.x.c0), point_to_u256(p.x.c1)], 127 | y: [point_to_u256(p.y.c0), point_to_u256(p.y.c1)], 128 | } 129 | } 130 | } 131 | 132 | #[derive(Default, Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] 133 | pub struct Proof { 134 | pub a: G1, 135 | pub b: G2, 136 | pub c: G1, 137 | } 138 | 139 | impl Proof { 140 | pub fn as_tuple(&self) -> (G1Tup, G2Tup, G1Tup) { 141 | (self.a.as_tuple(), self.b.as_tuple(), self.c.as_tuple()) 142 | } 143 | } 144 | 145 | impl From<ark_groth16::Proof<Bn254>> for Proof { 146 | fn from(proof: ark_groth16::Proof<Bn254>) -> Self { 147 | Self { 148 | a: G1::from(&proof.a), 149 | b: G2::from(&proof.b), 150 | c: G1::from(&proof.c), 151 | } 152 | } 153 | } 154 | 155 | impl TryFrom<Proof> for ark_groth16::Proof<Bn254> { 156 | type Error = AffineError; 157 | 158 | fn try_from(src: Proof) -> Result<ark_groth16::Proof<Bn254>, AffineError> { 159 | Ok(ark_groth16::Proof { 160 | a: src.a.try_into()?, 161 | b: src.b.try_into()?, 162 | c: src.c.try_into()?, 163 | }) 164 | } 165 | } 166 | 167 | #[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord)] 168 | pub struct VerifyingKey { 169 | pub alpha1: G1, 170 | pub beta2: G2, 171 | pub gamma2: G2, 172 | pub delta2: G2, 173 | pub ic: Vec<G1>, 174 | } 175 | 176 | impl VerifyingKey { 177 | pub fn as_tuple(&self) -> (G1Tup, G2Tup, G2Tup, G2Tup, Vec<G1Tup>) { 178 | ( 179 | self.alpha1.as_tuple(), 180 | self.beta2.as_tuple(), 181 | self.gamma2.as_tuple(), 182 | self.delta2.as_tuple(), 183 | self.ic.iter().map(|i| i.as_tuple()).collect(), 184 | ) 185 | } 186 | } 187 | 188 | impl From<ark_groth16::VerifyingKey<Bn254>> for VerifyingKey { 189 | fn from(vk: ark_groth16::VerifyingKey<Bn254>) -> Self { 190 | Self { 191 | alpha1: G1::from(&vk.alpha_g1), 192 | beta2: G2::from(&vk.beta_g2), 193 | gamma2: G2::from(&vk.gamma_g2), 194 | delta2: G2::from(&vk.delta_g2), 195 | ic: vk.gamma_abc_g1.iter().map(G1::from).collect(), 196 | } 197 | } 198 | } 199 | 200 | impl TryFrom<VerifyingKey> for ark_groth16::VerifyingKey<Bn254> { 201 | type Error = AffineError; 202 | 203 | fn try_from(src: VerifyingKey) -> Result<ark_groth16::VerifyingKey<Bn254>, AffineError> { 204 | Ok(ark_groth16::VerifyingKey { 205 | alpha_g1: src.alpha1.try_into()?, 206 | beta_g2: src.beta2.try_into()?, 207 | gamma_g2: src.gamma2.try_into()?, 208 | delta_g2: src.delta2.try_into()?, 209 | gamma_abc_g1: src 210 | .ic 211 | .into_iter() 212 | .map(TryInto::try_into) 213 | .collect::<Result<_, _>>()?, 214 | }) 215 | } 216 | } 217 | 218 | // Helper for converting a U256 into its PrimeField representation (reads little-endian bytes) 219 | fn u256_to_point<F: PrimeField>(point: U256) -> F { 220 | let buf: [u8; 32] = point.to_le_bytes(); 221 | let bigint = F::BigInt::deserialize_uncompressed(&buf[..]).expect("always works"); 222 | F::from_bigint(bigint).expect("always works") 223 | } 224 | 225 | // Helper for converting a PrimeField to its U256 representation for Ethereum compatibility 226 | // (U256 reads data as big endian) 227 | fn point_to_u256<F: PrimeField>(point: F) -> U256 { 228 | let point = point.into_bigint(); 229 | let point_bytes = point.to_bytes_be(); 230 | U256::try_from_be_slice(&point_bytes[..]).expect("always works") 231 | } 232 | 233 | #[cfg(test)] 234 | mod tests { 235 | use super::*; 236 | use ark_bn254::Fq; 237 | use ark_std::UniformRand; 238 | 239 | fn fq() -> Fq { 240 | Fq::from(2) 241 | } 242 | 243 | fn fr() -> Fr { 244 | Fr::from(2) 245 | } 246 | 247 | fn g1() -> G1Affine { 248 | let rng = &mut ark_std::test_rng(); 249 | G1Affine::rand(rng) 250 | } 251 | 252 | 
fn g2() -> G2Affine { 253 | let rng = &mut ark_std::test_rng(); 254 | G2Affine::rand(rng) 255 | } 256 | 257 | #[test] 258 | fn convert_fq() { 259 | let el = fq(); 260 | let el2 = point_to_u256(el); 261 | let el3: Fq = u256_to_point(el2); 262 | let el4 = point_to_u256(el3); 263 | assert_eq!(el, el3); 264 | assert_eq!(el2, el4); 265 | } 266 | 267 | #[test] 268 | fn convert_fr() { 269 | let el = fr(); 270 | let el2 = point_to_u256(el); 271 | let el3: Fr = u256_to_point(el2); 272 | let el4 = point_to_u256(el3); 273 | assert_eq!(el, el3); 274 | assert_eq!(el2, el4); 275 | } 276 | 277 | #[test] 278 | fn convert_g1() { 279 | let el = g1(); 280 | let el2 = G1::from(&el); 281 | let el3: G1Affine = el2.try_into().unwrap(); 282 | let el4 = G1::from(&el3); 283 | assert_eq!(el, el3); 284 | assert_eq!(el2, el4); 285 | } 286 | 287 | #[test] 288 | fn convert_g2() { 289 | let el = g2(); 290 | let el2 = G2::from(&el); 291 | let el3: G2Affine = el2.try_into().unwrap(); 292 | let el4 = G2::from(&el3); 293 | assert_eq!(el, el3); 294 | assert_eq!(el2, el4); 295 | } 296 | 297 | #[test] 298 | fn convert_vk() { 299 | let vk = ark_groth16::VerifyingKey::<Bn254> { 300 | alpha_g1: g1(), 301 | beta_g2: g2(), 302 | gamma_g2: g2(), 303 | delta_g2: g2(), 304 | gamma_abc_g1: vec![g1(), g1(), g1()], 305 | }; 306 | let vk_ethers = VerifyingKey::from(vk.clone()); 307 | let ark_vk: ark_groth16::VerifyingKey<Bn254> = vk_ethers.try_into().unwrap(); 308 | assert_eq!(ark_vk, vk); 309 | } 310 | 311 | #[test] 312 | fn convert_proof() { 313 | let p = ark_groth16::Proof::<Bn254> { 314 | a: g1(), 315 | b: g2(), 316 | c: g1(), 317 | }; 318 | let p2 = Proof::from(p.clone()); 319 | let p3 = ark_groth16::Proof::try_from(p2).unwrap(); 320 | assert_eq!(p, p3); 321 | } 322 | } 323 | -------------------------------------------------------------------------------- /crates/ark-circom/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod circom; 2 | pub mod ethereum; 3 | pub mod zkey; 4 | 5 | pub use circom::CircomReduction; 6 | pub use zkey::read_zkey; 7 | -------------------------------------------------------------------------------- /crates/ark-circom/test-vectors/test.zkey: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/ark-circom/test-vectors/test.zkey -------------------------------------------------------------------------------- /crates/ark-circom/test-vectors/verification_key.json: -------------------------------------------------------------------------------- 1 | { 2 | "protocol": "groth16", 3 | "curve": "bn128", 4 | "nPublic": 1, 5 | "vk_alpha_1": [ 6 | "20491192805390485299153009773594534940189261866228447918068658471970481763042", 7 | "9383485363053290200918347156157836566562967994039712273449902621266178545958", 8 | "1" 9 | ], 10 | "vk_beta_2": [ 11 | [ 12 | "6375614351688725206403948262868962793625744043794305715222011528459656738731", 13 | "4252822878758300859123897981450591353533073413197771768651442665752259397132" 14 | ], 15 | [ 16 | "10505242626370262277552901082094356697409835680220590971873171140371331206856", 17 | "21847035105528745403288232691147584728191162732299865338377159692350059136679" 18 | ], 19 | [ 20 | "1", 21 | "0" 22 | ] 23 | ], 24 | "vk_gamma_2": [ 25 | [ 26 | "10857046999023057135944570762232829481370756359578518086990519993285655852781", 27 | "11559732032986387107991004021392285783925812861821192530917403151452391805634" 28 | ], 29 | [ 30 | 
"8495653923123431417604973247489272438418190587263600148770280649306958101930", 31 | "4082367875863433681332203403145435568316851327593401208105741076214120093531" 32 | ], 33 | [ 34 | "1", 35 | "0" 36 | ] 37 | ], 38 | "vk_delta_2": [ 39 | [ 40 | "10857046999023057135944570762232829481370756359578518086990519993285655852781", 41 | "11559732032986387107991004021392285783925812861821192530917403151452391805634" 42 | ], 43 | [ 44 | "8495653923123431417604973247489272438418190587263600148770280649306958101930", 45 | "4082367875863433681332203403145435568316851327593401208105741076214120093531" 46 | ], 47 | [ 48 | "1", 49 | "0" 50 | ] 51 | ], 52 | "vk_alphabeta_12": [ 53 | [ 54 | [ 55 | "2029413683389138792403550203267699914886160938906632433982220835551125967885", 56 | "21072700047562757817161031222997517981543347628379360635925549008442030252106" 57 | ], 58 | [ 59 | "5940354580057074848093997050200682056184807770593307860589430076672439820312", 60 | "12156638873931618554171829126792193045421052652279363021382169897324752428276" 61 | ], 62 | [ 63 | "7898200236362823042373859371574133993780991612861777490112507062703164551277", 64 | "7074218545237549455313236346927434013100842096812539264420499035217050630853" 65 | ] 66 | ], 67 | [ 68 | [ 69 | "7077479683546002997211712695946002074877511277312570035766170199895071832130", 70 | "10093483419865920389913245021038182291233451549023025229112148274109565435465" 71 | ], 72 | [ 73 | "4595479056700221319381530156280926371456704509942304414423590385166031118820", 74 | "19831328484489333784475432780421641293929726139240675179672856274388269393268" 75 | ], 76 | [ 77 | "11934129596455521040620786944827826205713621633706285934057045369193958244500", 78 | "8037395052364110730298837004334506829870972346962140206007064471173334027475" 79 | ] 80 | ] 81 | ], 82 | "IC": [ 83 | [ 84 | "6819801395408938350212900248749732364821477541620635511814266536599629892365", 85 | "9092252330033992554755034971584864587974280972948086568597554018278609861372", 86 | "1" 87 | ], 88 | [ 89 | "17882351432929302592725330552407222299541667716607588771282887857165175611387", 90 | "18907419617206324833977586007131055763810739835484972981819026406579664278293", 91 | "1" 92 | ] 93 | ] 94 | } -------------------------------------------------------------------------------- /crates/ark-zkey/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | -------------------------------------------------------------------------------- /crates/ark-zkey/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-ark-zkey" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | semaphore-rs-ark-circom.workspace = true 15 | 16 | color-eyre.workspace = true 17 | memmap2.workspace = true 
18 | 19 | ark-serialize.workspace = true 20 | ark-bn254.workspace = true 21 | ark-groth16.workspace = true 22 | ark-relations.workspace = true 23 | ark-ff.workspace = true 24 | ark-ec.workspace = true 25 | -------------------------------------------------------------------------------- /crates/ark-zkey/README.md: -------------------------------------------------------------------------------- 1 | # ark-zkey 2 | 3 | Library to read `zkey` files faster by serializing them to an `arkworks`-friendly format. 4 | 5 | See https://github.com/oskarth/mopro/issues/25 for context. 6 | 7 | ## To generate arkzkey 8 | 9 | Hacky, but the way we generate `arkzkey` now is by running the corresponding test. 10 | 11 | Note that we also need to change the const `ZKEY_BYTES` above. 12 | 13 | E.g.: 14 | 15 | ``` 16 | cargo test multiplier2 --release -- --nocapture 17 | cargo test keccak256 --release -- --nocapture 18 | cargo test rsa --release -- --nocapture 19 | ``` 20 | 21 | This will take the corresponding `zkey` and put the `arkzkey` in the same folder. 22 | 23 | ## Multiplier 24 | 25 | NOTE: Need to change const ZKEY here 26 | 27 | `cargo test multiplier2 --release -- --nocapture` 28 | 29 | ``` 30 | running 1 test 31 | [build] Processing zkey data... 32 | [build] Time to process zkey data: 3.513041ms 33 | [build] Serializing proving key and constraint matrices 34 | [build] Time to serialize proving key and constraint matrices: 42ns 35 | [build] Writing arkzkey to: ../mopro-core/examples/circom/multiplier2/target/multiplier2_final.arkzkey 36 | [build] Time to write arkzkey: 1.884875ms 37 | Reading arkzkey from: ../mopro-core/examples/circom/multiplier2/target/multiplier2_final.arkzkey 38 | 39 | Time to open arkzkey file: 18.084µs 40 | Time to mmap arkzkey: 8.542µs 41 | Time to deserialize proving key: 305.75µs 42 | Time to deserialize matrices: 5µs 43 | Time to read arkzkey: 348.083µs 44 | test tests::test_multiplier2_serialization_deserialization ... ok 45 | ``` 46 | 47 | Naive test: `cargo test naive --release -- --nocapture` (with right zkey constant). 48 | 49 | **Result: `350µs` vs naive `3.3ms`** 50 | 51 | ## Keccak 52 | 53 | NOTE: Need to change const ZKEY here 54 | 55 | `cargo test keccak256 --release -- --nocapture` 56 | 57 | ``` 58 | [build] Processing zkey data... 59 | test tests::test_keccak256_serialization_deserialization has been running for over 60 seconds 60 | [build]Time to process zkey data: 158.753181958s 61 | [build] Serializing proving key and constraint matrices 62 | [build] Time to serialize proving key and constraint matrices: 42ns 63 | [build] Writing arkzkey to: ../mopro-core/examples/circom/keccak256/target/keccak256_256_test_final.arkzkey 64 | [build] Time to write arkzkey: 16.204274125s 65 | Reading arkzkey from: ../mopro-core/examples/circom/keccak256/target/keccak256_256_test_final.arkzkey 66 | Time to open arkzkey file: 51.75µs 67 | Time to mmap arkzkey: 17.25µs 68 | Time to deserialize proving key: 18.323550083s 69 | Time to deserialize matrices: 46.935792ms 70 | Time to read arkzkey: 18.3730695s 71 | test tests::test_keccak256_serialization_deserialization ... 
ok 72 | ``` 73 | 74 | Vs naive: 75 | 76 | `[build] Time to process zkey data: 158.753181958s` 77 | 78 | 79 | **Result: 18s vs 158s** 80 | -------------------------------------------------------------------------------- /crates/ark-zkey/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io::BufReader; 3 | use std::path::PathBuf; 4 | 5 | use ark_bn254::{Bn254, Fr}; 6 | use ark_ff::Field; 7 | use ark_groth16::ProvingKey; 8 | use ark_relations::r1cs::ConstraintMatrices; 9 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 10 | use color_eyre::eyre::{Result, WrapErr}; 11 | use semaphore_rs_ark_circom::read_zkey; 12 | 13 | #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)] 14 | pub struct SerializableProvingKey(pub ProvingKey<Bn254>); 15 | 16 | #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)] 17 | pub struct SerializableMatrix<F: Field> { 18 | pub data: Vec<Vec<(F, usize)>>, 19 | } 20 | 21 | #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, PartialEq)] 22 | pub struct SerializableConstraintMatrices<F: Field> { 23 | pub num_instance_variables: usize, 24 | pub num_witness_variables: usize, 25 | pub num_constraints: usize, 26 | pub a_num_non_zero: usize, 27 | pub b_num_non_zero: usize, 28 | pub c_num_non_zero: usize, 29 | pub a: SerializableMatrix<F>, 30 | pub b: SerializableMatrix<F>, 31 | pub c: SerializableMatrix<F>, 32 | } 33 | 34 | // TODO: Return ProvingKey, ConstraintMatrices? 35 | pub fn read_arkzkey_from_bytes( 36 | arkzkey_bytes: &[u8], 37 | ) -> Result<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> { 38 | let mut cursor = std::io::Cursor::new(arkzkey_bytes); 39 | 40 | let serialized_proving_key = 41 | SerializableProvingKey::deserialize_compressed_unchecked(&mut cursor) 42 | .wrap_err("Failed to deserialize proving key")?; 43 | 44 | let serialized_constraint_matrices = 45 | SerializableConstraintMatrices::deserialize_compressed_unchecked(&mut cursor) 46 | .wrap_err("Failed to deserialize constraint matrices")?; 47 | 48 | // Get on right form for API 49 | let proving_key: ProvingKey<Bn254> = serialized_proving_key.0; 50 | let constraint_matrices: ConstraintMatrices<Fr> = ConstraintMatrices { 51 | num_instance_variables: serialized_constraint_matrices.num_instance_variables, 52 | num_witness_variables: serialized_constraint_matrices.num_witness_variables, 53 | num_constraints: serialized_constraint_matrices.num_constraints, 54 | a_num_non_zero: serialized_constraint_matrices.a_num_non_zero, 55 | b_num_non_zero: serialized_constraint_matrices.b_num_non_zero, 56 | c_num_non_zero: serialized_constraint_matrices.c_num_non_zero, 57 | a: serialized_constraint_matrices.a.data, 58 | b: serialized_constraint_matrices.b.data, 59 | c: serialized_constraint_matrices.c.data, 60 | }; 61 | 62 | Ok((proving_key, constraint_matrices)) 63 | } 64 | 65 | pub fn read_proving_key_and_matrices_from_zkey( 66 | zkey_path: &str, 67 | ) -> Result<(SerializableProvingKey, SerializableConstraintMatrices<Fr>)> { 68 | let zkey_file_path = PathBuf::from(zkey_path); 69 | let zkey_file = File::open(zkey_file_path).wrap_err("Failed to open zkey file")?; 70 | 71 | let mut buf_reader = BufReader::new(zkey_file); 72 | 73 | let (proving_key, matrices) = 74 | read_zkey(&mut buf_reader).wrap_err("Failed to read zkey file")?; 75 | 76 | let serializable_proving_key = SerializableProvingKey(proving_key); 77 | let serializable_constraint_matrices = SerializableConstraintMatrices { 78 | num_instance_variables: matrices.num_instance_variables, 79 | 
num_witness_variables: matrices.num_witness_variables, 80 | num_constraints: matrices.num_constraints, 81 | a_num_non_zero: matrices.a_num_non_zero, 82 | b_num_non_zero: matrices.b_num_non_zero, 83 | c_num_non_zero: matrices.c_num_non_zero, 84 | a: SerializableMatrix { data: matrices.a }, 85 | b: SerializableMatrix { data: matrices.b }, 86 | c: SerializableMatrix { data: matrices.c }, 87 | }; 88 | 89 | Ok((serializable_proving_key, serializable_constraint_matrices)) 90 | } 91 | 92 | pub fn convert_zkey( 93 | proving_key: SerializableProvingKey, 94 | constraint_matrices: SerializableConstraintMatrices<Fr>, 95 | arkzkey_path: &str, 96 | ) -> Result<()> { 97 | let arkzkey_file_path = PathBuf::from(arkzkey_path); 98 | 99 | let mut file = File::create(&arkzkey_file_path) 100 | .wrap_err("Failed to create serialized proving key file")?; 101 | 102 | proving_key 103 | .serialize_compressed(&mut file) 104 | .wrap_err("Failed to serialize proving key")?; 105 | 106 | constraint_matrices 107 | .serialize_compressed(&mut file) 108 | .wrap_err("Failed to serialize constraint matrices")?; 109 | 110 | Ok(()) 111 | } 112 | 113 | #[cfg(test)] 114 | mod tests { 115 | use std::time::Instant; 116 | 117 | use super::*; 118 | 119 | #[test] 120 | fn test_read_arkzkey_from_bytes() -> Result<()> { 121 | const ARKZKEY_BYTES: &[u8] = include_bytes!("./semaphore.16.arkzkey"); 122 | 123 | println!("Reading arkzkey from bytes (keccak)"); 124 | let now = Instant::now(); 125 | let (_deserialized_proving_key, _deserialized_constraint_matrices) = 126 | read_arkzkey_from_bytes(ARKZKEY_BYTES)?; 127 | println!("Time to read arkzkey: {:?}", now.elapsed()); 128 | 129 | Ok(()) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /crates/ark-zkey/src/semaphore.16.arkzkey: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/ark-zkey/src/semaphore.16.arkzkey -------------------------------------------------------------------------------- /crates/circom-witness-rs/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | *_cpp 3 | *.new 4 | .DS_Store 5 | circuit.cc 6 | constants.dat 7 | .idea 8 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-witness" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | ark-bn254 = { workspace = true, features = ["std"] } 15 | ark-ff = { workspace = true, features = ["std"] } 16 | ark-serialize.workspace = true 17 | byteorder.workspace = true 18 | color-eyre.workspace = true 19 | hex.workspace = true 20 | postcard = { workspace = true, features = ["use-std"] } 21 | rand.workspace = true 22 | ruint.workspace = true 23 | serde.workspace = true 24 | serde_json.workspace = true 25 | 26 | [build-dependencies] 27 | cxx-build.workspace = true 28 | 29 | [features] 30 | build-witness = [] 31 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Philipp Sippl 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/README.md: -------------------------------------------------------------------------------- 1 | # 🏎️ circom-witness-rs 2 | 3 | ## Description 4 | 5 | This crate provides a fast witness generator for Circom circuits, serving as a drop-in replacement for Circom's witness generator. It was created in response to the slow performance of Circom's WASM generator for larger circuits, which also necessitates a WASM runtime, often a cumbersome requirement. The native C++ generator, though faster, depends on x86 assembly for field operations, rendering it impractical for use on other platforms (e.g., cross-compiling to ARM for mobile devices). 6 | 7 | `circom-witness-rs` comes with two modes: 8 | 9 | 1. Generate the static execution graph required for the witness generation at build time (`--features=build-witness`). 10 | 2. Generate the witness elements at runtime from the serialized graph. 11 | 12 | In the first mode, it generates the c++ version of the witness generator through circom and links itself against it. The c++ code is made accessible to Rust through [`cxx`](https://github.com/dtolnay/cxx). It hooks all field functions (which are x86 assembly in the original generator), such that it can recreate the execution graph through symbolic execution. The execution graph is further optimized through constant propagation and dead code elimination. The resulting graph is then serialized to a binary format. At runtime, the graph can be embedded in the binary and interpreted to generate the witness. 13 | 14 | ## Usage 15 | 16 | See this [example project](https://github.com/philsippl/semaphore-witness-example) for Semaphore with more details on building. 17 | 18 | See `semaphore-rs` for an [example at runtime](https://github.com/worldcoin/semaphore-rs/blob/62f556bdc1a2a25021dcccc97af4dfa522ab5789/src/protocol/mod.rs#L161-L163). 19 | 20 | All of these examples were used with `circom compiler 2.1.6` ([dcf7d68](https://github.com/iden3/circom/tree/dcf7d687a81c6d9b3e3840181fd83cdaf5f4ac05)). Using a different version of circom might cause issues due to different c++ code being generated. 
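
For a concrete starting point, mode 1 is driven by this crate's `build.rs`: it reads the circuit path from the `WITNESS_CPP` environment variable, shells out to `circom --c`, rewrites the generated C++ with `script/replace.sh`, and compiles it via `cxx`. A build invocation therefore looks roughly like this sketch (the circuit path is illustrative, and `circom` must be on `PATH`):

```sh
# Mode 1: regenerate the execution graph at build time (illustrative circuit path).
WITNESS_CPP=path/to/circuit.circom cargo build --features build-witness
```

Mode 2 then only needs the serialized graph produced by this step (e.g. the `graph.bin` files under `crates/semaphore/graphs/`); see the semaphore-rs link above for how it is embedded and interpreted at runtime.
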
21 | 22 | ## Benchmarks 23 | 24 | ### [semaphore-rs](https://github.com/worldcoin/semaphore-rs/tree/main) 25 | **TLDR: For semaphore circuit (depth 30) `circom-witness-rs` is ~25x faster than wasm and ~10x faster than native c++ version.** 26 | ``` 27 | cargo bench --bench=criterion --features=bench,depth_30 28 | ``` 29 | 30 | With `circom-witness-rs`: 31 | ``` 32 | witness_30 time: [993.84 µs 996.62 µs 999.42 µs] 33 | ``` 34 | 35 | With wasm witness generator from [`circom-compat`](https://github.com/arkworks-rs/circom-compat/blob/master/src/witness/witness_calculator.rs): 36 | ``` 37 | witness_30 time: [24.630 ms 24.693 ms 24.759 ms] 38 | ``` 39 | 40 | With native c++ witness generator from circom: `9.640ms` 41 | 42 | As a nice side effect of the graph optimizations, the binary size is also reduced heavily. In the example of Semaphore the binary size is reduced from `1.3MB` (`semaphore.wasm`) to `350KB` (`graph.bin`). 43 | 44 | ## Unimplemented features 45 | 46 | There are still quite a few missing operations that need to be implemented. The list of supported and unsupported operations can be found here: 47 | https://github.com/philsippl/circom-witness-rs/blob/e889cedde49a8929812b825aede55d9668118302/src/generate.rs#L61-L89 48 | Support for the missing operations is very straightforward and will be added in the future. 
 -------------------------------------------------------------------------------- /crates/circom-witness-rs/build.rs: -------------------------------------------------------------------------------- 1 | use std::{env, fs, path::Path, process::Command}; 2 | 3 | fn main() { 4 | if cfg!(feature = "build-witness") { 5 | let witness_cpp = env::var("WITNESS_CPP").unwrap(); 6 | let circuit_file = Path::new(&witness_cpp); 7 | let circuit_name = circuit_file.file_stem().unwrap().to_str().unwrap(); 8 | 9 | let status = Command::new("circom") 10 | .args([ 11 | fs::canonicalize(circuit_file).unwrap().to_str().unwrap(), 12 | "--c", 13 | ]) 14 | .status() 15 | .unwrap(); 16 | assert!(status.success()); 17 | 18 | let cpp = Path::new("./") 19 | .join(circuit_name.to_owned() + "_cpp") 20 | .join(circuit_name.to_owned() + ".cpp"); 21 | 22 | println!("cargo:warning=\"{}\"", cpp.to_str().unwrap()); 23 | 24 | let status = Command::new("./script/replace.sh") 25 | .arg(cpp.to_str().unwrap()) 26 | .status() 27 | .unwrap(); 28 | assert!(status.success()); 29 | 30 | cxx_build::bridge("src/generate.rs") 31 | .file("src/circuit.cc") 32 | .flag_if_supported("-std=c++14") 33 | .flag_if_supported("-w") 34 | .flag_if_supported("-d") 35 | .flag_if_supported("-g") 36 | .compile("witness"); 37 | 38 | println!("cargo:rerun-if-changed=src/main.rs"); 39 | println!("cargo:rerun-if-changed=src/circuit.cc"); 40 | println!("cargo:rerun-if-changed=include/circuit.h"); 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/include/witness.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "rust/cxx.h" 3 | #include <cstdint> 4 | 5 | typedef unsigned long long u64; 6 | typedef uint32_t u32; 7 | typedef uint8_t u8; 8 | 9 | struct Circom_CalcWit; 10 | 11 | void run(Circom_CalcWit *buf); 12 | uint get_size_of_io_map(); 13 | uint get_total_signal_no(); 14 | uint get_main_input_signal_no(); 15 | uint get_main_input_signal_start(); 16 | uint get_number_of_components(); 17 | uint get_size_of_constants(); 18 | uint get_size_of_input_hashmap(); 19 | uint get_size_of_witness(); 20 | 
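// NB: these declarations are implemented by the circom-generated circuit.cc (rewritten by script/replace.sh) and are surfaced to Rust through the cxx bridge in src/generate.rs. 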
-------------------------------------------------------------------------------- /crates/circom-witness-rs/script/replace.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | # Check for input file 4 | if [ "$#" -ne 1 ]; then 5 | echo "Usage: $0 <input_file>" 6 | exit 1 7 | fi 8 | 9 | filename=$(basename "$1" .cpp) 10 | 11 | # Add header 12 | cat << EOT > "$filename.new" 13 | #include "witness/include/witness.h" 14 | #include "witness/src/generate.rs.h" 15 | 16 | /// We need this accessor since cxx doesn't support hashmaps yet 17 | class IOSignalInfoAccessor { 18 | private: 19 | Circom_CalcWit *calcWitContext; 20 | 21 | public: 22 | explicit IOSignalInfoAccessor(Circom_CalcWit *calcWit) 23 | : calcWitContext(calcWit) {} 24 | auto operator[](size_t index) const -> decltype(auto) { 25 | return (calcWitContext 26 | ->templateInsId2IOSignalInfoList)[index % get_size_of_input_hashmap()]; 27 | } 28 | }; 29 | 30 | typedef void (*Circom_TemplateFunction)(uint __cIdx, Circom_CalcWit* __ctx); 31 | 32 | ////////////////////////////////////////////////////////////////// 33 | /// Generated code from circom compiler below 34 | ////////////////////////////////////////////////////////////////// 35 | 36 | EOT 37 | 38 | # Replace a few things we can't do in cxx 39 | sed -e 's/FrElement\* signalValues/rust::Vec<FrElement> \&signalValues/g' \ 40 | -e 's/std::string/rust::string/g' \ 41 | -e 's/ctx->templateInsId2IOSignalInfo/IOSignalInfoAccessor(ctx)/g' \ 42 | -e 's/u32\* mySubcomponents/rust::Vec<u32> mySubcomponents/g' \ 43 | -e 's/FrElement\* circuitConstants/rust::Vec<FrElement> \&circuitConstants/g' \ 44 | -e 's/rust::string\* listOfTemplateMessages/rust::Vec<rust::string> \&listOfTemplateMessages/g' \ 45 | -e 's/FrElement expaux\[\([0-9]*\)\];/rust::Vec<FrElement> expaux = create_vec(\1);/g' \ 46 | -e 's/FrElement lvar\[\([0-9]*\)\];/rust::Vec<FrElement> lvar = create_vec(\1);/g' \ 47 | -e 's/FrElement lvarcall\[\([0-9]*\)\];/rust::Vec<FrElement> lvarcall = create_vec(\1);/g' \ 48 | -e 's/PFrElement aux_dest/FrElement \*aux_dest/g' \ 49 | -e 's/subcomponents = new uint\[\([0-9]*\)\];/subcomponents = create_vec_u32(\1);/g' \ 50 | -e '/trace/d' \ 51 | -e 's/\(ctx,\)\(lvarcall,\)\(myId,\)/\1\&\2\3/g' \ 52 | -e '/^#include/d' \ 53 | -e '/assert/d' \ 54 | -e '/mySubcomponentsParallel/d' \ 55 | -e 's/FrElement lvarcall\[\([0-9]*\)\];/rust::Vec<FrElement> lvarcall = create_vec(\1);/g' \ 56 | -e 's/,FrElement\* lvar,/,rust::Vec<FrElement>\& lvar,/g' \ 57 | -e 's/ctx,\&lvarcall,myId,/ctx,lvarcall,myId,/g' \ 58 | -e '/delete \[\][^;]*;/d' -e 'N;/\ndelete/!P;D' \ 59 | -e '/^#include/d' "$1" >> "$filename.new" 60 | 61 | 62 | sed -E -e 's/"([^"]+)"\+ctx->generate_position_array\(([^)]+)\)/generate_position_array("\1", \2)/g' \ 63 | -e 's/subcomponents = new uint\[([0-9]+)\]\{0\};/subcomponents = create_vec_u32(\1);/g' \ 64 | -e 's/^uint aux_dimensions\[([0-9]+)\] = \{([^}]+)\};$/rust::Vec<u32> aux_dimensions = rust::Vec<u32>{\2};/' "$filename.new" > "src/circuit.cc" 65 | 66 | cp "$(echo $filename)_cpp/$filename.dat" src/constants.dat 67 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/src/field.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused, non_snake_case)] 2 | 3 | use crate::graph::{Node, Operation}; 4 | use ruint::{aliases::U256, uint}; 5 | use std::{ptr, sync::Mutex}; 6 | 7 | pub const M: U256 = 8 | uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256); 9 | 10 | pub const INV: u64 = 14042775128853446655; 11 | 12 | pub const R: 
U256 = uint!(0x0e0a77c19a07df2f666ea36f7879462e36fc76959f60cd29ac96341c4ffffffb_U256); 13 | 14 | static NODES: Mutex<Vec<Node>> = Mutex::new(Vec::new()); 15 | static VALUES: Mutex<Vec<U256>> = Mutex::new(Vec::new()); 16 | static CONSTANT: Mutex<Vec<bool>> = Mutex::new(Vec::new()); 17 | 18 | #[derive(Debug, Default, Clone, Copy)] 19 | pub struct FrElement(pub usize); 20 | 21 | pub fn print_eval() { 22 | let nodes = NODES.lock().unwrap(); 23 | let values = VALUES.lock().unwrap(); 24 | let constant = CONSTANT.lock().unwrap(); 25 | 26 | let mut constants = 0_usize; 27 | for (i, node) in nodes.iter().enumerate() { 28 | print!("{}: {:?}", i, node); 29 | if constant[i] { 30 | constants += 1; 31 | println!(" = {}", values[i]); 32 | } else { 33 | println!(); 34 | } 35 | } 36 | eprintln!( 37 | "{} nodes of which {} constant and {} dynamic", 38 | nodes.len(), 39 | constants, 40 | nodes.len() - constants 41 | ); 42 | } 43 | 44 | pub fn get_graph() -> Vec<Node> { 45 | NODES.lock().unwrap().clone() 46 | } 47 | 48 | pub fn get_values() -> Vec<U256> { 49 | VALUES.lock().unwrap().clone() 50 | } 51 | 52 | pub fn undefined() -> FrElement { 53 | FrElement(usize::MAX) 54 | } 55 | 56 | pub fn constant(c: U256) -> FrElement { 57 | let mut nodes = NODES.lock().unwrap(); 58 | let mut values = VALUES.lock().unwrap(); 59 | let mut constant = CONSTANT.lock().unwrap(); 60 | assert_eq!(nodes.len(), values.len()); 61 | assert_eq!(nodes.len(), constant.len()); 62 | 63 | nodes.push(Node::Constant(c)); 64 | values.push(c); 65 | constant.push(true); 66 | 67 | FrElement(nodes.len() - 1) 68 | } 69 | 70 | pub fn input(i: usize, value: U256) -> FrElement { 71 | let mut nodes = NODES.lock().unwrap(); 72 | let mut values = VALUES.lock().unwrap(); 73 | let mut constant = CONSTANT.lock().unwrap(); 74 | assert_eq!(nodes.len(), values.len()); 75 | assert_eq!(nodes.len(), constant.len()); 76 | 77 | nodes.push(Node::Input(i)); 78 | values.push(value); 79 | constant.push(false); 80 | 81 | FrElement(nodes.len() - 1) 82 | } 83 | 84 | fn binop(op: Operation, to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 85 | let mut nodes = NODES.lock().unwrap(); 86 | let mut values = VALUES.lock().unwrap(); 87 | let mut constant = CONSTANT.lock().unwrap(); 88 | assert_eq!(nodes.len(), values.len()); 89 | assert_eq!(nodes.len(), constant.len()); 90 | 91 | let (a, b, to) = unsafe { ((*a).0, (*b).0, &mut (*to).0) }; 92 | assert!(a < nodes.len()); 93 | assert!(b < nodes.len()); 94 | nodes.push(Node::Op(op, a, b)); 95 | *to = nodes.len() - 1; 96 | 97 | let (va, vb) = (values[a], values[b]); 98 | values.push(op.eval(va, vb)); 99 | 100 | let (ca, cb) = (constant[a], constant[b]); 101 | constant.push(ca && cb); 102 | } 103 | 104 | pub fn Fr_mul(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 105 | binop(Operation::Mul, to, a, b); 106 | } 107 | 108 | #[allow(warnings)] 109 | pub unsafe fn Fr_add(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 110 | binop(Operation::Add, to, a, b); 111 | } 112 | 113 | #[allow(warnings)] 114 | pub unsafe fn Fr_sub(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 115 | binop(Operation::Sub, to, a, b); 116 | } 117 | 118 | #[allow(warnings)] 119 | pub fn Fr_copy(to: *mut FrElement, a: *const FrElement) { 120 | unsafe { 121 | *to = *a; 122 | } 123 | } 124 | 125 | #[allow(warnings)] 126 | pub fn Fr_copyn(to: *mut FrElement, a: *const FrElement, n: usize) { 127 | unsafe { 128 | ptr::copy_nonoverlapping(a, to, n); 129 | } 130 | } 131 | 132 | /// Create a vector of FrElement with length `len`. 
133 | /// Needed because the default constructor of an opaque type is not implemented. 134 | pub fn create_vec(len: usize) -> Vec<FrElement> { 135 | vec![FrElement(usize::MAX); len] 136 | } 137 | 138 | pub fn create_vec_u32(len: usize) -> Vec<u32> { 139 | vec![0; len] 140 | } 141 | 142 | pub fn generate_position_array( 143 | prefix: String, 144 | dimensions: Vec<u32>, 145 | size_dimensions: u32, 146 | index: u32, 147 | ) -> String { 148 | let mut positions: String = prefix; 149 | let mut index = index; 150 | for i in 0..size_dimensions { 151 | let last_pos = index % dimensions[size_dimensions as usize - 1 - i as usize]; 152 | index /= dimensions[size_dimensions as usize - 1 - i as usize]; 153 | let new_pos = format!("[{}]", last_pos); 154 | positions = new_pos + &positions; 155 | } 156 | positions 157 | } 158 | 159 | pub unsafe fn Fr_toInt(a: *const FrElement) -> u64 { 160 | let nodes = NODES.lock().unwrap(); 161 | let values = VALUES.lock().unwrap(); 162 | let constant = CONSTANT.lock().unwrap(); 163 | assert_eq!(nodes.len(), values.len()); 164 | assert_eq!(nodes.len(), constant.len()); 165 | 166 | let a = unsafe { (*a).0 }; 167 | assert!(a < nodes.len()); 168 | assert!(constant[a]); 169 | values[a].try_into().unwrap() 170 | } 171 | 172 | pub unsafe fn print(a: *const FrElement) { 173 | println!("DEBUG>> {:?}", (*a).0); 174 | } 175 | 176 | pub fn Fr_isTrue(a: *mut FrElement) -> bool { 177 | let nodes = NODES.lock().unwrap(); 178 | let values = VALUES.lock().unwrap(); 179 | let constant = CONSTANT.lock().unwrap(); 180 | assert_eq!(nodes.len(), values.len()); 181 | assert_eq!(nodes.len(), constant.len()); 182 | 183 | let a = unsafe { (*a).0 }; 184 | assert!(a < nodes.len()); 185 | assert!(constant[a]); 186 | values[a] != U256::ZERO 187 | } 188 | 189 | pub unsafe fn Fr_eq(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 190 | binop(Operation::Eq, to, a, b); 191 | } 192 | 193 | pub unsafe fn Fr_neq(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 194 | binop(Operation::Neq, to, a, b); 195 | } 196 | 197 | pub unsafe fn Fr_lt(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 198 | binop(Operation::Lt, to, a, b); 199 | } 200 | 201 | pub unsafe fn Fr_gt(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 202 | binop(Operation::Gt, to, a, b); 203 | } 204 | 205 | pub unsafe fn Fr_leq(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 206 | binop(Operation::Leq, to, a, b); 207 | } 208 | 209 | pub unsafe fn Fr_geq(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 210 | binop(Operation::Geq, to, a, b); 211 | } 212 | 213 | pub unsafe fn Fr_lor(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 214 | binop(Operation::Lor, to, a, b); 215 | } 216 | 217 | pub unsafe fn Fr_shl(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 218 | binop(Operation::Shl, to, a, b); 219 | } 220 | 221 | pub unsafe fn Fr_shr(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 222 | binop(Operation::Shr, to, a, b); 223 | } 224 | 225 | pub unsafe fn Fr_band(to: *mut FrElement, a: *const FrElement, b: *const FrElement) { 226 | binop(Operation::Band, to, a, b); 227 | } 228 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/src/generate.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_snake_case)] 2 | 3 | use crate::field::{self, *}; 4 | use crate::graph::{self, Node}; 5 | use crate::HashSignalInfo; 6 | use 
byteorder::{LittleEndian, ReadBytesExt}; 7 | use ffi::InputOutputList; 8 | use ruint::{aliases::U256, uint}; 9 | use serde::{Deserialize, Serialize}; 10 | use std::{io::Read, time::Instant}; 11 | 12 | #[cxx::bridge] 13 | mod ffi { 14 | 15 | #[derive(Debug, Default, Clone)] 16 | pub struct InputOutputList { 17 | pub defs: Vec<IODef>, 18 | } 19 | 20 | #[derive(Debug, Clone, Default)] 21 | pub struct IODef { 22 | pub code: usize, 23 | pub offset: usize, 24 | pub lengths: Vec<usize>, 25 | } 26 | 27 | #[derive(Debug, Default, Clone)] 28 | struct Circom_Component { 29 | templateId: u64, 30 | signalStart: u64, 31 | inputCounter: u64, 32 | templateName: String, 33 | componentName: String, 34 | idFather: u64, 35 | subcomponents: Vec<u32>, 36 | outputIsSet: Vec<bool>, 37 | } 38 | 39 | #[derive(Debug)] 40 | struct Circom_CalcWit { 41 | signalValues: Vec<FrElement>, 42 | componentMemory: Vec<Circom_Component>, 43 | circuitConstants: Vec<FrElement>, 44 | templateInsId2IOSignalInfoList: Vec<InputOutputList>, 45 | listOfTemplateMessages: Vec<String>, 46 | } 47 | 48 | // Rust types and signatures exposed to C++. 49 | extern "Rust" { 50 | type FrElement; 51 | 52 | fn create_vec(len: usize) -> Vec<FrElement>; 53 | fn create_vec_u32(len: usize) -> Vec<u32>; 54 | fn generate_position_array( 55 | prefix: String, 56 | dimensions: Vec<u32>, 57 | size_dimensions: u32, 58 | index: u32, 59 | ) -> String; 60 | 61 | // Field operations 62 | unsafe fn Fr_mul(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 63 | unsafe fn Fr_add(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 64 | unsafe fn Fr_sub(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 65 | unsafe fn Fr_copy(to: *mut FrElement, a: *const FrElement); 66 | unsafe fn Fr_copyn(to: *mut FrElement, a: *const FrElement, n: usize); 67 | // unsafe fn Fr_neg(to: *mut FrElement, a: *const FrElement); 68 | // unsafe fn Fr_inv(to: *mut FrElement, a: *const FrElement); 69 | // unsafe fn Fr_div(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 70 | // unsafe fn Fr_square(to: *mut FrElement, a: *const FrElement); 71 | unsafe fn Fr_shl(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 72 | unsafe fn Fr_shr(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 73 | unsafe fn Fr_band(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 74 | // fn Fr_bor(to: &mut FrElement, a: &FrElement, b: &FrElement); 75 | // fn Fr_bxor(to: &mut FrElement, a: &FrElement, b: &FrElement); 76 | // fn Fr_bnot(to: &mut FrElement, a: &FrElement); 77 | unsafe fn Fr_eq(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 78 | unsafe fn Fr_neq(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 79 | unsafe fn Fr_lt(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 80 | unsafe fn Fr_gt(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 81 | unsafe fn Fr_leq(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 82 | unsafe fn Fr_geq(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 83 | unsafe fn Fr_isTrue(a: *mut FrElement) -> bool; 84 | // fn Fr_fromBool(to: &mut FrElement, a: bool); 85 | unsafe fn Fr_toInt(a: *mut FrElement) -> u64; 86 | unsafe fn Fr_lor(to: *mut FrElement, a: *const FrElement, b: *const FrElement); 87 | unsafe fn print(a: *mut FrElement); 88 | // fn Fr_pow(to: &mut FrElement, a: &FrElement, b: &FrElement); 89 | // fn Fr_idiv(to: &mut FrElement, a: &FrElement, b: &FrElement); 90 | } 91 | 92 | // C++ types and signatures exposed to Rust. 
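    // The extern block below binds the C++ code that circom's witness
    // generator emits (declared in witness.h). `run` drives the whole witness
    // computation against the tracing `FrElement` operations exported above,
    // while the `get_*` getters only report the sizes and offsets used later
    // in this file to slice the embedded `constants.dat` blob.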
93 | unsafe extern "C++" { 94 | include!("witness/include/witness.h"); 95 | 96 | unsafe fn run(ctx: *mut Circom_CalcWit); 97 | fn get_size_of_io_map() -> u32; 98 | fn get_total_signal_no() -> u32; 99 | fn get_main_input_signal_no() -> u32; 100 | fn get_main_input_signal_start() -> u32; 101 | fn get_number_of_components() -> u32; 102 | fn get_size_of_constants() -> u32; 103 | fn get_size_of_input_hashmap() -> u32; 104 | fn get_size_of_witness() -> u32; 105 | } 106 | } 107 | 108 | const DAT_BYTES: &[u8] = include_bytes!("constants.dat"); 109 | 110 | pub fn get_input_hash_map() -> Vec<HashSignalInfo> { 111 | let mut bytes = &DAT_BYTES[..(ffi::get_size_of_input_hashmap() as usize) * 24]; 112 | let mut input_hash_map = 113 | vec![HashSignalInfo::default(); ffi::get_size_of_input_hashmap() as usize]; 114 | for i in 0..ffi::get_size_of_input_hashmap() as usize { 115 | let hash = bytes.read_u64::<LittleEndian>().unwrap(); 116 | let signalid = bytes.read_u64::<LittleEndian>().unwrap(); 117 | let signalsize = bytes.read_u64::<LittleEndian>().unwrap(); 118 | input_hash_map[i] = HashSignalInfo { 119 | hash, 120 | signalid, 121 | signalsize, 122 | }; 123 | } 124 | input_hash_map 125 | } 126 | 127 | pub fn get_witness_to_signal() -> Vec<usize> { 128 | let mut bytes = &DAT_BYTES[(ffi::get_size_of_input_hashmap() as usize) * 24 129 | ..(ffi::get_size_of_input_hashmap() as usize) * 24 130 | + (ffi::get_size_of_witness() as usize) * 8]; 131 | let mut signal_list = Vec::with_capacity(ffi::get_size_of_witness() as usize); 132 | for i in 0..ffi::get_size_of_witness() as usize { 133 | signal_list.push(bytes.read_u64::<LittleEndian>().unwrap() as usize); 134 | } 135 | signal_list 136 | } 137 | 138 | pub fn get_constants() -> Vec<FrElement> { 139 | if ffi::get_size_of_constants() == 0 { 140 | return vec![]; 141 | } 142 | 143 | // skip the first part 144 | let mut bytes = &DAT_BYTES[(ffi::get_size_of_input_hashmap() as usize) * 24 145 | + (ffi::get_size_of_witness() as usize) * 8..]; 146 | let mut constants = vec![field::constant(U256::from(0)); ffi::get_size_of_constants() as usize]; 147 | for i in 0..ffi::get_size_of_constants() as usize { 148 | let sv = bytes.read_i32::<LittleEndian>().unwrap() as i32; 149 | let typ = bytes.read_u32::<LittleEndian>().unwrap() as u32; 150 | 151 | let mut buf = [0; 32]; 152 | bytes.read_exact(&mut buf).unwrap(); 153 | 154 | if typ & 0x80000000 == 0 { 155 | constants[i] = field::constant(U256::from(sv)); 156 | } else { 157 | constants[i] = 158 | field::constant(U256::from_le_bytes(buf).mul_redc(uint!(1_U256), M, INV)); 159 | } 160 | } 161 | 162 | return constants; 163 | } 164 | 165 | pub fn get_iosignals() -> Vec<InputOutputList> { 166 | if ffi::get_size_of_io_map() == 0 { 167 | return vec![]; 168 | } 169 | 170 | // skip the first part 171 | let mut bytes = &DAT_BYTES[(ffi::get_size_of_input_hashmap() as usize) * 24 172 | + (ffi::get_size_of_witness() as usize) * 8 173 | + (ffi::get_size_of_constants() as usize * 40)..]; 174 | let io_size = ffi::get_size_of_io_map() as usize; 175 | let hashmap_size = ffi::get_size_of_input_hashmap() as usize; 176 | let mut indices = vec![0usize; io_size]; 177 | let mut map: Vec<InputOutputList> = vec![InputOutputList::default(); hashmap_size]; 178 | 179 | (0..io_size).for_each(|i| { 180 | let t32 = bytes.read_u32::<LittleEndian>().unwrap() as usize; 181 | indices[i] = t32; 182 | }); 183 | 184 | (0..io_size).for_each(|i| { 185 | let l32 = bytes.read_u32::<LittleEndian>().unwrap() as usize; 186 | let mut io_list: InputOutputList = InputOutputList { defs: vec![] }; 187 | 188 | (0..l32).for_each(|_j| { 189 | let offset = bytes.read_u32::<LittleEndian>().unwrap() as usize; 190 | let len = bytes.read_u32::<LittleEndian>().unwrap() as usize + 1; 191 | 192 | let mut 
lengths = vec![0usize; len]; 193 | 194 | (1..len).for_each(|k| { 195 | lengths[k] = bytes.read_u32::<LittleEndian>().unwrap() as usize; 196 | }); 197 | 198 | io_list.defs.push(ffi::IODef { 199 | code: 0, 200 | offset, 201 | lengths, 202 | }); 203 | }); 204 | map[indices[i] % hashmap_size] = io_list; 205 | }); 206 | map 207 | } 208 | 209 | /// Run cpp witness generator and optimize graph 210 | pub fn build_witness() -> color_eyre::Result<()> { 211 | let mut signal_values = vec![field::undefined(); ffi::get_total_signal_no() as usize]; 212 | signal_values[0] = field::constant(uint!(1_U256)); 213 | 214 | let total_input_len = 215 | (ffi::get_main_input_signal_no() + ffi::get_main_input_signal_start()) as usize; 216 | 217 | for i in 0..total_input_len { 218 | signal_values[i + 1] = field::input(i + 1, uint!(0_U256)); 219 | } 220 | 221 | let mut ctx = ffi::Circom_CalcWit { 222 | signalValues: signal_values, 223 | componentMemory: vec![ 224 | ffi::Circom_Component::default(); 225 | ffi::get_number_of_components() as usize 226 | ], 227 | circuitConstants: get_constants(), 228 | templateInsId2IOSignalInfoList: get_iosignals(), 229 | listOfTemplateMessages: vec![], 230 | }; 231 | 232 | // measure time 233 | let now = Instant::now(); 234 | unsafe { 235 | ffi::run(&mut ctx as *mut _); 236 | } 237 | eprintln!("Calculation took: {:?}", now.elapsed()); 238 | 239 | let signal_values = get_witness_to_signal(); 240 | let mut signals = signal_values 241 | .into_iter() 242 | .map(|i| ctx.signalValues[i].0) 243 | .collect::<Vec<_>>(); 244 | let mut nodes = field::get_graph(); 245 | eprintln!("Graph with {} nodes", nodes.len()); 246 | 247 | // Optimize graph 248 | graph::optimize(&mut nodes, &mut signals); 249 | 250 | // Store graph to file. 251 | let input_map = get_input_hash_map(); 252 | let bytes = postcard::to_stdvec(&(&nodes, &signals, &input_map)).unwrap(); 253 | eprintln!("Graph size: {} bytes", bytes.len()); 254 | std::fs::write("graph.bin", bytes).unwrap(); 255 | 256 | // Evaluate the graph. 
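    // The steps below re-derive the graph's input count, seed an input buffer
    // from the recorded input nodes (inputs sit at the front of the node
    // list, hence the early `break`), and then time ten evaluations of the
    // optimized graph as a quick sanity benchmark.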
257 | let input_len = (ffi::get_main_input_signal_no() + ffi::get_main_input_signal_start()) as usize; // TODO: fetch from file 258 | let mut inputs = vec![U256::from(0); input_len]; 259 | inputs[0] = U256::from(1); 260 | for i in 1..nodes.len() { 261 | if let Node::Input(j) = nodes[i] { 262 | inputs[j] = get_values()[i]; 263 | } else { 264 | break; 265 | } 266 | } 267 | 268 | let now = Instant::now(); 269 | for _ in 0..10 { 270 | _ = graph::evaluate(&nodes, &inputs, &signals); 271 | } 272 | eprintln!("Calculation took: {:?}", now.elapsed() / 10); 273 | 274 | // Print graph 275 | // for (i, node) in nodes.iter().enumerate() { 276 | // println!("node[{}] = {:?}", i, node); 277 | // } 278 | // for (i, j) in signals.iter().enumerate() { 279 | // println!("signal[{}] = node[{}]", i, j); 280 | // } 281 | 282 | Ok(()) 283 | } 284 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/src/graph.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | ops::{Shl, Shr}, 4 | }; 5 | 6 | use crate::field::M; 7 | use ark_bn254::Fr; 8 | use ark_ff::PrimeField; 9 | use rand::Rng; 10 | use ruint::aliases::U256; 11 | use serde::{Deserialize, Serialize}; 12 | 13 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Compress, Validate}; 14 | 15 | fn ark_se(a: &A, s: S) -> Result 16 | where 17 | S: serde::Serializer, 18 | { 19 | let mut bytes = vec![]; 20 | a.serialize_with_mode(&mut bytes, Compress::Yes) 21 | .map_err(serde::ser::Error::custom)?; 22 | s.serialize_bytes(&bytes) 23 | } 24 | 25 | fn ark_de<'de, D, A: CanonicalDeserialize>(data: D) -> Result 26 | where 27 | D: serde::de::Deserializer<'de>, 28 | { 29 | let s: Vec = serde::de::Deserialize::deserialize(data)?; 30 | let a = A::deserialize_with_mode(s.as_slice(), Compress::Yes, Validate::Yes); 31 | a.map_err(serde::de::Error::custom) 32 | } 33 | 34 | #[derive(Hash, PartialEq, Eq, Debug, Clone, Copy, Serialize, Deserialize)] 35 | pub enum Operation { 36 | Mul, 37 | MMul, 38 | Add, 39 | Sub, 40 | Eq, 41 | Neq, 42 | Lt, 43 | Gt, 44 | Leq, 45 | Geq, 46 | Lor, 47 | Shl, 48 | Shr, 49 | Band, 50 | } 51 | 52 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 53 | pub enum Node { 54 | Input(usize), 55 | Constant(U256), 56 | #[serde(serialize_with = "ark_se", deserialize_with = "ark_de")] 57 | MontConstant(Fr), 58 | Op(Operation, usize, usize), 59 | } 60 | 61 | impl Operation { 62 | pub fn eval(&self, a: U256, b: U256) -> U256 { 63 | use Operation::*; 64 | match self { 65 | Add => a.add_mod(b, M), 66 | Sub => a.add_mod(M - b, M), 67 | Mul => a.mul_mod(b, M), 68 | Eq => U256::from(a == b), 69 | Neq => U256::from(a != b), 70 | Lt => U256::from(a < b), 71 | Gt => U256::from(a > b), 72 | Leq => U256::from(a <= b), 73 | Geq => U256::from(a >= b), 74 | Lor => U256::from(a != U256::ZERO || b != U256::ZERO), 75 | Shl => compute_shl_uint(a, b), 76 | Shr => compute_shr_uint(a, b), 77 | Band => a.bitand(b), 78 | _ => unimplemented!("operator {:?} not implemented", self), 79 | } 80 | } 81 | 82 | pub fn eval_fr(&self, a: Fr, b: Fr) -> Fr { 83 | use Operation::*; 84 | match self { 85 | Add => a + b, 86 | Sub => a - b, 87 | Mul => a * b, 88 | _ => unimplemented!("operator {:?} not implemented for Montgomery", self), 89 | } 90 | } 91 | } 92 | 93 | fn compute_shl_uint(a: U256, b: U256) -> U256 { 94 | debug_assert!(b.lt(&U256::from(256))); 95 | let ls_limb = b.as_limbs()[0]; 96 | a.shl(ls_limb as usize) 97 | } 98 | 99 | fn 
compute_shr_uint(a: U256, b: U256) -> U256 { 100 | debug_assert!(b.lt(&U256::from(256))); 101 | let ls_limb = b.as_limbs()[0]; 102 | a.shr(ls_limb as usize) 103 | } 104 | 105 | /// All references must be backwards. 106 | fn assert_valid(nodes: &[Node]) { 107 | for (i, &node) in nodes.iter().enumerate() { 108 | if let Node::Op(_, a, b) = node { 109 | assert!(a < i); 110 | assert!(b < i); 111 | } 112 | } 113 | } 114 | 115 | pub fn optimize(nodes: &mut Vec, outputs: &mut [usize]) { 116 | tree_shake(nodes, outputs); 117 | propagate(nodes); 118 | value_numbering(nodes, outputs); 119 | constants(nodes); 120 | tree_shake(nodes, outputs); 121 | montgomery_form(nodes); 122 | } 123 | 124 | #[allow(clippy::unnecessary_fallible_conversions)] // Prevents the false positive on line 143 125 | pub fn evaluate(nodes: &[Node], inputs: &[U256], outputs: &[usize]) -> Vec { 126 | // assert_valid(nodes); 127 | 128 | // Evaluate the graph. 129 | let mut values = Vec::with_capacity(nodes.len()); 130 | for &node in nodes.iter() { 131 | let value = match node { 132 | Node::Constant(c) => Fr::new(c.into()), 133 | Node::MontConstant(c) => c, 134 | Node::Input(i) => Fr::new(inputs[i].into()), 135 | Node::Op(op, a, b) => op.eval_fr(values[a], values[b]), 136 | }; 137 | values.push(value); 138 | } 139 | 140 | // Convert from Montgomery form and return the outputs. 141 | let mut out = vec![U256::ZERO; outputs.len()]; 142 | for i in 0..outputs.len() { 143 | out[i] = U256::try_from(values[outputs[i]].into_bigint()).unwrap(); 144 | } 145 | 146 | out 147 | } 148 | 149 | /// Constant propagation 150 | pub fn propagate(nodes: &mut [Node]) { 151 | assert_valid(nodes); 152 | let mut constants = 0_usize; 153 | for i in 0..nodes.len() { 154 | if let Node::Op(op, a, b) = nodes[i] { 155 | if let (Node::Constant(va), Node::Constant(vb)) = (nodes[a], nodes[b]) { 156 | nodes[i] = Node::Constant(op.eval(va, vb)); 157 | constants += 1; 158 | } else if a == b { 159 | // Not constant but equal 160 | use Operation::*; 161 | if let Some(c) = match op { 162 | Eq | Leq | Geq => Some(true), 163 | Neq | Lt | Gt => Some(false), 164 | _ => None, 165 | } { 166 | nodes[i] = Node::Constant(U256::from(c)); 167 | constants += 1; 168 | } 169 | } 170 | } 171 | } 172 | 173 | eprintln!("Propagated {constants} constants"); 174 | } 175 | 176 | /// Remove unused nodes 177 | pub fn tree_shake(nodes: &mut Vec, outputs: &mut [usize]) { 178 | assert_valid(nodes); 179 | 180 | // Mark all nodes that are used. 181 | let mut used = vec![false; nodes.len()]; 182 | for &i in outputs.iter() { 183 | used[i] = true; 184 | } 185 | 186 | // Work backwards from end as all references are backwards. 187 | for i in (0..nodes.len()).rev() { 188 | if used[i] { 189 | if let Node::Op(_, a, b) = nodes[i] { 190 | used[a] = true; 191 | used[b] = true; 192 | } 193 | } 194 | } 195 | 196 | // Remove unused nodes 197 | let n = nodes.len(); 198 | let mut retain = used.iter(); 199 | nodes.retain(|_| *retain.next().unwrap()); 200 | let removed = n - nodes.len(); 201 | 202 | // Renumber references. 203 | let mut renumber = vec![None; n]; 204 | let mut index = 0; 205 | for (i, &used) in used.iter().enumerate() { 206 | if used { 207 | renumber[i] = Some(index); 208 | index += 1; 209 | } 210 | } 211 | assert_eq!(index, nodes.len()); 212 | for (&used, renumber) in used.iter().zip(renumber.iter()) { 213 | assert_eq!(used, renumber.is_some()); 214 | } 215 | 216 | // Renumber references. 
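    // Every node kept above received a `Some(new_index)` entry in `renumber`,
    // so the unwraps below can only panic if an operand or output still
    // refers to a node that the marking pass dropped, i.e. on a bug.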
217 | for node in nodes.iter_mut() { 218 | if let Node::Op(_, a, b) = node { 219 | *a = renumber[*a].unwrap(); 220 | *b = renumber[*b].unwrap(); 221 | } 222 | } 223 | for output in outputs.iter_mut() { 224 | *output = renumber[*output].unwrap(); 225 | } 226 | 227 | eprintln!("Removed {removed} unused nodes"); 228 | } 229 | 230 | /// Randomly evaluate the graph 231 | fn random_eval(nodes: &mut [Node]) -> Vec<U256> { 232 | let mut rng = rand::thread_rng(); 233 | let mut values = Vec::with_capacity(nodes.len()); 234 | let mut inputs = HashMap::new(); 235 | let mut prfs = HashMap::new(); 236 | for node in nodes.iter() { 237 | use Operation::*; 238 | let value = match node { 239 | // Constants evaluate to themselves 240 | Node::Constant(c) => *c, 241 | 242 | Node::MontConstant(_c) => unimplemented!("should not be used"), 243 | 244 | // Algebraic Ops are evaluated directly 245 | // Since the field is large, by Schwartz-Zippel if 246 | // two values are the same then they are likely algebraically equal. 247 | Node::Op(op @ (Add | Sub | Mul), a, b) => op.eval(values[*a], values[*b]), 248 | 249 | // Input and non-algebraic ops are random functions 250 | // TODO: https://github.com/recmo/uint/issues/95 and use .gen_range(..M) 251 | Node::Input(i) => *inputs.entry(*i).or_insert_with(|| rng.gen::<U256>() % M), 252 | Node::Op(op, a, b) => *prfs 253 | .entry((*op, values[*a], values[*b])) 254 | .or_insert_with(|| rng.gen::<U256>() % M), 255 | }; 256 | values.push(value); 257 | } 258 | values 259 | } 260 | 261 | /// Value numbering 262 | pub fn value_numbering(nodes: &mut [Node], outputs: &mut [usize]) { 263 | assert_valid(nodes); 264 | 265 | // Evaluate the graph in random field elements. 266 | let values = random_eval(nodes); 267 | 268 | // Find all nodes with the same value. 269 | let mut value_map = HashMap::new(); 270 | for (i, &value) in values.iter().enumerate() { 271 | value_map.entry(value).or_insert_with(Vec::new).push(i); 272 | } 273 | 274 | // For nodes that are the same, pick the first index. 275 | let mut renumber = Vec::with_capacity(nodes.len()); 276 | for value in values { 277 | renumber.push(value_map[&value][0]); 278 | } 279 | 280 | // Renumber references. 281 | for node in nodes.iter_mut() { 282 | if let Node::Op(_, a, b) = node { 283 | *a = renumber[*a]; 284 | *b = renumber[*b]; 285 | } 286 | } 287 | for output in outputs.iter_mut() { 288 | *output = renumber[*output]; 289 | } 290 | 291 | eprintln!("Global value numbering applied"); 292 | } 293 | 294 | /// Probabilistic constant determination 295 | pub fn constants(nodes: &mut [Node]) { 296 | assert_valid(nodes); 297 | 298 | // Evaluate the graph in random field elements. 299 | let values_a = random_eval(nodes); 300 | let values_b = random_eval(nodes); 301 | 302 | // Find all nodes with the same value. 303 | let mut constants = 0; 304 | for i in 0..nodes.len() { 305 | if let Node::Constant(_) = nodes[i] { 306 | continue; 307 | } 308 | if values_a[i] == values_b[i] { 309 | nodes[i] = Node::Constant(values_a[i]); 310 | constants += 1; 311 | } 312 | } 313 | eprintln!("Found {} constants", constants); 314 | } 315 | 316 | /// Convert to Montgomery form 317 | pub fn montgomery_form(nodes: &mut [Node]) { 318 | for node in nodes.iter_mut() { 319 | use Node::*; 320 | use Operation::*; 321 | match node { 322 | Constant(c) => *node = MontConstant(Fr::new((*c).into())), 323 | MontConstant(..) => (), 324 | Input(..) => (), 325 | Op(Add | Sub | Mul, ..) => (), 326 | Op(..) 
=> unimplemented!("Operators Montgomery form"), 327 | } 328 | } 329 | eprintln!("Converted to Montgomery form"); 330 | } 331 | -------------------------------------------------------------------------------- /crates/circom-witness-rs/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod field; 2 | pub mod graph; 3 | 4 | #[cfg(feature = "build-witness")] 5 | pub mod generate; 6 | 7 | use std::collections::HashMap; 8 | 9 | use ruint::aliases::U256; 10 | use serde::{Deserialize, Serialize}; 11 | 12 | use crate::graph::Node; 13 | 14 | #[derive(Debug, Default, Clone, Serialize, Deserialize)] 15 | pub struct HashSignalInfo { 16 | pub hash: u64, 17 | pub signalid: u64, 18 | pub signalsize: u64, 19 | } 20 | 21 | pub struct Graph { 22 | pub nodes: Vec<Node>, 23 | pub signals: Vec<usize>, 24 | pub input_mapping: Vec<HashSignalInfo>, 25 | } 26 | 27 | fn fnv1a(s: &str) -> u64 { 28 | let mut hash: u64 = 0xCBF29CE484222325; 29 | for c in s.bytes() { 30 | hash ^= c as u64; 31 | hash = hash.wrapping_mul(0x100000001B3); 32 | } 33 | hash 34 | } 35 | 36 | /// Loads the graph from bytes 37 | pub fn init_graph(graph_bytes: &[u8]) -> color_eyre::Result<Graph> { 38 | let (nodes, signals, input_mapping): (Vec<Node>, Vec<usize>, Vec<HashSignalInfo>) = 39 | postcard::from_bytes(graph_bytes)?; 40 | 41 | Ok(Graph { 42 | nodes, 43 | signals, 44 | input_mapping, 45 | }) 46 | } 47 | 48 | /// Calculates the number of needed inputs 49 | pub fn get_inputs_size(graph: &Graph) -> usize { 50 | let mut start = false; 51 | let mut max_index = 0usize; 52 | for &node in graph.nodes.iter() { 53 | if let Node::Input(i) = node { 54 | if i > max_index { 55 | max_index = i; 56 | } 57 | start = true 58 | } else if start { 59 | break; 60 | } 61 | } 62 | max_index + 1 63 | } 64 | 65 | /// Allocates inputs vec with position 0 set to 1 66 | pub fn get_inputs_buffer(size: usize) -> Vec<U256> { 67 | let mut inputs = vec![U256::ZERO; size]; 68 | inputs[0] = U256::from(1); 69 | inputs 70 | } 71 | 72 | /// Calculates the position of the given signal in the inputs buffer 73 | pub fn get_input_mapping(input_list: &Vec<String>, graph: &Graph) -> HashMap<String, usize> { 74 | let mut input_mapping = HashMap::new(); 75 | for key in input_list { 76 | let h = fnv1a(key); 77 | let pos = graph 78 | .input_mapping 79 | .iter() 80 | .position(|x| x.hash == h) 81 | .unwrap(); 82 | let si = (graph.input_mapping[pos].signalid) as usize; 83 | input_mapping.insert(key.to_string(), si); 84 | } 85 | input_mapping 86 | } 87 | 88 | /// Sets all provided inputs given the mapping and inputs buffer 89 | pub fn populate_inputs( 90 | input_list: &HashMap<String, Vec<U256>>, 91 | input_mapping: &HashMap<String, usize>, 92 | input_buffer: &mut [U256], 93 | ) { 94 | for (key, value) in input_list { 95 | let start = input_mapping[key]; 96 | let end = start + value.len(); 97 | input_buffer[start..end].copy_from_slice(value); 98 | } 99 | } 100 | 101 | /// Calculate witness based on serialized graph and inputs 102 | pub fn calculate_witness( 103 | input_list: HashMap<String, Vec<U256>>, 104 | graph: &Graph, 105 | ) -> color_eyre::Result<Vec<U256>> { 106 | let mut inputs_buffer = get_inputs_buffer(get_inputs_size(graph)); 107 | let input_mapping = get_input_mapping(&input_list.keys().cloned().collect(), graph); 108 | populate_inputs(&input_list, &input_mapping, &mut inputs_buffer); 109 | Ok(graph::evaluate( 110 | &graph.nodes, 111 | &inputs_buffer, 112 | &graph.signals, 113 | )) 114 | } 115 | -------------------------------------------------------------------------------- /crates/hasher/Cargo.toml: -------------------------------------------------------------------------------- 1 | 
[package] 2 | name = "semaphore-rs-hasher" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | bytemuck.workspace = true 15 | -------------------------------------------------------------------------------- /crates/hasher/src/lib.rs: -------------------------------------------------------------------------------- 1 | use bytemuck::Pod; 2 | 3 | /// Hash types, values and algorithms for a Merkle tree 4 | pub trait Hasher { 5 | /// Type of the leaf and node hashes 6 | type Hash; 7 | 8 | /// Compute the hash of an intermediate node 9 | fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash; 10 | } 11 | 12 | /// A marker trait that indicates some useful properties of a hash type 13 | /// 14 | /// It's not strictly necessary, but for many implementations it's a useful set of constraints 15 | pub trait Hash: Pod + Eq + Send + Sync {} 16 | 17 | impl<T> Hash for T where T: Pod + Eq + Send + Sync {} 18 | -------------------------------------------------------------------------------- /crates/js/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | **/*.rs.bk 3 | Cargo.lock 4 | bin/ 5 | pkg/ 6 | pkg-nodejs/ 7 | pkg-bundler/ 8 | pkg-web/ 9 | wasm-pack.log 10 | -------------------------------------------------------------------------------- /crates/js/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-js" 3 | version = "0.3.0" 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [lib] 14 | crate-type = ["cdylib", "rlib"] 15 | 16 | [dependencies] 17 | semaphore-rs-proof = { workspace = true, default-features = false } 18 | ruint.workspace = true 19 | hex.workspace = true 20 | 21 | wasm-bindgen.workspace = true 22 | js-sys.workspace = true 23 | getrandom.workspace = true 24 | 25 | [dev-dependencies] 26 | wasm-bindgen-test.workspace = true 27 | -------------------------------------------------------------------------------- /crates/js/README.md: -------------------------------------------------------------------------------- 1 | # WASM bindings for semaphore-rs related functionality 2 | 3 | This crate exposes semaphore-rs functionality to WASM. Currently it only exposes proof compression. 4 | 5 | ## Building & publishing 6 | 7 | wasm-pack doesn't allow us to compile to a single target for node and browser usage. Instead we'll publish a package for each target. 8 | 9 | The `build_and_publish.sh` script handles all of that. 10 | 11 | To build and publish a new version simply run `./build_and_publish.sh`. Note that publishing will likely fail from your own npm account, since it requires publish rights for these packages. 12 | 13 | To only check the build output run `DRY_RUN=1 ./build_and_publish.sh`. 
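For example, a typical release flow is to first run `DRY_RUN=1 ./build_and_publish.sh` and inspect the generated `pkg-*` output directories, and only then run `./build_and_publish.sh` to publish each package to npm.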
14 | 15 | ## Example 16 | 17 | Refer to `example/index.mjs` or `example/index.ts` for usage 18 | -------------------------------------------------------------------------------- /crates/js/build.mjs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | // build.mjs 3 | // This script performs the following steps: 4 | // 1. Uses `cargo metadata` to extract package metadata. 5 | // 2. Constructs a detailed package.json using that metadata. 6 | // 3. Invokes `cargo build --target wasm32-unknown-unknown` for the local package. 7 | // 4. Locates the wasm artifact (by searching upward for the target directory), 8 | // and runs wasm-bindgen (with --target web, --omit-imports, --omit-default-module-path) 9 | // outputting the JS bindings into a pkg directory. 10 | // 5. Generates an inline loader script that embeds the wasm as a base64 string. 11 | // 6. Publishes the package (unless the `--dry-run` CLI argument is present). 12 | 13 | import fs from 'fs'; 14 | import path from 'path'; 15 | import { execSync } from 'child_process'; 16 | 17 | const isDryRun = process.argv.includes('--dry-run'); 18 | 19 | async function main() { 20 | // 1. Extract package metadata using cargo metadata. 21 | console.log("Extracting package metadata via cargo metadata..."); 22 | const metadataJson = execSync('cargo metadata --no-deps --format-version 1', { encoding: 'utf-8' }); 23 | const metadata = JSON.parse(metadataJson); 24 | const manifestPath = path.resolve('Cargo.toml'); 25 | const pkgMeta = metadata.packages.find(pkg => path.resolve(pkg.manifest_path) === manifestPath); 26 | if (!pkgMeta) { 27 | throw new Error("Could not find package metadata for the current Cargo.toml"); 28 | } 29 | const pkgName = pkgMeta.name; 30 | const pkgVersion = pkgMeta.version; 31 | const pkgDescription = pkgMeta.description || ""; 32 | const pkgLicense = pkgMeta.license || ""; 33 | const pkgHomepage = pkgMeta.homepage || ""; 34 | const pkgRepository = pkgMeta.repository || ""; 35 | const pkgKeywords = pkgMeta.keywords || []; 36 | // We'll use authors as collaborators. 37 | const pkgCollaborators = pkgMeta.authors || []; 38 | 39 | // Convert crate name to snake_case for file naming (dashes become underscores). 40 | const pkgBaseName = pkgName.replace(/-/g, '_'); 41 | // wasm-bindgen will output files named like `.js` and `_bg.wasm` 42 | const wasmBindgenJs = `${pkgBaseName}.js`; 43 | const wasmBindgenWasm = `${pkgBaseName}_bg.wasm`; 44 | const wasmBindgenDts = `${pkgBaseName}.d.ts`; 45 | 46 | // 2. Build the Rust crate for the wasm target. 47 | console.log("Building the Rust project with cargo..."); 48 | execSync('cargo build --target wasm32-unknown-unknown', { stdio: 'inherit' }); 49 | 50 | // 3. Locate the target directory by searching upward from the current directory. 
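  // (In a Cargo workspace the `target/` directory lives at the workspace root
  // rather than next to this crate's Cargo.toml, hence the upward walk.)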
51 | let targetDir = null; 52 | let currentDir = process.cwd(); 53 | while (currentDir !== path.parse(currentDir).root) { 54 | const potentialTarget = path.join(currentDir, 'target'); 55 | if (fs.existsSync(potentialTarget)) { 56 | targetDir = potentialTarget; 57 | break; 58 | } 59 | currentDir = path.dirname(currentDir); 60 | } 61 | if (!targetDir) { 62 | throw new Error("Could not locate the target directory"); 63 | } 64 | // Assume a debug build; the wasm artifact should be at: 65 | // target/wasm32-unknown-unknown/debug/.wasm 66 | const wasmArtifactPath = path.join(targetDir, 'wasm32-unknown-unknown', 'debug', `${pkgBaseName}.wasm`); 67 | if (!fs.existsSync(wasmArtifactPath)) { 68 | throw new Error(`Wasm artifact not found at ${wasmArtifactPath}`); 69 | } 70 | 71 | // 4. Run wasm-bindgen on the artifact. 72 | console.log("Running wasm-bindgen..."); 73 | const pkgDir = path.resolve('pkg'); 74 | if (!fs.existsSync(pkgDir)) { 75 | fs.mkdirSync(pkgDir); 76 | } 77 | const wasmBindgenCmd = `wasm-bindgen ${wasmArtifactPath} --out-dir ${pkgDir} --target web --omit-imports --omit-default-module-path`; 78 | execSync(wasmBindgenCmd, { stdio: 'inherit' }); 79 | 80 | // 5. Construct the inline loader. 81 | const wasmOutputPath = path.join(pkgDir, wasmBindgenWasm); 82 | if (!fs.existsSync(wasmOutputPath)) { 83 | throw new Error(`Wasm file not found in pkg directory: ${wasmOutputPath}`); 84 | } 85 | const wasmBuffer = fs.readFileSync(wasmOutputPath); 86 | const wasmBase64 = wasmBuffer.toString('base64'); 87 | 88 | const inlineLoaderContent = ` 89 | // This file is auto-generated by build.mjs. 90 | // It inlines the wasm module as a base64 string and loads it synchronously. 91 | 92 | import { initSync } from './${wasmBindgenJs}'; 93 | 94 | const base64Wasm = "${wasmBase64}"; 95 | 96 | // Convert a base64 string to a Uint8Array. 97 | function base64ToUint8Array(base64) { 98 | if (typeof atob === 'function') { 99 | const binaryString = atob(base64); 100 | const len = binaryString.length; 101 | const bytes = new Uint8Array(len); 102 | for (let i = 0; i < len; i++) { 103 | bytes[i] = binaryString.charCodeAt(i); 104 | } 105 | return bytes; 106 | } else if (typeof Buffer === 'function') { 107 | return new Uint8Array(Buffer.from(base64, 'base64')); 108 | } else { 109 | throw new Error('No base64 decoder available'); 110 | } 111 | } 112 | 113 | const wasmBytes = base64ToUint8Array(base64Wasm); 114 | 115 | // Initialize the generated bindings with the inlined wasm instance. 116 | initSync({ module: wasmBytes }); 117 | 118 | export * from './${wasmBindgenJs}'; 119 | `.trim(); 120 | 121 | const inlineLoaderPath = path.join(pkgDir, 'index.js'); 122 | fs.writeFileSync(inlineLoaderPath, inlineLoaderContent); 123 | console.log(`Generated inline loader: ${inlineLoaderPath}`); 124 | 125 | // 6. Construct a fleshed-out package.json. 126 | const packageJson = { 127 | name: pkgName, 128 | type: "module", 129 | collaborators: pkgCollaborators, 130 | description: pkgDescription, 131 | version: pkgVersion, 132 | license: pkgLicense, 133 | repository: pkgRepository ? { type: "git", url: pkgRepository } : undefined, 134 | files: [ 135 | // With inlined wasm, ship the loader and generated JS bindings. 136 | "index.js", 137 | wasmBindgenJs, 138 | wasmBindgenDts 139 | ], 140 | main: "index.js", 141 | homepage: pkgHomepage, 142 | types: wasmBindgenDts, 143 | sideEffects: [ 144 | "./snippets/*" 145 | ], 146 | keywords: pkgKeywords 147 | }; 148 | 149 | // Remove any keys that are undefined. 
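  // (JSON.stringify would omit `undefined` values on its own; deleting them
  // also keeps the in-memory object consistent with the file written below.)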
150 | Object.keys(packageJson).forEach(key => { 151 | if (packageJson[key] === undefined) { 152 | delete packageJson[key]; 153 | } 154 | }); 155 | 156 | const pkgJsonPath = path.join(pkgDir, 'package.json'); 157 | fs.writeFileSync(pkgJsonPath, JSON.stringify(packageJson, null, 2)); 158 | console.log(`Updated package.json: ${pkgJsonPath}`); 159 | 160 | // 7. Publish the package unless --dry-run is provided. 161 | if (!isDryRun) { 162 | console.log("Publishing package..."); 163 | execSync('npm publish', { stdio: 'inherit', cwd: pkgDir }); 164 | } else { 165 | console.log("--dry-run flag present, skipping npm publish."); 166 | } 167 | 168 | console.log("Build complete: Wasm module inlined and package.json regenerated."); 169 | } 170 | 171 | main().catch(err => { 172 | console.error(err); 173 | process.exit(1); 174 | }); 175 | 176 | -------------------------------------------------------------------------------- /crates/js/example/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | -------------------------------------------------------------------------------- /crates/js/example/index.mjs: -------------------------------------------------------------------------------- 1 | import { compressProof, decompressProof } from "semaphore-rs-js"; 2 | 3 | const proof = [ 4 | "0x2d77679b613036865f4518894c80691cf65338fe7834fe3dd5f98c4f0f5a9e6d", 5 | "0x24018e845edf74d69528a63eed053296a397df13a1d08873e2b2d673837b31c3", 6 | "0x099d39b2cbca524b5916ac97dbc4afc1b8a5f59d65ba583fc49ec2677226e926", 7 | "0x0da5812d7b4e0beb22d25c194431674396aec70751873edb9ac8c933ba1f0f2e", 8 | "0x0723caca23efb9aa44db59ead0eeb28c2efb9c766d9a3f994ed047179e37b347", 9 | "0x02166d9fc2d4cf446b120e5663880e0927825aa36a02b896ac0f3a5ef6e0239b", 10 | "0x287fb1d0415a734ba76df9eb50ca6758bb806272f8fe40e3adbad3a850c05167", 11 | "0x1240cf8aa43cf4ea4a2d8dffac653a6467cefd0f19e129cffad85299d6705444", 12 | ]; 13 | 14 | const compressed = compressProof(proof); 15 | const decompressed = decompressProof(compressed); 16 | 17 | for (let i = 0; i < 8; i++) { 18 | if (proof[i] !== decompressed[i]) { 19 | console.log("Proof not equal after decompression"); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /crates/js/example/index.ts: -------------------------------------------------------------------------------- 1 | import { compressProof, decompressProof } from "semaphore-rs-js"; 2 | 3 | const proof: [string, string, string, string, string, string, string, string] = 4 | [ 5 | "0x2d77679b613036865f4518894c80691cf65338fe7834fe3dd5f98c4f0f5a9e6d", 6 | "0x24018e845edf74d69528a63eed053296a397df13a1d08873e2b2d673837b31c3", 7 | "0x099d39b2cbca524b5916ac97dbc4afc1b8a5f59d65ba583fc49ec2677226e926", 8 | "0x0da5812d7b4e0beb22d25c194431674396aec70751873edb9ac8c933ba1f0f2e", 9 | "0x0723caca23efb9aa44db59ead0eeb28c2efb9c766d9a3f994ed047179e37b347", 10 | "0x02166d9fc2d4cf446b120e5663880e0927825aa36a02b896ac0f3a5ef6e0239b", 11 | "0x287fb1d0415a734ba76df9eb50ca6758bb806272f8fe40e3adbad3a850c05167", 12 | "0x1240cf8aa43cf4ea4a2d8dffac653a6467cefd0f19e129cffad85299d6705444", 13 | ]; 14 | 15 | const compressed = compressProof(proof); 16 | const decompressed = decompressProof(compressed); 17 | 18 | for (let i = 0; i < 8; i++) { 19 | if (proof[i] !== decompressed[i]) { 20 | console.log("Proof not equal after decompression"); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /crates/js/example/package-lock.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "testing", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "testing", 9 | "version": "1.0.0", 10 | "license": "ISC", 11 | "dependencies": { 12 | "semaphore-rs-js": "0.3.0" 13 | } 14 | }, 15 | "node_modules/semaphore-rs-js": { 16 | "version": "0.3.0", 17 | "resolved": "https://registry.npmjs.org/semaphore-rs-js/-/semaphore-rs-js-0.3.0.tgz", 18 | "integrity": "sha512-+RbHspjjR7S+34H8u25dlLryhxdqd0DOoPFL/U8g/AB03jyJOtQCofmROiEYjy83gkJp7h2Lgbf12H71YBfIoQ==" 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /crates/js/example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "testing", 3 | "version": "1.0.0", 4 | "main": "index.js", 5 | "scripts": { 6 | "test": "echo \"Error: no test specified\" && exit 1" 7 | }, 8 | "author": "", 9 | "license": "ISC", 10 | "description": "", 11 | "dependencies": { 12 | "semaphore-rs-js": "0.3.0" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /crates/js/src/lib.rs: -------------------------------------------------------------------------------- 1 | use js_sys::Array; 2 | use ruint::aliases::U256; 3 | use semaphore_rs_proof::{compression::CompressedProof, Proof}; 4 | use wasm_bindgen::prelude::*; 5 | 6 | /// Compresses a Groth16 proof 7 | #[wasm_bindgen( 8 | js_name = "compressProof", 9 | unchecked_return_type = "[string, string, string, string]", 10 | return_description = "An array of 4 0x prefixed, hex encoded strings representing a compressed proof" 11 | )] 12 | pub fn compress_proof( 13 | #[wasm_bindgen( 14 | unchecked_param_type = "[string, string, string, string, string, string, string, string]", 15 | param_description = "An array of 8 hex encoded strings (with optional 0x prefixes) that represent an uncompressed proof" 16 | )] 17 | proof: Array, 18 | ) -> Result { 19 | let proof: Vec = proof 20 | .iter() 21 | .map(|v| v.as_string().unwrap_or_default()) 22 | .collect(); 23 | 24 | let proof = from_vec(proof)?; 25 | let proof = Proof::from_flat(proof); 26 | 27 | let proof = semaphore_rs_proof::compression::compress_proof(proof) 28 | .ok_or_else(|| JsError::new("Failed to compress proof"))? 
29 | .flatten(); 30 | 31 | Ok(to_js_array(proof)) 32 | } 33 | 34 | /// Decompresses a Groth16 proof 35 | #[wasm_bindgen( 36 | js_name = "decompressProof", 37 | unchecked_return_type = "[string, string, string, string, string, string, string, string]", 38 | return_description = "An array of 8 0x prefixed, hex encoded strings representing an uncompressed proof" 39 | )] 40 | pub fn decompress_proof( 41 | #[wasm_bindgen( 42 | js_name = "compressedProof", 43 | unchecked_param_type = "[string, string, string, string]", 44 | param_description = "An array of 4 hex encoded strings (with optional 0x prefixes) that represent a compressed proof" 45 | )] 46 | compressed_proof: Array, 47 | ) -> Result { 48 | let compressed_proof: Vec = compressed_proof 49 | .iter() 50 | .map(|v| v.as_string().unwrap_or_default()) 51 | .collect(); 52 | 53 | let proof = from_vec(compressed_proof)?; 54 | let proof = CompressedProof::from_flat(proof); 55 | 56 | let proof = semaphore_rs_proof::compression::decompress_proof(proof) 57 | .ok_or_else(|| JsError::new("Failed to decompress proof"))?; 58 | let proof = proof.flatten(); 59 | 60 | Ok(to_js_array(proof)) 61 | } 62 | 63 | fn from_vec(proof: Vec) -> Result<[U256; N], JsError> { 64 | if proof.len() != N { 65 | return Err(JsError::new(&format!("Proof length must be {N}"))); 66 | } 67 | 68 | let proof: Vec = proof 69 | .into_iter() 70 | .map(|s| { 71 | U256::from_str_radix(s.trim_start_matches("0x"), 16) 72 | .map_err(|err| JsError::new(&err.to_string())) 73 | }) 74 | .collect::>()?; 75 | 76 | let proof: [U256; N] = proof.try_into().unwrap(); 77 | 78 | Ok(proof) 79 | } 80 | 81 | fn to_js_array(arr: [U256; N]) -> Array { 82 | let js_array = Array::new(); 83 | arr.iter().take(N).for_each(|v| { 84 | js_array.push(&JsValue::from_str(&format!("{:#066x}", v))); 85 | }); 86 | js_array 87 | } 88 | -------------------------------------------------------------------------------- /crates/keccak/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-keccak" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | semaphore-rs-hasher.workspace = true 15 | tiny-keccak = { workspace = true, features = ["keccak"] } 16 | 17 | [features] 18 | default = ["sha3"] 19 | sha3 = ["tiny-keccak/sha3"] 20 | -------------------------------------------------------------------------------- /crates/keccak/src/keccak.rs: -------------------------------------------------------------------------------- 1 | use semaphore_rs_hasher::Hasher; 2 | use tiny_keccak::{Hasher as _, Keccak}; 3 | pub struct Keccak256; 4 | 5 | impl Hasher for Keccak256 { 6 | type Hash = [u8; 32]; 7 | 8 | fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash { 9 | let mut keccak = Keccak::v256(); 10 | let mut output = [0; 32]; 11 | 12 | keccak.update(left); 13 | keccak.update(right); 14 | keccak.finalize(&mut output); 15 | 16 | output 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /crates/keccak/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod keccak; 2 | 3 | #[cfg(feature = "sha3")] 4 | pub mod sha3; 5 | -------------------------------------------------------------------------------- 
/crates/keccak/src/sha3.rs: -------------------------------------------------------------------------------- 1 | use semaphore_rs_hasher::Hasher; 2 | use tiny_keccak::{Hasher as _, Sha3}; 3 | 4 | pub struct Sha3_256; 5 | 6 | impl Hasher for Sha3_256 { 7 | type Hash = [u8; 32]; 8 | 9 | fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash { 10 | let mut sha3_hasher = Sha3::v256(); 11 | 12 | sha3_hasher.update(left); 13 | sha3_hasher.update(right); 14 | 15 | let mut out = [0u8; 32]; 16 | sha3_hasher.finalize(&mut out); 17 | 18 | out 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /crates/poseidon/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-poseidon" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | semaphore-rs-hasher.workspace = true 15 | ark-bn254.workspace = true 16 | ark-ff.workspace = true 17 | once_cell.workspace = true 18 | ruint.workspace = true 19 | -------------------------------------------------------------------------------- /crates/poseidon/src/lib.rs: -------------------------------------------------------------------------------- 1 | use ruint::aliases::U256; 2 | use semaphore_rs_hasher::Hasher; 3 | 4 | pub mod constants; 5 | pub mod poseidon; 6 | 7 | pub struct Poseidon; 8 | 9 | impl Hasher for Poseidon { 10 | type Hash = U256; 11 | 12 | fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash { 13 | poseidon::hash2(*left, *right) 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /crates/poseidon/src/poseidon.rs: -------------------------------------------------------------------------------- 1 | use ark_bn254::Fr; 2 | use ark_ff::{Field, Zero}; 3 | use once_cell::sync::Lazy; 4 | use ruint::aliases::U256; 5 | 6 | use crate::constants; 7 | 8 | static M1: Lazy<[[Fr; 2]; 2]> = Lazy::new(|| { 9 | constants::M1 10 | .iter() 11 | .map(|row| { 12 | row.iter() 13 | .map(Fr::try_from) 14 | .collect::, _>>() 15 | .unwrap() 16 | .try_into() 17 | .unwrap() 18 | }) 19 | .collect::>() 20 | .try_into() 21 | .unwrap() 22 | }); 23 | 24 | static C1: Lazy<[[Fr; 2]; 64]> = Lazy::new(|| { 25 | constants::C1 26 | .iter() 27 | .map(|row| { 28 | row.iter() 29 | .map(Fr::try_from) 30 | .collect::, _>>() 31 | .unwrap() 32 | .try_into() 33 | .unwrap() 34 | }) 35 | .collect::>() 36 | .try_into() 37 | .unwrap() 38 | }); 39 | 40 | static M: Lazy<[[Fr; 3]; 3]> = Lazy::new(|| { 41 | constants::M 42 | .iter() 43 | .map(|row| { 44 | row.iter() 45 | .map(Fr::try_from) 46 | .collect::, _>>() 47 | .unwrap() 48 | .try_into() 49 | .unwrap() 50 | }) 51 | .collect::>() 52 | .try_into() 53 | .unwrap() 54 | }); 55 | 56 | static C: Lazy<[[Fr; 3]; 65]> = Lazy::new(|| { 57 | constants::C 58 | .iter() 59 | .map(|row| { 60 | row.iter() 61 | .map(Fr::try_from) 62 | .collect::, _>>() 63 | .unwrap() 64 | .try_into() 65 | .unwrap() 66 | }) 67 | .collect::>() 68 | .try_into() 69 | .unwrap() 70 | }); 71 | 72 | /// Compute the one-value Poseidon hash function. 73 | /// 74 | /// # Panics 75 | /// 76 | /// Panics if `input` is not a valid field element. 
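/// # Example
///
/// Mirrors the `test_hash1` vector in the tests at the bottom of this file:
///
/// ```ignore
/// use ruint::uint;
/// uint! {
///     assert_eq!(hash1(0_U256), 0x2a09a9fd93c590c26b91effbb2499f07e8f7aa12e2b4940a3aed2411cb65e11c_U256);
/// }
/// ```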
77 | #[must_use] 78 | pub fn hash1(value: U256) -> U256 { 79 | let value = value.try_into().unwrap(); 80 | let mut state = [Fr::zero(), value]; 81 | 82 | for i in 0..64 { 83 | // Add round constants 84 | state[0] += C1[i][0]; 85 | state[1] += C1[i][1]; 86 | 87 | // SubWords, S-Box: Exponentiate 88 | state[0] = state[0].pow([5]); 89 | if !(4..60).contains(&i) { 90 | state[1] = state[1].pow([5]); 91 | } 92 | 93 | // MixLayer: Multiply by maximum distance separable matrix 94 | state = [ 95 | M1[0][0] * state[0] + M1[0][1] * state[1], 96 | M1[1][0] * state[0] + M1[1][1] * state[1], 97 | ]; 98 | } 99 | state[0].into() 100 | } 101 | 102 | /// Compute the two-value Poseidon hash function. 103 | /// 104 | /// # Panics 105 | /// 106 | /// Panics if `left`, `right` are not a valid field element. 107 | #[must_use] 108 | pub fn hash2(left: U256, right: U256) -> U256 { 109 | let left = left.try_into().unwrap(); 110 | let right = right.try_into().unwrap(); 111 | let mut state = [Fr::zero(), left, right]; 112 | 113 | for i in 0..65 { 114 | // Add round constants 115 | state[0] += C[i][0]; 116 | state[1] += C[i][1]; 117 | state[2] += C[i][2]; 118 | 119 | // SubWords, S-Box: Exponentiate 120 | state[0] = state[0].pow([5]); 121 | if !(4..61).contains(&i) { 122 | state[1] = state[1].pow([5]); 123 | state[2] = state[2].pow([5]); 124 | } 125 | 126 | // MixLayer: Multiply by maximum distance separable matrix 127 | state = [ 128 | M[0][0] * state[0] + M[0][1] * state[1] + M[0][2] * state[2], 129 | M[1][0] * state[0] + M[1][1] * state[1] + M[1][2] * state[2], 130 | M[2][0] * state[0] + M[2][1] * state[1] + M[2][2] * state[2], 131 | ]; 132 | } 133 | state[0].into() 134 | } 135 | 136 | #[cfg(test)] 137 | mod tests { 138 | use ruint::uint; 139 | 140 | use super::*; 141 | 142 | #[test] 143 | fn test_hash1() { 144 | uint! { 145 | assert_eq!(hash1(0_U256), 0x2a09a9fd93c590c26b91effbb2499f07e8f7aa12e2b4940a3aed2411cb65e11c_U256); 146 | 147 | } 148 | } 149 | 150 | #[test] 151 | fn test_hash2() { 152 | uint! 
{ 153 | assert_eq!(hash2(0_U256, 0_U256), 0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864_U256); 154 | assert_eq!(hash2(31213_U256, 132_U256), 0x303f59cd0831b5633bcda50514521b33776b5d4280eb5868ba1dbbe2e4d76ab5_U256); 155 | } 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /crates/proof/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-proof" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | semaphore-rs-utils.workspace = true 15 | semaphore-rs-ark-circom = { workspace = true, optional = true } 16 | 17 | ruint.workspace = true 18 | serde.workspace = true 19 | serde_json.workspace = true 20 | ark-ec = { workspace = true, optional = true } 21 | ark-groth16 = { workspace = true, optional = true } 22 | ark-bn254 = { workspace = true, optional = true } 23 | alloy-core = { workspace = true } 24 | lazy_static.workspace = true 25 | getrandom.workspace = true 26 | hex.workspace = true 27 | 28 | [features] 29 | default = ["ark"] 30 | ark = ["dep:semaphore-rs-ark-circom", "dep:ark-ec", "dep:ark-groth16", "dep:ark-bn254"] 31 | -------------------------------------------------------------------------------- /crates/proof/src/ark.rs: -------------------------------------------------------------------------------- 1 | use super::Proof; 2 | use ark_bn254::Config; 3 | use ark_ec::bn::Bn; 4 | use ark_groth16::Proof as ArkProof; 5 | use semaphore_rs_ark_circom::ethereum::AffineError; 6 | 7 | impl From<ArkProof<Bn<Config>>> for Proof { 8 | fn from(proof: ArkProof<Bn<Config>>) -> Self { 9 | let proof = semaphore_rs_ark_circom::ethereum::Proof::from(proof); 10 | let (a, b, c) = proof.as_tuple(); 11 | Self(a, b, c) 12 | } 13 | } 14 | 15 | impl TryFrom<Proof> for ArkProof<Bn<Config>> { 16 | type Error = AffineError; 17 | 18 | fn try_from(proof: Proof) -> Result<Self, Self::Error> { 19 | let eth_proof = semaphore_rs_ark_circom::ethereum::Proof { 20 | a: semaphore_rs_ark_circom::ethereum::G1 { 21 | x: proof.0 .0, 22 | y: proof.0 .1, 23 | }, 24 | #[rustfmt::skip] // Rustfmt inserts some confusing spaces 25 | b: semaphore_rs_ark_circom::ethereum::G2 { 26 | // The order of coefficients is flipped. 27 | x: [proof.1.0[1], proof.1.0[0]], 28 | y: [proof.1.1[1], proof.1.1[0]], 29 | }, 30 | c: semaphore_rs_ark_circom::ethereum::G1 { 31 | x: proof.2 .0, 32 | y: proof.2 .1, 33 | }, 34 | }; 35 | // This conversion can fail if points are not on the curve. 36 | eth_proof.try_into() 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /crates/proof/src/lib.rs: -------------------------------------------------------------------------------- 1 | use ruint::aliases::U256; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | #[cfg(feature = "ark")] 5 | mod ark; 6 | 7 | pub mod compression; 8 | 9 | pub mod packing; 10 | 11 | // Matches the private G1Tup type in ark-circom. 12 | pub type G1 = (U256, U256); 13 | 14 | // Matches the private G2Tup type in ark-circom. 
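// (Note: as the conversion in ark.rs above shows, the order of the two Fq2
// coefficients here is flipped relative to the arkworks representation.)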
15 | pub type G2 = ([U256; 2], [U256; 2]); 16 | 17 | /// Wrap a proof object so we have serde support 18 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)] 19 | pub struct Proof(pub G1, pub G2, pub G1); 20 | 21 | impl Proof { 22 | pub const fn from_flat(flat: [U256; 8]) -> Self { 23 | let [x0, x1, x2, x3, x4, x5, x6, x7] = flat; 24 | Self((x0, x1), ([x2, x3], [x4, x5]), (x6, x7)) 25 | } 26 | 27 | pub const fn flatten(self) -> [U256; 8] { 28 | let Self((a0, a1), ([bx0, bx1], [by0, by1]), (c0, c1)) = self; 29 | [a0, a1, bx0, bx1, by0, by1, c0, c1] 30 | } 31 | } 32 | 33 | #[cfg(test)] 34 | mod tests { 35 | use super::*; 36 | 37 | #[test] 38 | fn deser() { 39 | let s = r#"[["0x1","0x2"],[["0x3","0x4"],["0x5","0x6"]],["0x7","0x8"]]"#; 40 | 41 | let deserialized: Proof = serde_json::from_str(s).unwrap(); 42 | let reserialized = serde_json::to_string(&deserialized).unwrap(); 43 | 44 | assert_eq!(s, reserialized); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /crates/proof/src/packing.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fmt::Display, 3 | str::{from_utf8, FromStr}, 4 | }; 5 | 6 | use alloy_core::sol_types::{ 7 | sol_data::{FixedArray, Uint}, 8 | SolType, SolValue, 9 | }; 10 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 11 | 12 | use crate::Proof; 13 | use semaphore_rs_utils::{bytes_from_hex, bytes_to_hex, deserialize_bytes, serialize_bytes}; 14 | 15 | /// A packed proof is a representation of the ZKP in a single attribute (as 16 | /// opposed to array of arrays) which is easier to transport 17 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 18 | pub struct PackedProof(pub [u8; 256]); 19 | 20 | impl From for PackedProof { 21 | fn from(proof: Proof) -> Self { 22 | let flat_proof = [ 23 | proof.0 .0, 24 | proof.0 .1, 25 | proof.1 .0[0], 26 | proof.1 .0[1], 27 | proof.1 .1[0], 28 | proof.1 .1[1], 29 | proof.2 .0, 30 | proof.2 .1, 31 | ]; 32 | 33 | let bytes = flat_proof.abi_encode(); 34 | let mut encoded = [0u8; 256]; 35 | encoded.copy_from_slice(&bytes[..256]); 36 | Self(encoded) 37 | } 38 | } 39 | 40 | impl From for Proof { 41 | fn from(proof: PackedProof) -> Self { 42 | let decoded = FixedArray::, 8>::abi_decode(&proof.0).unwrap(); 43 | 44 | let a = (decoded[0], decoded[1]); 45 | let b = ([decoded[2], decoded[3]], [decoded[4], decoded[5]]); 46 | let c = (decoded[6], decoded[7]); 47 | 48 | Self(a, b, c) 49 | } 50 | } 51 | 52 | impl Display for PackedProof { 53 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 54 | let hex = bytes_to_hex::<256, 514>(&self.0); 55 | write!( 56 | f, 57 | "{}", 58 | from_utf8(&hex).expect("failed to convert to string") 59 | ) 60 | } 61 | } 62 | 63 | impl FromStr for PackedProof { 64 | type Err = hex::FromHexError; 65 | 66 | fn from_str(s: &str) -> Result { 67 | bytes_from_hex::<256>(s).map(Self) 68 | } 69 | } 70 | 71 | impl Serialize for PackedProof { 72 | fn serialize(&self, serializer: S) -> Result { 73 | serialize_bytes::<256, 514, S>(serializer, &self.0) 74 | } 75 | } 76 | 77 | impl<'de> Deserialize<'de> for PackedProof { 78 | fn deserialize>(deserializer: D) -> Result { 79 | let bytes = deserialize_bytes::<256, _>(deserializer)?; 80 | Ok(Self(bytes)) 81 | } 82 | } 83 | 84 | #[cfg(test)] 85 | pub mod test { 86 | use super::*; 87 | use ruint::aliases::U256; 88 | 89 | #[test] 90 | fn test_serializing_proof_into_packed_proof() { 91 | let proof = Proof( 92 | (U256::from(1), U256::from(2)), 93 | 
( 94 | [U256::from(3), U256::from(4)], 95 | [U256::from(5), U256::from(6)], 96 | ), 97 | (U256::from(7), U256::from(8)), 98 | ); 99 | 100 | let packed_proof = PackedProof::from(proof); 101 | 102 | assert_eq!(packed_proof.to_string(), "0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000008"); 103 | 104 | let proof2 = Proof::from(packed_proof); 105 | 106 | assert_eq!(proof, proof2); 107 | } 108 | 109 | #[test] 110 | fn test_parse_from_string() { 111 | let packed_proof_str = "0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000008"; 112 | 113 | let packed_proof = PackedProof::from_str(packed_proof_str).unwrap(); 114 | 115 | let expected_proof = Proof( 116 | (U256::from(1), U256::from(2)), 117 | ( 118 | [U256::from(3), U256::from(4)], 119 | [U256::from(5), U256::from(6)], 120 | ), 121 | (U256::from(7), U256::from(8)), 122 | ); 123 | 124 | let proof: Proof = packed_proof.into(); 125 | 126 | assert_eq!(proof, expected_proof); 127 | } 128 | 129 | #[test] 130 | fn test_parse_from_string_without_prefix() { 131 | // note the lack of 0x prefix 132 | let packed_proof_str = "00000000000000000000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000008"; 133 | 134 | let packed_proof = PackedProof::from_str(packed_proof_str).unwrap(); 135 | 136 | let expected_proof = Proof( 137 | (U256::from(5), U256::from(6)), 138 | ( 139 | [U256::from(3), U256::from(4)], 140 | [U256::from(5), U256::from(6)], 141 | ), 142 | (U256::from(7), U256::from(8)), 143 | ); 144 | 145 | let proof: Proof = packed_proof.into(); 146 | 147 | assert_eq!(proof, expected_proof); 148 | } 149 | 150 | #[test] 151 | fn test_serialize_proof_to_json() { 152 | let packed_proof_str = "0x00000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000008"; 153 | 154 | let 
packed_proof = PackedProof::from_str(packed_proof_str).unwrap(); 155 | let proof: Proof = packed_proof.into(); 156 | 157 | let serialized = serde_json::to_value(proof).unwrap(); 158 | 159 | assert_eq!( 160 | serialized, 161 | serde_json::json!([ 162 | ["0x1", "0x2"], 163 | [["0x3", "0x4"], ["0x5", "0x6"]], 164 | ["0x7", "0x8"] 165 | ]) 166 | ); 167 | } 168 | 169 | #[test] 170 | fn test_serialize_proof_to_json_real_numbers() { 171 | let packed_proof_str = "0x15c1fc6907219676890dfe147ee6f10b580c7881dddacb1567b3bcbfc513a54d233afda3efff43a7631990d2e79470abcbae3ccad4b920476e64745bfe97bb0a0c8c7d7434c382d590d601d951c29c8463d555867db70f9e84f7741c81c2e1e6241d2ddf1c9e6670a24109a0e9c915cd6e07d0248a384dd38d3c91e9b0419f5f0b23c5467a06eff56cc2c246ada1e7d5705afc4dc8b43fd5a6972c679a2019c5091ed6522f7924d3674d08966a008f947f9aa016a4100bb12f911326f3e1befd0acdf5a5996e00933206cbec48f3bbdcee2a4ca75f8db911c00001e5a05474872446d6f1c1506837392a30fdc73d66fd89f4e1b1a5d14b93e2ad0c5f7b777520"; 172 | 173 | let packed_proof = PackedProof::from_str(packed_proof_str).unwrap(); 174 | let proof: Proof = packed_proof.into(); 175 | 176 | let serialized = serde_json::to_value(proof).unwrap(); 177 | 178 | assert_eq!( 179 | serialized, 180 | serde_json::json!([ 181 | [ 182 | "0x15c1fc6907219676890dfe147ee6f10b580c7881dddacb1567b3bcbfc513a54d", 183 | "0x233afda3efff43a7631990d2e79470abcbae3ccad4b920476e64745bfe97bb0a" 184 | ], 185 | [ 186 | [ 187 | "0xc8c7d7434c382d590d601d951c29c8463d555867db70f9e84f7741c81c2e1e6", 188 | "0x241d2ddf1c9e6670a24109a0e9c915cd6e07d0248a384dd38d3c91e9b0419f5f" 189 | ], 190 | [ 191 | "0xb23c5467a06eff56cc2c246ada1e7d5705afc4dc8b43fd5a6972c679a2019c5", 192 | "0x91ed6522f7924d3674d08966a008f947f9aa016a4100bb12f911326f3e1befd" 193 | ] 194 | ], 195 | [ 196 | "0xacdf5a5996e00933206cbec48f3bbdcee2a4ca75f8db911c00001e5a0547487", 197 | "0x2446d6f1c1506837392a30fdc73d66fd89f4e1b1a5d14b93e2ad0c5f7b777520" 198 | ] 199 | ]) 200 | ); 201 | } 202 | 203 | #[test] 204 | fn test_deserialize_proof_from_json() { 205 | let proof_str = "[ 206 | [ 207 | \"0x15c1fc6907219676890dfe147ee6f10b580c7881dddacb1567b3bcbfc513a54d\", 208 | \"0x233afda3efff43a7631990d2e79470abcbae3ccad4b920476e64745bfe97bb0a\" 209 | ], 210 | [ 211 | [ 212 | \"0xc8c7d7434c382d590d601d951c29c8463d555867db70f9e84f7741c81c2e1e6\", 213 | \"0x241d2ddf1c9e6670a24109a0e9c915cd6e07d0248a384dd38d3c91e9b0419f5f\" 214 | ], 215 | [ 216 | \"0xb23c5467a06eff56cc2c246ada1e7d5705afc4dc8b43fd5a6972c679a2019c5\", 217 | \"0x91ed6522f7924d3674d08966a008f947f9aa016a4100bb12f911326f3e1befd\" 218 | ] 219 | ], 220 | [ 221 | \"0xacdf5a5996e00933206cbec48f3bbdcee2a4ca75f8db911c00001e5a0547487\", 222 | \"0x2446d6f1c1506837392a30fdc73d66fd89f4e1b1a5d14b93e2ad0c5f7b777520\" 223 | ] 224 | ]"; 225 | 226 | let proof = serde_json::from_str::<Proof>(proof_str).unwrap(); 227 | 228 | let packed_proof = PackedProof::from(proof); 229 | 230 | let expected_proof = "0x15c1fc6907219676890dfe147ee6f10b580c7881dddacb1567b3bcbfc513a54d233afda3efff43a7631990d2e79470abcbae3ccad4b920476e64745bfe97bb0a0c8c7d7434c382d590d601d951c29c8463d555867db70f9e84f7741c81c2e1e6241d2ddf1c9e6670a24109a0e9c915cd6e07d0248a384dd38d3c91e9b0419f5f0b23c5467a06eff56cc2c246ada1e7d5705afc4dc8b43fd5a6972c679a2019c5091ed6522f7924d3674d08966a008f947f9aa016a4100bb12f911326f3e1befd0acdf5a5996e00933206cbec48f3bbdcee2a4ca75f8db911c00001e5a05474872446d6f1c1506837392a30fdc73d66fd89f4e1b1a5d14b93e2ad0c5f7b777520"; 231 | 232 | assert_eq!(packed_proof.to_string(), expected_proof); 233 | } 234 | 235 | #[test] 236 | fn test_invalid_parsing() {
237 | // note this is only 7 numbers 238 | let packed_proof_str = "0x0000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000030000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000500000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000007"; 239 | PackedProof::from_str(packed_proof_str).expect_err("parsing should fail"); 240 | 241 | // not a valid number 242 | let packed_proof_str = "0000000000000000p000000000000000000000000000000000000000000000050000000000000000000000000000000000000000000000000000000000000006000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000005000000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000070000000000000000000000000000000000000000000000000000000000000008"; 243 | PackedProof::from_str(packed_proof_str).expect_err("parsing should fail"); 244 | 245 | // completely invalid 246 | let packed_proof_str = "0x0"; 247 | PackedProof::from_str(packed_proof_str).expect_err("parsing should fail"); 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /crates/semaphore-depth-config/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-depth-config" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [features] 14 | depth_16 = [] 15 | depth_20 = [] 16 | depth_30 = [] 17 | -------------------------------------------------------------------------------- /crates/semaphore-depth-config/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused)] 2 | 3 | pub const fn get_supported_depth_count() -> usize { 4 | let mut res = 0; 5 | #[cfg(feature = "depth_16")] 6 | { 7 | res += 1; 8 | } 9 | #[cfg(feature = "depth_20")] 10 | { 11 | res += 1; 12 | } 13 | #[cfg(feature = "depth_30")] 14 | { 15 | res += 1; 16 | } 17 | res 18 | } 19 | 20 | #[allow(unused_assignments)] 21 | const fn gen_supported_depths() -> [usize; get_supported_depth_count()] { 22 | let mut res = [0; get_supported_depth_count()]; 23 | let mut i = 0; 24 | #[cfg(feature = "depth_16")] 25 | { 26 | res[i] = 16; 27 | i += 1; 28 | } 29 | #[cfg(feature = "depth_20")] 30 | { 31 | res[i] = 20; 32 | i += 1; 33 | } 34 | #[cfg(feature = "depth_30")] 35 | { 36 | res[i] = 30; 37 | i += 1; 38 | } 39 | res 40 | } 41 | 42 | static SUPPORTED_DEPTHS: [usize; get_supported_depth_count()] = gen_supported_depths(); 43 | 44 | pub fn get_supported_depths() -> &'static [usize] { 45 | &SUPPORTED_DEPTHS 46 | } 47 | 48 | #[allow(unused_assignments)] 49 | pub const fn get_depth_index(depth: usize) -> Option<usize> { 50 | let mut i = 0; 51 | 52 | #[cfg(feature = "depth_16")] 53 | { 54 | if depth == 16 { 55 | return Some(i); 56 | } 57 | i += 1; 58 | } 59 | #[cfg(feature = "depth_20")] 60 | { 61 | if depth == 20 { 62 | return Some(i); 63 | } 64 | i += 1; 65 | } 66 |
#[cfg(feature = "depth_30")] 67 | { 68 | if depth == 30 { 69 | return Some(i); 70 | } 71 | i += 1; 72 | } 73 | None 74 | } 75 | -------------------------------------------------------------------------------- /crates/semaphore-depth-macros/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs-depth-macros" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [features] 14 | depth_16 = ["semaphore-rs-depth-config/depth_16"] 15 | depth_20 = ["semaphore-rs-depth-config/depth_20"] 16 | depth_30 = ["semaphore-rs-depth-config/depth_30"] 17 | 18 | [lib] 19 | proc-macro = true 20 | 21 | [dependencies] 22 | semaphore-rs-depth-config.workspace = true 23 | itertools ={ workspace = true } 24 | syn.workspace = true 25 | proc-macro2.workspace = true 26 | quote.workspace = true 27 | -------------------------------------------------------------------------------- /crates/semaphore-depth-macros/src/lib.rs: -------------------------------------------------------------------------------- 1 | use proc_macro::TokenStream; 2 | use quote::{format_ident, quote}; 3 | use semaphore_rs_depth_config::get_supported_depths; 4 | use syn::{ 5 | parse::{Parse, ParseStream}, 6 | parse_macro_input, parse_quote, 7 | visit_mut::VisitMut, 8 | Ident, Token, 9 | }; 10 | 11 | /// Multi-depth test generator 12 | /// 13 | /// This macro is used to generate a test for each supported depth. 14 | /// It expects to annotate a function with a single argument, and will generate 15 | /// test cases delegating to that function for each supported depth. 16 | /// 17 | /// For example, 18 | /// ``` 19 | /// use semaphore_rs_depth_macros::test_all_depths; 20 | /// #[test_all_depths] 21 | /// fn test_depth_non_zero(depth: usize) { 22 | /// assert!(depth > 0); 23 | /// } 24 | /// ``` 25 | /// with `depth_16` and `depth_30` features active will generate the following 26 | /// code: 27 | /// ```no_run 28 | /// fn test_depth_non_zero(depth: usize) { 29 | /// assert!(depth > 0); 30 | /// } 31 | /// 32 | /// #[test] 33 | /// fn test_depth_non_zero_depth_16() { 34 | /// test_depth_non_zero(16); 35 | /// } 36 | /// 37 | /// #[test] 38 | /// fn test_depth_non_zero_depth_30() { 39 | /// test_depth_non_zero(30); 40 | /// } 41 | /// ``` 42 | #[proc_macro_attribute] 43 | pub fn test_all_depths(_attr: TokenStream, item: TokenStream) -> TokenStream { 44 | let fun = parse_macro_input!(item as syn::ItemFn); 45 | let fun_name = &fun.sig.ident; 46 | 47 | let original_fun = quote! { #fun }; 48 | let mut result = TokenStream::from(original_fun); 49 | 50 | for depth in get_supported_depths() { 51 | let fun_name_versioned = format_ident!("{}_depth_{}", fun_name, depth); 52 | let tokens = quote! 
{ 53 | #[test] 54 | fn #fun_name_versioned() { 55 | #fun_name(#depth); 56 | } 57 | }; 58 | result.extend(TokenStream::from(tokens)); 59 | } 60 | result 61 | } 62 | 63 | #[derive(Debug)] 64 | struct ArrayForDepthsInput { 65 | replaced_ident: Ident, 66 | expr: syn::Expr, 67 | } 68 | 69 | #[derive(Debug)] 70 | struct MacroArgs { 71 | args: Vec<syn::Expr>, 72 | } 73 | 74 | impl Parse for MacroArgs { 75 | fn parse(input: ParseStream) -> syn::Result<Self> { 76 | let mut args = Vec::new(); 77 | while !input.is_empty() { 78 | args.push(input.parse::<syn::Expr>()?); 79 | if input.is_empty() { 80 | break; 81 | } 82 | input.parse::<Token![,]>()?; 83 | } 84 | Ok(MacroArgs { args }) 85 | } 86 | } 87 | 88 | impl MacroArgs { 89 | fn tokens(&self) -> proc_macro2::TokenStream { 90 | let args = &self.args; 91 | quote! { #(#args),* } 92 | } 93 | } 94 | 95 | struct IdentReplacer(Ident, syn::Expr); 96 | 97 | impl VisitMut for IdentReplacer { 98 | fn visit_expr_mut(&mut self, expr: &mut syn::Expr) { 99 | match expr { 100 | syn::Expr::Path(ident) => { 101 | if ident.path.is_ident(&self.0) { 102 | *expr = self.1.clone(); 103 | } 104 | } 105 | syn::Expr::Macro(mcr) => { 106 | let Ok(mut args) = mcr.mac.parse_body::<MacroArgs>() else { 107 | return; 108 | }; 109 | for arg in &mut args.args { 110 | self.visit_expr_mut(arg); 111 | } 112 | mcr.mac.tokens = args.tokens(); 113 | } 114 | _ => syn::visit_mut::visit_expr_mut(self, expr), 115 | } 116 | } 117 | } 118 | 119 | impl Parse for ArrayForDepthsInput { 120 | fn parse(input: ParseStream) -> syn::Result<Self> { 121 | input.parse::<Token![|]>()?; 122 | let replaced_ident = input.parse::<Ident>()?; 123 | input.parse::<Token![|]>()?; 124 | let expr = input.parse::<syn::Expr>()?; 125 | Ok(ArrayForDepthsInput { 126 | replaced_ident, 127 | expr, 128 | }) 129 | } 130 | } 131 | 132 | /// Macro to generate code for multiple depths. 133 | /// 134 | /// Generates an array of expressions, where the given identifier is replaced 135 | /// with each supported depth. The argument must use closure syntax, but this 136 | /// is purely syntactic; the closure expression gets unrolled statically. 137 | /// 138 | /// This macro also descends into other macros, as long as they use standard 139 | /// Rust syntax for arguments. Macros with non-standard argument syntax are 140 | /// left unchanged by the expansion. 141 | /// 142 | /// For example, `array_for_depths!(|depth| depth + 5)`, with only `depth_16` 143 | /// and `depth_30` supported, will generate `[16 + 5, 30 + 5]`, and 144 | /// `array_for_depths!(|depth| concat!("foo", depth))` will generate 145 | /// `[concat!("foo", 16), concat!("foo", 30)]`. 146 | #[proc_macro] 147 | pub fn array_for_depths(input: TokenStream) -> TokenStream { 148 | let input = parse_macro_input!(input as ArrayForDepthsInput); 149 | let items = get_supported_depths() 150 | .iter() 151 | .map(|depth| { 152 | let mut replacer = IdentReplacer(input.replaced_ident.clone(), parse_quote!(#depth)); 153 | let mut expr = input.expr.clone(); 154 | replacer.visit_expr_mut(&mut expr); 155 | expr 156 | }) 157 | .collect::<Vec<_>>(); 158 | let array = quote!
{ [#(#items),*] }; 159 | array.into() 160 | } 161 | -------------------------------------------------------------------------------- /crates/semaphore/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "semaphore-rs" 3 | version.workspace = true 4 | edition.workspace = true 5 | homepage.workspace = true 6 | license.workspace = true 7 | repository.workspace = true 8 | authors.workspace = true 9 | description.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | 13 | [dependencies] 14 | # Internal 15 | semaphore-rs-utils.workspace = true 16 | semaphore-rs-ark-zkey.workspace = true 17 | semaphore-rs-ark-circom.workspace = true 18 | semaphore-rs-proof = { workspace = true, features = ["ark"] } 19 | semaphore-rs-poseidon.workspace = true 20 | semaphore-rs-hasher.workspace = true 21 | semaphore-rs-keccak.workspace = true 22 | semaphore-rs-trees.workspace = true 23 | semaphore-rs-storage.workspace = true 24 | semaphore-rs-depth-config.workspace = true 25 | semaphore-rs-depth-macros.workspace = true 26 | semaphore-rs-witness.workspace = true 27 | 28 | # 3rd Party 29 | bincode.workspace = true 30 | bytemuck.workspace = true 31 | color-eyre.workspace = true 32 | hex.workspace = true 33 | hex-literal.workspace = true 34 | itertools.workspace = true 35 | lazy_static.workspace = true 36 | mmap-rs.workspace = true 37 | num-bigint.workspace = true 38 | once_cell.workspace = true 39 | rand.workspace = true 40 | rayon.workspace = true 41 | ruint.workspace = true 42 | serde.workspace = true 43 | sha2.workspace = true 44 | thiserror.workspace = true 45 | tiny-keccak.workspace = true 46 | zeroize.workspace = true 47 | 48 | # Ark 49 | ark-bn254.workspace = true 50 | ark-ec.workspace = true 51 | ark-ff.workspace = true 52 | ark-groth16.workspace = true 53 | ark-relations.workspace = true 54 | ark-std.workspace = true 55 | 56 | [dev-dependencies] 57 | serial_test.workspace = true 58 | tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } 59 | criterion.workspace = true 60 | bincode.workspace = true 61 | proptest.workspace = true 62 | rand_chacha.workspace = true 63 | serde_json.workspace = true 64 | tempfile.workspace = true 65 | tiny-keccak.workspace = true 66 | tracing-test.workspace = true 67 | 68 | [build-dependencies] 69 | semaphore-rs-ark-zkey.workspace = true 70 | color-eyre.workspace = true 71 | reqwest.workspace = true 72 | semaphore-rs-depth-config.workspace = true 73 | 74 | [[bench]] 75 | name = "cascading_merkle_tree" 76 | harness = false 77 | 78 | [features] 79 | default = [] 80 | depth_16 = [ 81 | "semaphore-rs-depth-config/depth_16", 82 | "semaphore-rs-depth-macros/depth_16", 83 | ] 84 | depth_20 = [ 85 | "semaphore-rs-depth-config/depth_20", 86 | "semaphore-rs-depth-macros/depth_20", 87 | ] 88 | depth_30 = [ 89 | "semaphore-rs-depth-config/depth_30", 90 | "semaphore-rs-depth-macros/depth_30", 91 | ] -------------------------------------------------------------------------------- /crates/semaphore/README.md: -------------------------------------------------------------------------------- 1 | # 🦀 semaphore-rs 2 | 3 | Rust support library for using [semaphore](https://github.com/appliedzkp/semaphore). It is mostly a Rust rewrite of [zk-kit](https://github.com/appliedzkp/zk-kit), but it focuses only on Semaphore (for now) and covers a much smaller scope. It uses [ark-circom](https://github.com/gakonst/ark-circom) under the hood to generate the Groth16 proofs.
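Note that the supported Merkle tree depths are gated behind the cargo features `depth_16`, `depth_20` and `depth_30` (see the `[features]` section of the `Cargo.toml` above); at least one of them must be enabled for the prover and verifier to have a depth to work with.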
4 | 5 | ## Usage 6 | 7 | Add this line to your `Cargo.toml`: 8 | 9 | ```toml 10 | semaphore = { git = "https://github.com/worldcoin/semaphore-rs" } 11 | ``` 12 | 13 | ## Building semaphore circuits 14 | 15 | 1. Check out submodule (if not already done): `git submodule update --init --recursive` 16 | 1. Install semaphore dependencies `cd semaphore && npm install` 17 | 1. Compile circuits `npm exec ts-node ./scripts/compile-circuits.ts` 18 | 1. You'll find the `zkey` and `wasm` files in `semaphore/build/snark` 19 | 20 | ## Example 21 | 22 | Example as in `src/lib.rs`, run with `cargo test`. 23 | 24 | ```rust,no_run 25 | use semaphore_rs::{get_supported_depths, hash_to_field, Field, identity::Identity, 26 | poseidon_tree::LazyPoseidonTree, protocol::*}; 27 | use num_bigint::BigInt; 28 | 29 | // generate identity 30 | let mut secret = *b"secret"; 31 | let id = Identity::from_secret(&mut secret, None); 32 | 33 | // Get the first available tree depth. This is controlled by the crate features. 34 | let depth = get_supported_depths()[0]; 35 | 36 | // generate merkle tree 37 | let leaf = Field::from(0); 38 | let mut tree = LazyPoseidonTree::new(depth, leaf).derived(); 39 | tree = tree.update(0, &id.commitment()); 40 | 41 | let merkle_proof = tree.proof(0); 42 | let root = tree.root(); 43 | 44 | // change signal and external_nullifier here 45 | let signal_hash = hash_to_field(b"xxx"); 46 | let external_nullifier_hash = hash_to_field(b"appId"); 47 | 48 | let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash); 49 | 50 | let proof = generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap(); 51 | let success = verify_proof(root, nullifier_hash, signal_hash, external_nullifier_hash, &proof, depth).unwrap(); 52 | 53 | assert!(success); 54 | ``` 55 | -------------------------------------------------------------------------------- /crates/semaphore/benches/cascading_merkle_tree.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; 2 | use semaphore_rs::Field; 3 | use semaphore_rs_hasher::Hasher; 4 | use semaphore_rs_poseidon::Poseidon; 5 | use semaphore_rs_storage::MmapVec; 6 | use semaphore_rs_trees::cascading::CascadingMerkleTree; 7 | 8 | criterion_main!(cascading_merkle_tree); 9 | criterion_group!( 10 | cascading_merkle_tree, 11 | bench_cascading_validate, 12 | bench_cascading_create_dense_tree, 13 | bench_cascading_create_dense_mmap_tree, 14 | bench_cascading_restore_dense_mmap_tree, 15 | bench_cascading_dense_tree_reads, 16 | bench_cascading_dense_mmap_tree_reads, 17 | bench_cascading_dense_tree_writes, 18 | bench_cascading_dense_mmap_tree_writes, 19 | bench_cascading_proof_from_hash 20 | ); 21 | 22 | struct TreeValues<H: Hasher> { 23 | depth: usize, 24 | empty_value: H::Hash, 25 | initial_values: Vec<H::Hash>, 26 | } 27 | 28 | fn bench_cascading_proof_from_hash(criterion: &mut Criterion) { 29 | let tree_value = create_values_for_tree(14); 30 | 31 | criterion.bench_function("bench_cascading_proof_from_hash", |b| { 32 | let leaf = Field::from(234123412341usize); 33 | b.iter_batched_ref( 34 | || { 35 | let mut tree = CascadingMerkleTree::<Poseidon>::new_with_leaves( 36 | vec![], 37 | tree_value.depth, 38 | &tree_value.empty_value, 39 | &tree_value.initial_values, 40 | ); 41 | tree.set_leaf(1 << 13, leaf); 42 | tree 43 | }, 44 | |tree| { 45 | let _ = tree.proof_from_hash(leaf); 46 | }, 47 | BatchSize::SmallInput, 48 | ); 49 | }); 50 | } 51 | 52 | fn
bench_cascading_validate(criterion: &mut Criterion) { 53 | let tree_values = [ 54 | create_values_for_tree(4), 55 | create_values_for_tree(10), 56 | create_values_for_tree(14), 57 | ]; 58 | 59 | let mut group = criterion.benchmark_group("bench_cascading_validate"); 60 | 61 | for value in tree_values.iter() { 62 | let tree = CascadingMerkleTree::<Poseidon>::new_with_leaves( 63 | vec![], 64 | value.depth, 65 | &value.empty_value, 66 | &value.initial_values, 67 | ); 68 | 69 | group.bench_with_input( 70 | BenchmarkId::from_parameter(format!("validate_{}", value.depth)), 71 | value, 72 | |bencher: &mut criterion::Bencher, _| { 73 | bencher.iter(|| { 74 | tree.validate().unwrap(); 75 | }); 76 | }, 77 | ); 78 | } 79 | group.finish(); 80 | } 81 | 82 | fn bench_cascading_create_dense_tree(criterion: &mut Criterion) { 83 | let tree_values = [ 84 | create_values_for_tree(4), 85 | create_values_for_tree(10), 86 | create_values_for_tree(14), 87 | ]; 88 | 89 | let mut group = criterion.benchmark_group("bench_cascading_create_dense_tree"); 90 | 91 | for value in tree_values.iter() { 92 | group.bench_with_input( 93 | BenchmarkId::from_parameter(format!("create_dense_tree_depth_{}", value.depth)), 94 | value, 95 | |bencher: &mut criterion::Bencher, value| { 96 | bencher.iter(|| { 97 | let _tree = CascadingMerkleTree::<Poseidon>::new_with_leaves( 98 | vec![], 99 | value.depth, 100 | &value.empty_value, 101 | &value.initial_values, 102 | ); 103 | let _root = _tree.root(); 104 | }); 105 | }, 106 | ); 107 | } 108 | group.finish(); 109 | } 110 | 111 | fn bench_cascading_create_dense_mmap_tree(criterion: &mut Criterion) { 112 | let tree_values = [ 113 | create_values_for_tree(4), 114 | create_values_for_tree(10), 115 | create_values_for_tree(14), 116 | ]; 117 | 118 | let mut group = criterion.benchmark_group("bench_cascading_create_dense_mmap_tree"); 119 | 120 | for value in tree_values.iter() { 121 | group.bench_with_input( 122 | BenchmarkId::from_parameter(format!("create_dense_mmap_tree_depth_{}", value.depth)), 123 | value, 124 | |bencher: &mut criterion::Bencher, value| { 125 | bencher.iter(|| { 126 | let tempfile = tempfile::tempfile().unwrap(); 127 | let storage: MmapVec<_> = unsafe { MmapVec::create(tempfile).unwrap() }; 128 | let _tree: CascadingMerkleTree<Poseidon, _> = 129 | CascadingMerkleTree::new_with_leaves( 130 | storage, 131 | value.depth, 132 | &value.empty_value, 133 | &value.initial_values, 134 | ); 135 | let _root = _tree.root(); 136 | }); 137 | }, 138 | ); 139 | } 140 | group.finish(); 141 | } 142 | 143 | fn bench_cascading_restore_dense_mmap_tree(criterion: &mut Criterion) { 144 | let tree_values = vec![ 145 | create_values_for_tree(4), 146 | create_values_for_tree(10), 147 | create_values_for_tree(14), 148 | ]; 149 | 150 | let mut group = criterion.benchmark_group("bench_cascading_restore_dense_mmap_tree"); 151 | 152 | (0..3).zip(tree_values).for_each(|(id, value)| { 153 | let tempfile = tempfile::NamedTempFile::new().unwrap(); 154 | let path = tempfile.path(); 155 | let storage: MmapVec<_> = unsafe { MmapVec::create_from_path(path).unwrap() }; 156 | { 157 | let tree: CascadingMerkleTree<Poseidon, _> = CascadingMerkleTree::new_with_leaves( 158 | storage, 159 | value.depth, 160 | &value.empty_value, 161 | &value.initial_values, 162 | ); 163 | let _ = tree.root(); 164 | } 165 | 166 | group.bench_with_input( 167 | BenchmarkId::from_parameter(format!("restore_dense_mmap_tree_depth_{}", value.depth)), 168 | &(id, value), 169 | |bencher: &mut criterion::Bencher, (_id, value)| { 170 | bencher.iter(|| { 171 | let storage = unsafe {
MmapVec::restore_from_path(path).unwrap() }; 172 | let _tree: CascadingMerkleTree<Poseidon, _> = 173 | CascadingMerkleTree::restore(storage, value.depth, &value.empty_value) 174 | .unwrap(); 175 | let _root = _tree.root(); 176 | }); 177 | }, 178 | ); 179 | }); 180 | group.finish(); 181 | } 182 | 183 | #[allow(unused)] 184 | fn bench_cascading_dense_tree_reads(criterion: &mut Criterion) { 185 | let tree_value = create_values_for_tree(14); 186 | 187 | let tree = CascadingMerkleTree::<Poseidon>::new_with_leaves( 188 | vec![], 189 | tree_value.depth, 190 | &tree_value.empty_value, 191 | &tree_value.initial_values, 192 | ); 193 | 194 | criterion.bench_function("dense tree reads", |b| { 195 | b.iter(|| { 196 | // read all leaves, and compare to ones in tree value 197 | ((1 << (tree_value.depth - 1))..(1 << tree_value.depth)).for_each(|index| { 198 | let _proof = tree.proof(index); 199 | }) 200 | }) 201 | }); 202 | } 203 | 204 | #[allow(unused)] 205 | fn bench_cascading_dense_mmap_tree_reads(criterion: &mut Criterion) { 206 | let tree_value = create_values_for_tree(14); 207 | let file = tempfile::tempfile().unwrap(); 208 | 209 | let storage = unsafe { MmapVec::create(file).unwrap() }; 210 | let tree = CascadingMerkleTree::<Poseidon, _>::new_with_leaves( 211 | storage, 212 | tree_value.depth, 213 | &tree_value.empty_value, 214 | &tree_value.initial_values, 215 | ); 216 | 217 | criterion.bench_function("dense mmap tree reads", |b| { 218 | b.iter(|| { 219 | // read all leaves, and compare to ones in tree value 220 | ((1 << (tree.depth() - 1))..(1 << tree.depth())).for_each(|index| { 221 | let _proof = tree.proof(index); 222 | }) 223 | }) 224 | }); 225 | } 226 | 227 | fn bench_cascading_dense_tree_writes(criterion: &mut Criterion) { 228 | let tree_value = create_values_for_tree(14); 229 | 230 | let value = Field::from(123_456); 231 | 232 | criterion.bench_function("dense tree writes", |b| { 233 | b.iter_batched_ref( 234 | || { 235 | CascadingMerkleTree::<Poseidon>::new_with_leaves( 236 | vec![], 237 | tree_value.depth, 238 | &tree_value.empty_value, 239 | &tree_value.initial_values, 240 | ) 241 | }, 242 | |tree| { 243 | tree.set_leaf(9000, value); 244 | }, 245 | BatchSize::SmallInput, 246 | ); 247 | }); 248 | } 249 | 250 | fn bench_cascading_dense_mmap_tree_writes(criterion: &mut Criterion) { 251 | let tree_value = create_values_for_tree(14); 252 | 253 | let value = Field::from(123_456); 254 | 255 | criterion.bench_function("dense mmap tree writes", |b| { 256 | b.iter_batched_ref( 257 | || { 258 | let file = tempfile::tempfile().unwrap(); 259 | let storage = unsafe { MmapVec::create(file).unwrap() }; 260 | CascadingMerkleTree::<Poseidon, _>::new_with_leaves( 261 | storage, 262 | tree_value.depth, 263 | &tree_value.empty_value, 264 | &tree_value.initial_values, 265 | ) 266 | }, 267 | |tree| { 268 | tree.set_leaf(9000, value); 269 | }, 270 | BatchSize::SmallInput, 271 | ); 272 | }); 273 | } 274 | 275 | fn create_values_for_tree(depth: usize) -> TreeValues<Poseidon> { 276 | let empty_value = Field::from(0); 277 | 278 | let initial_values: Vec<Field> = (0..(1 << depth)).map(Field::from).collect(); 279 | 280 | TreeValues { 281 | depth, 282 | empty_value, 283 | initial_values, 284 | } 285 | } 286 | -------------------------------------------------------------------------------- /crates/semaphore/benches/lazy_merkle_tree.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; 2 | use semaphore_rs::poseidon_tree::LazyPoseidonTree; 3 | use semaphore_rs::Field; 4 |
use semaphore_rs_hasher::Hasher; 5 | use semaphore_rs_poseidon::Poseidon; 6 | 7 | criterion_main!(lazy_merkle_tree); 8 | criterion_group!( 9 | lazy_merkle_tree, 10 | bench_create_dense_tree, 11 | bench_create_dense_mmap_tree, 12 | bench_restore_dense_mmap_tree, 13 | bench_dense_tree_reads, 14 | bench_dense_mmap_tree_reads, 15 | bench_dense_tree_writes, 16 | bench_dense_mmap_tree_writes, 17 | ); 18 | 19 | struct TreeValues<H: Hasher> { 20 | depth: usize, 21 | prefix_depth: usize, 22 | empty_value: H::Hash, 23 | initial_values: Vec<H::Hash>, 24 | } 25 | 26 | fn bench_create_dense_tree(criterion: &mut Criterion) { 27 | let tree_values = vec![ 28 | create_values_for_tree(4), 29 | create_values_for_tree(10), 30 | create_values_for_tree(14), 31 | ]; 32 | 33 | let mut group = criterion.benchmark_group("bench_create_dense_tree"); 34 | 35 | for value in tree_values.iter() { 36 | group.bench_with_input( 37 | BenchmarkId::from_parameter(format!("create_dense_tree_depth_{}", value.depth)), 38 | value, 39 | |bencher: &mut criterion::Bencher, value| { 40 | bencher.iter(|| { 41 | let _tree = LazyPoseidonTree::new_with_dense_prefix_with_initial_values( 42 | value.depth, 43 | value.prefix_depth, 44 | &value.empty_value, 45 | &value.initial_values, 46 | ); 47 | let _root = _tree.root(); 48 | }); 49 | }, 50 | ); 51 | } 52 | group.finish(); 53 | } 54 | 55 | fn bench_create_dense_mmap_tree(criterion: &mut Criterion) { 56 | let tree_values = vec![ 57 | create_values_for_tree(4), 58 | create_values_for_tree(10), 59 | create_values_for_tree(14), 60 | ]; 61 | 62 | let mut group = criterion.benchmark_group("bench_create_dense_mmap_tree"); 63 | 64 | for value in tree_values.iter() { 65 | group.bench_with_input( 66 | BenchmarkId::from_parameter(format!("create_dense_mmap_tree_depth_{}", value.depth)), 67 | value, 68 | |bencher: &mut criterion::Bencher, value| { 69 | let file = tempfile::NamedTempFile::new().unwrap(); 70 | let path = file.path().to_str().unwrap(); 71 | bencher.iter(|| { 72 | let _tree = LazyPoseidonTree::new_mmapped_with_dense_prefix_with_init_values( 73 | value.depth, 74 | value.prefix_depth, 75 | &value.empty_value, 76 | &value.initial_values, 77 | path, 78 | ) 79 | .unwrap(); 80 | let _root = _tree.root(); 81 | }); 82 | }, 83 | ); 84 | } 85 | group.finish(); 86 | // remove created mmap file 87 | let _ = std::fs::remove_file("./testfile"); 88 | } 89 | 90 | fn bench_restore_dense_mmap_tree(criterion: &mut Criterion) { 91 | let tree_values = vec![ 92 | create_values_for_tree(4), 93 | create_values_for_tree(10), 94 | create_values_for_tree(14), 95 | ]; 96 | 97 | let mut group = criterion.benchmark_group("bench_restore_dense_mmap_tree"); 98 | 99 | (0..3).zip(tree_values).for_each(|(id, value)| { 100 | let file = tempfile::NamedTempFile::new().unwrap(); 101 | let path = file.path().to_str().unwrap(); 102 | { 103 | let _tree = LazyPoseidonTree::new_mmapped_with_dense_prefix_with_init_values( 104 | value.depth, 105 | value.prefix_depth, 106 | &value.empty_value, 107 | &value.initial_values, 108 | path, 109 | ) 110 | .unwrap(); 111 | let _root = _tree.root(); 112 | } 113 | 114 | group.bench_with_input( 115 | BenchmarkId::from_parameter(format!("restore_dense_mmap_tree_depth_{}", value.depth)), 116 | &(id, value), 117 | |bencher: &mut criterion::Bencher, (_id, value)| { 118 | bencher.iter(|| { 119 | let _tree = LazyPoseidonTree::attempt_dense_mmap_restore( 120 | value.depth, 121 | value.depth, 122 | &value.empty_value, 123 | path, 124 | ) 125 | .unwrap(); 126 | let _root = _tree.root(); 127 | }); 128 | }, 129 | ); 130 | }); 131
| group.finish(); 132 | } 133 | 134 | #[allow(unused)] 135 | fn bench_dense_tree_reads(criterion: &mut Criterion) { 136 | let tree_value = create_values_for_tree(14); 137 | 138 | let tree = LazyPoseidonTree::new_with_dense_prefix_with_initial_values( 139 | tree_value.depth, 140 | tree_value.prefix_depth, 141 | &tree_value.empty_value, 142 | &tree_value.initial_values, 143 | ); 144 | 145 | criterion.bench_function("dense tree reads", |b| { 146 | b.iter(|| { 147 | // read all leaves, and compare to ones in tree value 148 | ((1 << (tree_value.depth - 1))..(1 << tree_value.depth)).for_each(|index| { 149 | let _proof = tree.proof(index); 150 | }) 151 | }) 152 | }); 153 | } 154 | 155 | #[allow(unused)] 156 | fn bench_dense_mmap_tree_reads(criterion: &mut Criterion) { 157 | let tree_value = create_values_for_tree(14); 158 | let file = tempfile::NamedTempFile::new().unwrap(); 159 | let path = file.path().to_str().unwrap(); 160 | 161 | let tree = LazyPoseidonTree::new_mmapped_with_dense_prefix_with_init_values( 162 | tree_value.depth, 163 | tree_value.prefix_depth, 164 | &tree_value.empty_value, 165 | &tree_value.initial_values, 166 | path, 167 | ) 168 | .unwrap(); 169 | 170 | criterion.bench_function("dense mmap tree reads", |b| { 171 | b.iter(|| { 172 | // read all leaves, and compare to ones in tree value 173 | ((1 << (tree.depth() - 1))..(1 << tree.depth())).for_each(|index| { 174 | let _proof = tree.proof(index); 175 | }) 176 | }) 177 | }); 178 | } 179 | 180 | fn bench_dense_tree_writes(criterion: &mut Criterion) { 181 | let tree_value = create_values_for_tree(14); 182 | 183 | let value = Field::from(123_456); 184 | 185 | criterion.bench_function("dense tree writes", |b| { 186 | b.iter_batched( 187 | || { 188 | LazyPoseidonTree::new_with_dense_prefix_with_initial_values( 189 | tree_value.depth, 190 | tree_value.prefix_depth, 191 | &tree_value.empty_value, 192 | &tree_value.initial_values, 193 | ) 194 | }, 195 | |tree| { 196 | let _new_tree = tree.update_with_mutation(9000, &value); 197 | }, 198 | BatchSize::SmallInput, 199 | ); 200 | }); 201 | } 202 | 203 | fn bench_dense_mmap_tree_writes(criterion: &mut Criterion) { 204 | let tree_value = create_values_for_tree(14); 205 | let file = tempfile::NamedTempFile::new().unwrap(); 206 | let path = file.path().to_str().unwrap(); 207 | 208 | let value = Field::from(123_456); 209 | 210 | criterion.bench_function("dense mmap tree writes", |b| { 211 | b.iter_batched( 212 | || { 213 | LazyPoseidonTree::new_mmapped_with_dense_prefix_with_init_values( 214 | tree_value.depth, 215 | tree_value.prefix_depth, 216 | &tree_value.empty_value, 217 | &tree_value.initial_values, 218 | path, 219 | ) 220 | .unwrap() 221 | }, 222 | |tree| { 223 | let _new_tree = tree.update_with_mutation(9000, &value); 224 | }, 225 | BatchSize::SmallInput, 226 | ); 227 | }); 228 | } 229 | 230 | fn create_values_for_tree(depth: usize) -> TreeValues<Poseidon> { 231 | let prefix_depth = depth; 232 | let empty_value = Field::from(0); 233 | 234 | let initial_values: Vec<Field> = (0..(1 << depth)).map(Field::from).collect(); 235 | 236 | TreeValues { 237 | depth, 238 | prefix_depth, 239 | empty_value, 240 | initial_values, 241 | } 242 | } 243 | -------------------------------------------------------------------------------- /crates/semaphore/build.rs: -------------------------------------------------------------------------------- 1 | use std::fs::{create_dir, create_dir_all, File}; 2 | use std::path::{absolute, Path, PathBuf}; 3 | 4 | use color_eyre::eyre::Result; 5 | 6 | extern crate reqwest; 7 | 8 | const
SEMAPHORE_FILES_PATH: &str = "semaphore_files"; 9 | const SEMAPHORE_DOWNLOAD_URL: &str = "https://www.trusted-setup-pse.org/semaphore"; 10 | 11 | fn download_and_store_binary(url: &str, path: impl AsRef<Path>) -> Result<()> { 12 | let path = path.as_ref(); 13 | 14 | let mut resp = 15 | reqwest::blocking::get(url).unwrap_or_else(|_| panic!("Failed to download file: {url}")); 16 | let mut file = 17 | File::create(path).unwrap_or_else(|_| panic!("Failed to create file: {}", path.display())); 18 | 19 | resp.copy_to(&mut file)?; 20 | Ok(()) 21 | } 22 | 23 | fn create_arkzkey(path: PathBuf) -> Result<PathBuf> { 24 | let mut ark_zkey_path = path.clone(); 25 | ark_zkey_path.set_extension("arkzkey"); 26 | 27 | let (original_proving_key, original_constraint_matrices) = 28 | semaphore_rs_ark_zkey::read_proving_key_and_matrices_from_zkey( 29 | path.to_str().expect("Failed to convert path."), 30 | )?; 31 | 32 | semaphore_rs_ark_zkey::convert_zkey( 33 | original_proving_key, 34 | original_constraint_matrices, 35 | ark_zkey_path.to_str().unwrap(), 36 | )?; 37 | 38 | Ok(ark_zkey_path) 39 | } 40 | 41 | fn build_circuit(depth: usize) -> Result<()> { 42 | let out_dir = std::env::var("OUT_DIR").expect("Missing out dir var"); 43 | let base_path = Path::new(&out_dir).join(SEMAPHORE_FILES_PATH); 44 | 45 | if !base_path.exists() { 46 | create_dir_all(&base_path)?; 47 | } 48 | 49 | let depth_str = depth.to_string(); 50 | let depth_subfolder = base_path.join(&depth_str); 51 | if !Path::new(&depth_subfolder).exists() { 52 | create_dir(&depth_subfolder)?; 53 | } 54 | 55 | let filename = "semaphore"; 56 | let download_url = format!("{SEMAPHORE_DOWNLOAD_URL}/{depth_str}/{filename}.zkey"); 57 | let path = Path::new(&depth_subfolder).join(format!("{filename}.zkey")); 58 | download_and_store_binary(&download_url, &path)?; 59 | create_arkzkey(path)?; 60 | 61 | let ark_zkey_path = Path::new(&depth_subfolder).join(format!("{filename}.arkzkey")); 62 | 63 | // Compute absolute paths 64 | let arkzkey_file = absolute(ark_zkey_path)?; 65 | let graph_file = absolute( 66 | Path::new("graphs") 67 | .join(depth.to_string()) 68 | .join("graph.bin"), 69 | )?; 70 | 71 | println!("graph_file = {}", graph_file.display()); 72 | 73 | assert!(arkzkey_file.exists()); 74 | assert!(graph_file.exists()); 75 | 76 | // Export generated paths 77 | println!( 78 | "cargo:rustc-env=BUILD_RS_ARKZKEY_FILE_{}={}", 79 | depth, 80 | arkzkey_file.display() 81 | ); 82 | println!( 83 | "cargo:rustc-env=BUILD_RS_GRAPH_FILE_{}={}", 84 | depth, 85 | graph_file.display() 86 | ); 87 | 88 | Ok(()) 89 | } 90 | 91 | fn main() -> Result<()> { 92 | // We don't build the circuit for `docs.rs`, as the docs.rs build doesn't have network access.
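// (docs.rs sets the `DOCS_RS` environment variable in its build sandbox, which is what the check below detects.)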
93 | if std::env::var("DOCS_RS").is_ok() { 94 | println!("building for docs.rs, skipping Semaphore circuit builds"); 95 | return Ok(()); 96 | } 97 | 98 | for depth in semaphore_rs_depth_config::get_supported_depths() { 99 | build_circuit(*depth)?; 100 | } 101 | 102 | Ok(()) 103 | } 104 | -------------------------------------------------------------------------------- /crates/semaphore/examples/abort/main.rs: -------------------------------------------------------------------------------- 1 | use color_eyre::Result; 2 | use itertools::Itertools; 3 | use rand::Rng; 4 | use ruint::aliases::U256; 5 | use semaphore_rs_hasher::Hasher; 6 | use semaphore_rs_poseidon::Poseidon; 7 | use semaphore_rs_storage::MmapVec; 8 | use semaphore_rs_trees::cascading::CascadingMerkleTree; 9 | use semaphore_rs_trees::lazy::LazyMerkleTree; 10 | use std::{env, process::Stdio}; 11 | 12 | static FILE_PATH: &str = "target/debug/examples/abort.mmap"; 13 | static BIN_PATH: &str = "target/debug/examples/abort"; 14 | static ITERATIONS: usize = 20; 15 | static INITIAL_LEAVES: usize = 10; 16 | 17 | /// A test that interrupts writes to the mmap merkle trees 18 | /// to simulate a crash, and to check if restoring the tree 19 | /// is successful 20 | /// 21 | /// Run this binary with no arguments to run the tests 22 | /// `RUSTFLAGS="-C panic=abort" cargo run --example abort` 23 | #[tokio::main] 24 | async fn main() -> Result<()> { 25 | let args: Vec<String> = env::args().collect(); 26 | 27 | // initialize 28 | if args.len() == 1 { 29 | run()?; 30 | } else if args.len() == 2 && args[1] == "cascade_restore" { 31 | cascade_restore()?; 32 | } else if args.len() == 2 && args[1] == "cascade_init" { 33 | cascade_init()?; 34 | } else if args.len() == 2 && args[1] == "lazy_restore" { 35 | lazy_restore()?; 36 | } else if args.len() == 2 && args[1] == "lazy_init" { 37 | lazy_init()?; 38 | } else { 39 | panic!("invalid arguments"); 40 | } 41 | 42 | Ok(()) 43 | } 44 | 45 | fn run() -> Result<()> { 46 | let cascade_failures = run_test("cascade")?; 47 | let lazy_failures = run_test("lazy")?; 48 | 49 | println!("\nAll Tests Complete!"); 50 | println!("Cascade failure rate: {cascade_failures}/{ITERATIONS}"); 51 | println!("Lazy failure rate: {lazy_failures}/{ITERATIONS}"); 52 | 53 | Ok(()) 54 | } 55 | 56 | fn run_test(prefix: &str) -> Result<u32> { 57 | let mut failures = 0u32; 58 | println!("Running {prefix} test"); 59 | for i in 0..ITERATIONS { 60 | println!("\n{prefix} run #{i}"); 61 | let output = std::process::Command::new(BIN_PATH) 62 | .arg(format!("{prefix}_init")) 63 | .stdout(Stdio::piped()) 64 | .output()?; 65 | let stdout = String::from_utf8(output.stdout)?; 66 | print!("{}", stdout); 67 | let stderr = String::from_utf8(output.stderr)?; 68 | print!("{}", stderr); 69 | 70 | let output = std::process::Command::new(BIN_PATH) 71 | .arg(format!("{prefix}_restore")) 72 | .stdout(Stdio::piped()) 73 | .output()?; 74 | let stdout = String::from_utf8(output.stdout)?; 75 | print!("{}", stdout); 76 | let stderr = String::from_utf8(output.stderr)?; 77 | if !stderr.is_empty() { 78 | print!("{}", stderr); 79 | failures += 1; 80 | } 81 | } 82 | 83 | println!("\n{prefix} test complete"); 84 | Ok(failures) 85 | } 86 | 87 | fn cascade_init() -> Result<()> { 88 | let mmap_vec: MmapVec<<Poseidon as Hasher>::Hash> = 89 | unsafe { MmapVec::create_from_path(FILE_PATH)?
}; 90 | 91 | let leaves = vec![Default::default(); INITIAL_LEAVES]; 92 | 93 | let mut tree = CascadingMerkleTree::<Poseidon, _>::new_with_leaves( 94 | mmap_vec, 95 | 30, 96 | &Default::default(), 97 | &leaves, 98 | ); 99 | 100 | let _handle = tokio::spawn(async move { 101 | for _ in 0..15 { 102 | tree.push(U256::from(2)).unwrap(); 103 | } 104 | }); 105 | 106 | let mut rng = rand::thread_rng(); 107 | let millis: u64 = rng.gen_range(0..50); 108 | std::thread::sleep(std::time::Duration::from_millis(millis)); 109 | 110 | panic!(""); 111 | } 112 | 113 | fn cascade_restore() -> Result<()> { 114 | let file = std::fs::OpenOptions::new() 115 | .read(true) 116 | .write(true) 117 | .open(FILE_PATH)?; 118 | 119 | let mmap_vec: MmapVec<<Poseidon as Hasher>::Hash> = unsafe { MmapVec::restore(file)? }; 120 | let tree = CascadingMerkleTree::<Poseidon, _>::restore(mmap_vec, 30, &Default::default())?; 121 | println!("tree length: {}", tree.num_leaves()); 122 | tree.validate()?; 123 | 124 | Ok(()) 125 | } 126 | 127 | fn lazy_init() -> Result<()> { 128 | let leaves = vec![Default::default(); INITIAL_LEAVES]; 129 | 130 | let mut tree = LazyMerkleTree::<Poseidon>::new_mmapped_with_dense_prefix_with_init_values( 131 | 30, 132 | 13, 133 | &Default::default(), 134 | &leaves, 135 | FILE_PATH, 136 | )?; 137 | 138 | let _handle = std::thread::spawn(move || { 139 | for i in INITIAL_LEAVES..(INITIAL_LEAVES + 15) { 140 | tree = tree.update_with_mutation(i, &U256::from(2)); 141 | } 142 | }); 143 | 144 | let mut rng = rand::thread_rng(); 145 | let millis: u64 = rng.gen_range(0..50); 146 | std::thread::sleep(std::time::Duration::from_millis(millis)); 147 | 148 | panic!(""); 149 | } 150 | 151 | fn lazy_restore() -> Result<()> { 152 | let tree = LazyMerkleTree::<Poseidon>::attempt_dense_mmap_restore( 153 | 30, 154 | 13, 155 | &Default::default(), 156 | FILE_PATH, 157 | )?; 158 | 159 | let leaves = tree.leaves().take(20).collect_vec(); 160 | println!("tree length: {leaves:?}"); 161 | Ok(()) 162 | } 163 | -------------------------------------------------------------------------------- /crates/semaphore/graphs/16/graph.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/semaphore/graphs/16/graph.bin -------------------------------------------------------------------------------- /crates/semaphore/graphs/20/graph.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/semaphore/graphs/20/graph.bin -------------------------------------------------------------------------------- /crates/semaphore/graphs/30/graph.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/semaphore/graphs/30/graph.bin -------------------------------------------------------------------------------- /crates/semaphore/src/circuit.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused)] 2 | 3 | use ark_bn254::{Bn254, Fr}; 4 | use ark_groth16::ProvingKey; 5 | use ark_relations::r1cs::ConstraintMatrices; 6 | use once_cell::sync::Lazy; 7 | use semaphore_rs_depth_config::{get_depth_index, get_supported_depth_count}; 8 | use semaphore_rs_depth_macros::array_for_depths; 9 | 10 | const ZKEY_BYTES: [&[u8]; get_supported_depth_count()] = 11 | array_for_depths!(|depth|
include_bytes!(env!(concat!("BUILD_RS_ARKZKEY_FILE_", depth)))); 12 | 13 | const GRAPH_BYTES: [&[u8]; get_supported_depth_count()] = 14 | array_for_depths!(|depth| include_bytes!(env!(concat!("BUILD_RS_GRAPH_FILE_", depth)))); 15 | 16 | static ZKEY: [Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)>; get_supported_depth_count()] = 17 | array_for_depths!(|depth| Lazy::new(|| { 18 | semaphore_rs_ark_zkey::read_arkzkey_from_bytes(ZKEY_BYTES[get_depth_index(depth).unwrap()]) 19 | .expect("zkey should be valid") 20 | })); 21 | 22 | #[must_use] 23 | pub fn zkey(depth: usize) -> &'static (ProvingKey<Bn254>, ConstraintMatrices<Fr>) { 24 | let index = get_depth_index(depth).unwrap_or_else(|| panic!("depth {depth} is not supported")); 25 | &ZKEY[index] 26 | } 27 | 28 | #[must_use] 29 | pub fn graph(depth: usize) -> &'static [u8] { 30 | let index = get_depth_index(depth).unwrap_or_else(|| panic!("depth {depth} is not supported")); 31 | 32 | GRAPH_BYTES[index] 33 | } 34 | -------------------------------------------------------------------------------- /crates/semaphore/src/field.rs: -------------------------------------------------------------------------------- 1 | use ruint::{aliases::U256, uint}; 2 | use semaphore_rs_utils::keccak256; 3 | 4 | /// An element of the BN254 scalar field Fr. 5 | /// 6 | /// Represented as a big-endian byte vector without Montgomery reduction. 7 | // TODO: Make sure value is always reduced. 8 | pub type Field = U256; 9 | 10 | // See 11 | pub const MODULUS: Field = 12 | uint!(21888242871839275222246405745257275088548364400416034343698204186575808495617_U256); 13 | 14 | /// Hash arbitrary data to a field element. 15 | /// 16 | /// This is used to create `signal_hash` and `external_nullifier_hash`. 17 | #[must_use] 18 | #[allow(clippy::module_name_repetitions)] 19 | #[allow(clippy::missing_panics_doc)] 20 | pub fn hash_to_field(data: &[u8]) -> Field { 21 | // Never panics because the target uint is large enough. 22 | let n = U256::try_from_be_slice(&keccak256(data)).unwrap(); 23 | // Shift right one byte to make it fit in the field 24 | n >> 8 25 | } 26 | -------------------------------------------------------------------------------- /crates/semaphore/src/hash.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | fmt::{Debug, Display}, 3 | str, 4 | str::FromStr, 5 | }; 6 | use num_bigint::{BigInt, Sign}; 7 | use ruint::aliases::U256; 8 | use semaphore_rs_utils::{bytes_from_hex, bytes_to_hex, deserialize_bytes, serialize_bytes}; 9 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 10 | 11 | /// Container for 256-bit hash values.
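/// (Illustrative note on the impls below: `FromStr` accepts upper/lower/mixed-case hex with an optional `0x` prefix, while `Display`/`to_string()` always renders the 66-character lowercase `0x`-prefixed form.)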
12 | #[derive(Clone, Copy, PartialEq, Eq, Default)] 13 | pub struct Hash(pub [u8; 32]); 14 | 15 | impl Hash { 16 | #[must_use] 17 | pub const fn from_bytes_be(bytes: [u8; 32]) -> Self { 18 | Self(bytes) 19 | } 20 | 21 | #[must_use] 22 | pub const fn as_bytes_be(&self) -> &[u8; 32] { 23 | &self.0 24 | } 25 | } 26 | 27 | /// Conversion to U256 28 | impl From<&Hash> for U256 { 29 | fn from(hash: &Hash) -> Self { 30 | Self::from_be_bytes(*hash.as_bytes_be()) 31 | } 32 | } 33 | 34 | /// Conversion from U256 35 | impl From<U256> for Hash { 36 | fn from(u256: U256) -> Self { 37 | Self::from_bytes_be(u256.to_be_bytes::<32>()) 38 | } 39 | } 40 | 41 | /// Conversion from vec 42 | impl From<Vec<u8>> for Hash { 43 | fn from(vec: Vec<u8>) -> Self { 44 | let mut bytes = [0_u8; 32]; 45 | bytes.copy_from_slice(&vec[0..32]); 46 | Self::from_bytes_be(bytes) 47 | } 48 | } 49 | 50 | /// Conversion to `BigInt` 51 | impl From<Hash> for BigInt { 52 | fn from(hash: Hash) -> Self { 53 | Self::from_bytes_be(Sign::Plus, hash.as_bytes_be()) 54 | } 55 | } 56 | 57 | impl From<&Hash> for BigInt { 58 | fn from(hash: &Hash) -> Self { 59 | Self::from_bytes_be(Sign::Plus, hash.as_bytes_be()) 60 | } 61 | } 62 | 63 | impl Debug for Hash { 64 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 65 | let hex = bytes_to_hex::<32, 66>(&self.0); 66 | let hex_str = str::from_utf8(&hex).expect("hex is always valid utf8"); 67 | write!(f, "Field({hex_str})") 68 | } 69 | } 70 | 71 | impl Display for Hash { 72 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 73 | let hex = bytes_to_hex::<32, 66>(&self.0); 74 | let hex_str = str::from_utf8(&hex).expect("hex is always valid utf8"); 75 | write!(f, "{hex_str}") 76 | } 77 | } 78 | 79 | /// Parse Hash from hex string. 80 | /// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix 81 | /// but they must always be exactly 32 bytes. 82 | impl FromStr for Hash { 83 | type Err = hex::FromHexError; 84 | 85 | fn from_str(s: &str) -> Result<Self, Self::Err> { 86 | bytes_from_hex::<32>(s).map(Self) 87 | } 88 | } 89 | 90 | /// Serialize hashes into human readable hex strings or byte arrays. 91 | /// Hex strings are lower case without prefix and always 32 bytes. 92 | impl Serialize for Hash { 93 | fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { 94 | serialize_bytes::<32, 66, S>(serializer, &self.0) 95 | } 96 | } 97 | 98 | /// Deserialize human readable hex strings or byte arrays into hashes. 99 | /// Hex strings can be upper/lower/mixed case and have an optional `0x` prefix 100 | /// but they must always be exactly 32 bytes.
101 | impl<'de> Deserialize<'de> for Hash { 102 | fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { 103 | let bytes = deserialize_bytes::<32, _>(deserializer)?; 104 | Ok(Self(bytes)) 105 | } 106 | } 107 | 108 | #[cfg(test)] 109 | pub mod test { 110 | use super::*; 111 | use hex_literal::hex; 112 | use serde_json::{from_str, to_string}; 113 | 114 | #[test] 115 | fn test_serialize() { 116 | let hash = Hash([0; 32]); 117 | assert_eq!( 118 | to_string(&hash).unwrap(), 119 | "\"0x0000000000000000000000000000000000000000000000000000000000000000\"" 120 | ); 121 | let hash = Hash(hex!( 122 | "1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe" 123 | )); 124 | assert_eq!( 125 | to_string(&hash).unwrap(), 126 | "\"0x1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"" 127 | ); 128 | } 129 | 130 | #[test] 131 | fn test_deserialize() { 132 | assert_eq!( 133 | from_str::<Hash>( 134 | "\"0x1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe\"" 135 | ) 136 | .unwrap(), 137 | Hash(hex!( 138 | "1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe" 139 | )) 140 | ); 141 | assert_eq!( 142 | from_str::<Hash>( 143 | "\"0X1C4823575d154474EE3e5ac838d002456a815181437afd14f126da58a9912bbe\"" 144 | ) 145 | .unwrap(), 146 | Hash(hex!( 147 | "1c4823575d154474ee3e5ac838d002456a815181437afd14f126da58a9912bbe" 148 | )) 149 | ); 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /crates/semaphore/src/identity.rs: -------------------------------------------------------------------------------- 1 | use sha2::{Digest, Sha256}; 2 | use zeroize::Zeroize; 3 | 4 | use crate::field::MODULUS; 5 | use crate::Field; 6 | 7 | #[derive(Clone, PartialEq, Eq, Debug)] 8 | pub struct Identity { 9 | pub trapdoor: Field, 10 | pub nullifier: Field, 11 | } 12 | 13 | /// Implements the private key derivation function from zk-kit.
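/// (Concretely, per the code below: the returned field element is `SHA-256(seed_hex || suffix)` read as a big-endian integer and reduced mod `MODULUS`, where `seed_hex` is the 64-byte lowercase hex encoding of `SHA-256(seed)`.)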
14 | /// 15 | /// See 16 | fn derive_field(seed_hex: &[u8; 64], suffix: &[u8]) -> Field { 17 | let mut hasher = Sha256::new(); 18 | hasher.update(seed_hex); 19 | hasher.update(suffix); 20 | Field::try_from_be_slice(hasher.finalize().as_ref()).unwrap() % MODULUS 21 | } 22 | 23 | pub fn seed_hex(seed: &[u8]) -> [u8; 64] { 24 | let mut hasher = Sha256::new(); 25 | hasher.update(seed); 26 | let bytes: [u8; 32] = hasher.finalize().into(); 27 | let mut result = [0_u8; 64]; 28 | hex::encode_to_slice(bytes, &mut result[..]).expect("output buffer is correctly sized"); 29 | result 30 | } 31 | 32 | impl Identity { 33 | #[must_use] 34 | #[deprecated(since = "0.2.0", note = "please use `from_secret` instead")] 35 | pub fn from_seed(seed: &[u8]) -> Self { 36 | let seed_hex = seed_hex(seed); 37 | Self { 38 | trapdoor: derive_field(&seed_hex, b"identity_trapdoor"), 39 | nullifier: derive_field(&seed_hex, b"identity_nullifier"), 40 | } 41 | } 42 | 43 | #[must_use] 44 | pub fn from_secret(secret: &mut [u8], trapdoor_seed: Option<&[u8]>) -> Self { 45 | let mut secret_hex = seed_hex(secret); 46 | secret.zeroize(); 47 | 48 | Self::from_hashed_secret(&mut secret_hex, trapdoor_seed) 49 | } 50 | 51 | #[must_use] 52 | pub fn from_hashed_secret(secret_hex: &mut [u8; 64], trapdoor_seed: Option<&[u8]>) -> Self { 53 | let identity = Self { 54 | trapdoor: derive_field(secret_hex, trapdoor_seed.unwrap_or(b"identity_trapdoor")), 55 | nullifier: derive_field(secret_hex, b"identity_nullifier"), 56 | }; 57 | secret_hex.zeroize(); 58 | identity 59 | } 60 | 61 | #[must_use] 62 | pub fn secret_hash(&self) -> Field { 63 | semaphore_rs_poseidon::poseidon::hash2(self.nullifier, self.trapdoor) 64 | } 65 | 66 | #[must_use] 67 | pub fn commitment(&self) -> Field { 68 | semaphore_rs_poseidon::poseidon::hash1(self.secret_hash()) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /crates/semaphore/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | 3 | mod circuit; 4 | mod field; 5 | pub mod hash; 6 | pub mod identity; 7 | pub mod packed_proof; 8 | pub mod poseidon_tree; 9 | pub mod protocol; 10 | 11 | pub use semaphore_rs_depth_config::get_supported_depths; 12 | 13 | // Export types 14 | pub use crate::field::{hash_to_field, Field, MODULUS}; 15 | 16 | #[allow(dead_code)] 17 | #[cfg(test)] 18 | mod test { 19 | use std::thread::spawn; 20 | 21 | use semaphore_rs_depth_macros::test_all_depths; 22 | 23 | use crate::identity::Identity; 24 | use crate::poseidon_tree::LazyPoseidonTree; 25 | use crate::protocol::{generate_nullifier_hash, generate_proof, verify_proof}; 26 | use crate::{hash_to_field, protocol, Field}; 27 | 28 | #[test] 29 | fn test_field_serde() { 30 | let value = Field::from(0x1234_5678); 31 | let serialized = serde_json::to_value(value).unwrap(); 32 | let deserialized = serde_json::from_value(serialized).unwrap(); 33 | assert_eq!(value, deserialized); 34 | } 35 | 36 | fn test_end_to_end( 37 | identity: &mut [u8], 38 | external_nullifier: &[u8], 39 | signal: &[u8], 40 | depth: usize, 41 | ) { 42 | let leaf = Field::from(0); 43 | 44 | // generate identity 45 | let id = Identity::from_secret(identity, None); 46 | 47 | // generate merkle tree 48 | let mut tree = LazyPoseidonTree::new(depth, leaf).derived(); 49 | tree = tree.update(0, &id.commitment()); 50 | 51 | let merkle_proof = tree.proof(0); 52 | let root = tree.root(); 53 | 54 | let signal_hash = hash_to_field(signal); 55 | let 
external_nullifier_hash = hash_to_field(external_nullifier); 56 | let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash); 57 | 58 | let proof = 59 | generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap(); 60 | 61 | for _ in 0..5 { 62 | let success = verify_proof( 63 | root, 64 | nullifier_hash, 65 | signal_hash, 66 | external_nullifier_hash, 67 | &proof, 68 | depth, 69 | ) 70 | .unwrap(); 71 | assert!(success); 72 | } 73 | } 74 | 75 | #[test_all_depths] 76 | fn test_auth_flow(depth: usize) { 77 | let mut secret = *b"oh so secret"; 78 | let id = Identity::from_secret(&mut secret[..], None); 79 | let signal_hash = hash_to_field(b"signal"); 80 | let external_nullifier_hash = hash_to_field(b"appId"); 81 | let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash); 82 | let id_commitment = id.commitment(); 83 | 84 | let proof = protocol::authentication::generate_proof( 85 | depth, 86 | &id, 87 | external_nullifier_hash, 88 | signal_hash, 89 | ) 90 | .unwrap(); 91 | 92 | let success = protocol::authentication::verify_proof( 93 | depth, 94 | id_commitment, 95 | nullifier_hash, 96 | signal_hash, 97 | external_nullifier_hash, 98 | &proof, 99 | ) 100 | .unwrap(); 101 | assert!(success); 102 | } 103 | 104 | #[test_all_depths] 105 | fn test_single(depth: usize) { 106 | // Note that rust will still run tests in parallel 107 | let mut hello = *b"hello"; 108 | test_end_to_end(&mut hello, b"appId", b"xxx", depth); 109 | } 110 | 111 | #[test_all_depths] 112 | fn test_parallel(depth: usize) { 113 | // Note that this does not guarantee a concurrency issue will be detected. 114 | // For that we need much more sophisticated static analysis tooling like 115 | // loom. See 116 | let mut a_id = *b"hello"; 117 | let mut b_id = *b"secret"; 118 | let a = spawn(move || test_end_to_end(&mut a_id, b"appId", b"xxx", depth)); 119 | let b = spawn(move || test_end_to_end(&mut b_id, b"test", b"signal", depth)); 120 | a.join().unwrap(); 121 | b.join().unwrap(); 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /crates/semaphore/src/packed_proof.rs: -------------------------------------------------------------------------------- 1 | // Re-export for backwards compatibility 2 | pub use semaphore_rs_proof::packing::PackedProof; 3 | -------------------------------------------------------------------------------- /crates/semaphore/src/poseidon_tree.rs: -------------------------------------------------------------------------------- 1 | use semaphore_rs_hasher::Hasher; 2 | use semaphore_rs_poseidon::Poseidon; 3 | use semaphore_rs_trees::imt::MerkleTree; 4 | use semaphore_rs_trees::lazy::LazyMerkleTree; 5 | 6 | pub type PoseidonTree = MerkleTree<Poseidon>; 7 | pub type LazyPoseidonTree = LazyMerkleTree<Poseidon>; 8 | pub type Branch = semaphore_rs_trees::Branch<<Poseidon as Hasher>::Hash>; 9 | pub type Proof = semaphore_rs_trees::InclusionProof<Poseidon>; 10 | -------------------------------------------------------------------------------- /crates/semaphore/src/protocol/authentication.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | identity::Identity, 3 | poseidon_tree::LazyPoseidonTree, 4 | protocol::{Proof, ProofError}, 5 | Field, 6 | }; 7 | 8 | pub fn generate_proof( 9 | depth: usize, 10 | identity: &Identity, 11 | ext_nullifier_hash: Field, 12 | signal_hash: Field, 13 | ) -> Result<Proof, ProofError> { 14 | let merkle_proof = LazyPoseidonTree::new(depth, Field::from(0)) 15 | .update(0, &identity.commitment()) 16 | .proof(0);
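// Authentication proofs are made against a canonical single-member tree: the identity commitment sits at leaf 0 of an otherwise empty depth-`depth` tree, so `verify_proof` below can recompute the same root from the commitment alone.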
17 |     super::generate_proof(identity, &merkle_proof, ext_nullifier_hash, signal_hash)
18 | }
19 | 
20 | pub fn verify_proof(
21 |     depth: usize,
22 |     id_commitment: Field,
23 |     nullifier_hash: Field,
24 |     signal_hash: Field,
25 |     ext_nullifier_hash: Field,
26 |     proof: &Proof,
27 | ) -> Result<bool, ProofError> {
28 |     let root = LazyPoseidonTree::new(depth, Field::from(0))
29 |         .update(0, &id_commitment)
30 |         .root();
31 |     super::verify_proof(
32 |         root,
33 |         nullifier_hash,
34 |         signal_hash,
35 |         ext_nullifier_hash,
36 |         proof,
37 |         depth,
38 |     )
39 | }
40 | 
--------------------------------------------------------------------------------
/crates/semaphore/src/protocol/mod.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashMap;
2 | 
3 | use ark_bn254::Fr;
4 | use ark_ff::PrimeField;
5 | use ark_groth16::{prepare_verifying_key, Groth16};
6 | use ark_relations::r1cs::SynthesisError;
7 | use ark_std::UniformRand;
8 | use color_eyre::Result;
9 | use once_cell::sync::Lazy;
10 | use rand::{thread_rng, Rng};
11 | use semaphore_rs_ark_circom::ethereum::AffineError;
12 | use semaphore_rs_ark_circom::CircomReduction;
13 | use semaphore_rs_depth_config::{get_depth_index, get_supported_depth_count};
14 | use semaphore_rs_depth_macros::array_for_depths;
15 | use semaphore_rs_poseidon::Poseidon;
16 | use semaphore_rs_trees::{Branch, InclusionProof};
17 | use semaphore_rs_witness::Graph;
18 | use thiserror::Error;
19 | 
20 | use crate::circuit::zkey;
21 | use crate::identity::Identity;
22 | use crate::Field;
23 | 
24 | pub use semaphore_rs_proof::compression;
25 | pub use semaphore_rs_proof::Proof;
26 | 
27 | pub mod authentication;
28 | 
29 | static WITNESS_GRAPH: [Lazy<Graph>; get_supported_depth_count()] = array_for_depths!(|depth| {
30 |     Lazy::new(|| {
31 |         semaphore_rs_witness::init_graph(crate::circuit::graph(depth))
32 |             .expect("Failed to initialize Graph")
33 |     })
34 | });
35 | 
36 | /// Preloads the ZKEY in memory to skip the lazy loading at first verification
37 | pub fn warmup_for_verification(tree_depth: usize) {
38 |     let _zkey = zkey(tree_depth);
39 | }
40 | 
41 | /// Helper to convert a merkle proof into a bigint vector
42 | /// TODO: we should create a From trait for this
43 | fn merkle_proof_to_vec(proof: &InclusionProof<Poseidon>) -> Vec<Field> {
44 |     proof
45 |         .0
46 |         .iter()
47 |         .map(|x| match x {
48 |             Branch::Left(value) | Branch::Right(value) => *value,
49 |         })
50 |         .collect()
51 | }
52 | 
53 | /// Generates the nullifier hash
54 | #[must_use]
55 | pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
56 |     semaphore_rs_poseidon::poseidon::hash2(external_nullifier, identity.nullifier)
57 | }
58 | 
59 | #[derive(Error, Debug)]
60 | pub enum ProofError {
61 |     #[error("Error reading circuit key: {0}")]
62 |     CircuitKeyError(#[from] std::io::Error),
63 |     #[error("Error producing witness: {0}")]
64 |     WitnessError(color_eyre::Report),
65 |     #[error("Error producing proof: {0}")]
66 |     SynthesisError(#[from] SynthesisError),
67 |     #[error("Error converting public input: {0}")]
68 |     ToFieldError(#[from] ruint::ToFieldError),
69 |     #[error(transparent)]
70 |     G1AffineError(#[from] AffineError),
71 | }
72 | 
73 | /// Generates a semaphore proof
74 | ///
75 | /// # Errors
76 | ///
77 | /// Returns a [`ProofError`] if proving fails.
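///
/// A minimal sketch (assuming the `depth_16` feature is enabled and the
/// identity's commitment sits at leaf 0):
///
/// ```no_run
/// use semaphore_rs::identity::Identity;
/// use semaphore_rs::poseidon_tree::LazyPoseidonTree;
/// use semaphore_rs::protocol::generate_proof;
/// use semaphore_rs::{hash_to_field, Field};
///
/// let mut secret = *b"oh so secret";
/// let identity = Identity::from_secret(&mut secret[..], None);
/// // A depth-16 tree holding only this identity's commitment.
/// let tree = LazyPoseidonTree::new(16, Field::from(0)).update(0, &identity.commitment());
/// let proof = generate_proof(
///     &identity,
///     &tree.proof(0),
///     hash_to_field(b"appId"),
///     hash_to_field(b"signal"),
/// )
/// .expect("proving failed");
/// ```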
78 | pub fn generate_proof(
79 |     identity: &Identity,
80 |     merkle_proof: &InclusionProof<Poseidon>,
81 |     external_nullifier_hash: Field,
82 |     signal_hash: Field,
83 | ) -> Result<Proof, ProofError> {
84 |     generate_proof_rng(
85 |         identity,
86 |         merkle_proof,
87 |         external_nullifier_hash,
88 |         signal_hash,
89 |         &mut thread_rng(),
90 |     )
91 | }
92 | 
93 | /// Generates a semaphore proof from entropy
94 | ///
95 | /// # Errors
96 | ///
97 | /// Returns a [`ProofError`] if proving fails.
98 | pub fn generate_proof_rng(
99 |     identity: &Identity,
100 |     merkle_proof: &InclusionProof<Poseidon>,
101 |     external_nullifier_hash: Field,
102 |     signal_hash: Field,
103 |     rng: &mut impl Rng,
104 | ) -> Result<Proof, ProofError> {
105 |     generate_proof_rs(
106 |         identity,
107 |         merkle_proof,
108 |         external_nullifier_hash,
109 |         signal_hash,
110 |         ark_bn254::Fr::rand(rng),
111 |         ark_bn254::Fr::rand(rng),
112 |     )
113 | }
114 | 
115 | fn generate_proof_rs(
116 |     identity: &Identity,
117 |     merkle_proof: &InclusionProof<Poseidon>,
118 |     external_nullifier_hash: Field,
119 |     signal_hash: Field,
120 |     r: ark_bn254::Fr,
121 |     s: ark_bn254::Fr,
122 | ) -> Result<Proof, ProofError> {
123 |     let depth = merkle_proof.0.len();
124 |     let full_assignment =
125 |         generate_witness(identity, merkle_proof, external_nullifier_hash, signal_hash);
126 | 
127 |     let zkey = zkey(depth);
128 |     let ark_proof = Groth16::<_, CircomReduction>::create_proof_with_reduction_and_matrices(
129 |         &zkey.0,
130 |         r,
131 |         s,
132 |         &zkey.1,
133 |         zkey.1.num_instance_variables,
134 |         zkey.1.num_constraints,
135 |         full_assignment.as_slice(),
136 |     )?;
137 |     let proof = ark_proof.into();
138 | 
139 |     Ok(proof)
140 | }
141 | 
142 | pub fn generate_witness(
143 |     identity: &Identity,
144 |     merkle_proof: &InclusionProof<Poseidon>,
145 |     external_nullifier_hash: Field,
146 |     signal_hash: Field,
147 | ) -> Vec<Fr> {
148 |     let depth = merkle_proof.0.len();
149 |     let inputs = HashMap::from([
150 |         ("identityNullifier".to_owned(), vec![identity.nullifier]),
151 |         ("identityTrapdoor".to_owned(), vec![identity.trapdoor]),
152 |         ("treePathIndices".to_owned(), path_index(merkle_proof)),
153 |         ("treeSiblings".to_owned(), merkle_proof_to_vec(merkle_proof)),
154 |         (
155 |             "externalNullifier".to_owned(),
156 |             vec![external_nullifier_hash],
157 |         ),
158 |         ("signalHash".to_owned(), vec![signal_hash]),
159 |     ]);
160 | 
161 |     let graph = &WITNESS_GRAPH
162 |         [get_depth_index(depth).unwrap_or_else(|| panic!("Depth {depth} not supported"))];
163 | 
164 |     let witness = semaphore_rs_witness::calculate_witness(inputs, graph).unwrap();
165 |     witness
166 |         .into_iter()
167 |         .map(|x| Fr::from_bigint(x.into()).expect("Couldn't cast U256 to BigInteger"))
168 |         .collect::<Vec<_>>()
169 | }
170 | 
171 | /// Compute path index
172 | #[must_use]
173 | pub fn path_index(proof: &InclusionProof<Poseidon>) -> Vec<Field> {
174 |     proof
175 |         .0
176 |         .iter()
177 |         .map(|branch| match branch {
178 |             Branch::Left(_) => Field::from(0),
179 |             Branch::Right(_) => Field::from(1),
180 |         })
181 |         .collect()
182 | }
183 | 
184 | /// Verifies a given semaphore proof
185 | ///
186 | /// # Errors
187 | ///
188 | /// Returns a [`ProofError`] if the verification procedure itself fails; an
189 | /// invalid proof is reported as `Ok(false)`, not as an error.
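///
/// Sketch of the three possible outcomes (assuming a depth-16 tree and the
/// `depth_16` feature):
///
/// ```no_run
/// # fn check(root: semaphore_rs::Field, nullifier: semaphore_rs::Field, signal: semaphore_rs::Field, ext: semaphore_rs::Field, proof: &semaphore_rs::protocol::Proof) {
/// match semaphore_rs::protocol::verify_proof(root, nullifier, signal, ext, proof, 16) {
///     Ok(true) => println!("proof accepted"),
///     Ok(false) => println!("proof rejected"),
///     Err(e) => println!("verification could not run: {e}"),
/// }
/// # }
/// ```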
190 | pub fn verify_proof(
191 |     root: Field,
192 |     nullifier_hash: Field,
193 |     signal_hash: Field,
194 |     external_nullifier_hash: Field,
195 |     proof: &Proof,
196 |     tree_depth: usize,
197 | ) -> Result<bool, ProofError> {
198 |     let zkey = zkey(tree_depth);
199 |     let pvk = prepare_verifying_key(&zkey.0.vk);
200 | 
201 |     let public_inputs = [root, nullifier_hash, signal_hash, external_nullifier_hash]
202 |         .iter()
203 |         .map(ark_bn254::Fr::try_from)
204 |         .collect::<Result<Vec<_>, _>>()?;
205 | 
206 |     let ark_proof = (*proof).try_into()?;
207 |     let result = Groth16::<_, CircomReduction>::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
208 |     Ok(result)
209 | }
210 | 
211 | #[cfg(test)]
212 | #[allow(dead_code)]
213 | mod test {
214 |     use ark_bn254::Config;
215 |     use ark_ec::bn::Bn;
216 |     use ark_groth16::Proof as ArkProof;
217 |     use rand::SeedableRng as _;
218 |     use rand_chacha::ChaChaRng;
219 |     use semaphore_rs_depth_macros::test_all_depths;
220 |     use serde_json::json;
221 | 
222 |     use super::*;
223 |     use crate::hash_to_field;
224 |     use crate::poseidon_tree::LazyPoseidonTree;
225 | 
226 |     fn arb_proof(seed: u64, depth: usize) -> Proof {
227 |         // Deterministic randomness for testing
228 |         let mut rng = ChaChaRng::seed_from_u64(seed);
229 | 
230 |         // generate identity
231 |         let mut seed: [u8; 16] = rng.gen();
232 |         let id = Identity::from_secret(seed.as_mut(), None);
233 | 
234 |         // generate merkle tree
235 |         let leaf = Field::from(0);
236 |         let mut tree = LazyPoseidonTree::new(depth, leaf).derived();
237 |         tree = tree.update(0, &id.commitment());
238 | 
239 |         let merkle_proof = tree.proof(0);
240 | 
241 |         let external_nullifier: [u8; 16] = rng.gen();
242 |         let external_nullifier_hash = hash_to_field(&external_nullifier);
243 | 
244 |         let signal: [u8; 16] = rng.gen();
245 |         let signal_hash = hash_to_field(&signal);
246 | 
247 |         generate_proof_rng(
248 |             &id,
249 |             &merkle_proof,
250 |             external_nullifier_hash,
251 |             signal_hash,
252 |             &mut rng,
253 |         )
254 |         .unwrap()
255 |     }
256 | 
257 |     #[test_all_depths]
258 |     fn test_proof_cast_roundtrip(depth: usize) {
259 |         let proof = arb_proof(123, depth);
260 |         let ark_proof: ArkProof<Bn<Config>> = proof.try_into().unwrap();
261 |         let result: Proof = ark_proof.into();
262 |         assert_eq!(proof, result);
263 |     }
264 | 
265 |     #[test_all_depths]
266 |     fn test_proof_serialize(depth: usize) {
267 |         let proof = arb_proof(456, depth);
268 |         let json = serde_json::to_value(proof).unwrap();
269 |         let valid_values = match depth {
270 |             16 => json!([
271 |                 [
272 |                     "0xe4267974945a50a541e90a399ed9211752216a3e4e1cefab1f0bcd8925ea56e",
273 |                     "0xdd9ada36c50d3f1bf75abe5c5ad7d0a29355b74fc3f604aa108b8886a6ac7f8"
274 |                 ],
275 |                 [
276 |                     [
277 |                         "0x1621577ad2f90fe2e7ec6f675751693515c3b7e91ee228f1db47fe3aba7c0450",
278 |                         "0x2b07bc915b377f8c7126c2d46636632cdbcb426b446a06edf3320939ee4e1911"
279 |                     ],
280 |                     [
281 |                         "0xf40e93e057c7521720448b3d443eac36ff48705312181c41bd78981923be41a",
282 |                         "0x9ce138011687b44a08b979a85b3b122e7335254a02d4fbae7b38b57653c7eb0"
283 |                     ]
284 |                 ],
285 |                 [
286 |                     "0x295b30c0c025a2b176de1220acdb5f95119a8938689d73076f02bb6d01601fbb",
287 |                     "0xc71250468b955584be8769b047f79614df1176a7a64683f14c27889d47e614"
288 |                 ]
289 |             ]),
290 |             20 => json!([
291 |                 [
292 |                     "0x2296e314c88daf893769f4ed0cad8a7f584b39db6ebd4bba230591b5d78f48b3",
293 |                     "0x2e5d33bf993b8e4aba7c06ee82ff7dd674857b491c46f53eda4365ecbf3e5fde"
294 |                 ],
295 |                 [
296 |                     [
297 |                         "0x277c239fa1cf9e8a7ca65ef09371bee470aad7936583a0b48e60f6a76f17a97c",
298 |                         "0x2b21c607eff04f704e546451dcd27c5f090639074a54b45e345337e09d0ab3d0"
299 |                     ],
300 |                     [
301 |                         "0x73fde4daa004ecb853159e54b98cdd204e7874008f91581601881c968607451",
302 |                         "0x171ee4d007b9286d91b581f6d38902e5befc3876b96c71bc178b5f5e8dbf1e40"
303 |                     ]
304 |                 ],
305 |                 [
306 |                     "0x25afbb8fef95d8481e9e49b4a94848473794447d032fdde2cd73a0d6318b6c3c",
307 |                     "0x2a24e19699e2d8495357cf9b65fb215cebbcda2817b1627758a330e57db5c4b9"
308 |                 ]
309 |             ]),
310 |             30 => json!([
311 |                 [
312 |                     "0x19ded61ab5c58fdb12367526c6bc04b9186d0980c4b6fd48a44093e80f9b4206",
313 |                     "0x2e619a034be10e9aab294f1c77a480378e84782c8519449aef0c8f6952382bda"
314 |                 ],
315 |                 [
316 |                     [
317 |                         "0x2202954c0cdb43dc240d56c3a60d125dbc676f8d97bfeac5987500eb0ff4b9a1",
318 |                         "0x35f5b9d8bfba1341fe9fabef6f46d242e1b22c4006ed3ae3f240f0409b20799"
319 |                     ],
320 |                     [
321 |                         "0x13ef645aeaffda30d38c1df68d79d9682d3d002a388e5672fe9b9c7f3224acd7",
322 |                         "0x10a45a9a99cfaf9aef84ab40c5fdad411e800e24471f24ec76addb74b9e041af"
323 |                     ]
324 |                 ],
325 |                 [
326 |                     "0x1f72d009494e8694cf608c54131e7d565625d59e4637ea77cbf2620c719e8c77",
327 |                     "0x19ee17159b599f6f4b2294d4fb29760d2dc1b58adc0519ce546ad274928f6bc4"
328 |                 ]
329 |             ]),
330 |             _ => panic!("unexpected depth: {}", depth),
331 |         };
332 |         assert_eq!(json, valid_values);
333 |     }
334 | }
335 | 
--------------------------------------------------------------------------------
/crates/semaphore/src/util.rs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/worldcoin/semaphore-rs/c1a7165bc838c8ea8de6ee7e4b3571b36994b619/crates/semaphore/src/util.rs
--------------------------------------------------------------------------------
/crates/storage/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "semaphore-rs-storage"
3 | version.workspace = true
4 | edition.workspace = true
5 | homepage.workspace = true
6 | license.workspace = true
7 | repository.workspace = true
8 | authors.workspace = true
9 | description.workspace = true
10 | keywords.workspace = true
11 | categories.workspace = true
12 | 
13 | [dependencies]
14 | bytemuck.workspace = true
15 | color-eyre.workspace = true
16 | mmap-rs.workspace = true
17 | tempfile.workspace = true
18 | 
--------------------------------------------------------------------------------
/crates/storage/src/lib.rs:
--------------------------------------------------------------------------------
1 | use std::ops::{Deref, DerefMut};
2 | 
3 | mod mmap_vec;
4 | 
5 | use bytemuck::Pod;
6 | pub use mmap_vec::MmapVec;
7 | 
8 | pub trait GenericStorage<T>:
9 |     Deref<Target = [T]> + DerefMut + Extend<T> + Send + Sync
10 | {
11 |     fn push(&mut self, value: T);
12 | 
13 |     fn extend_from_slice(&mut self, slice: &[T]);
14 | 
15 |     fn clear(&mut self);
16 | }
17 | 
18 | impl<T: Clone + Send + Sync> GenericStorage<T> for Vec<T> {
19 |     fn push(&mut self, value: T) {
20 |         self.push(value);
21 |     }
22 | 
23 |     fn extend_from_slice(&mut self, slice: &[T]) {
24 |         Vec::extend_from_slice(self, slice);
25 |     }
26 | 
27 |     fn clear(&mut self) {
28 |         self.clear();
29 |     }
30 | }
31 | 
32 | impl<T: Pod + Send + Sync> GenericStorage<T> for MmapVec<T> {
33 |     fn push(&mut self, value: T) {
34 |         self.push(value);
35 |     }
36 | 
37 |     fn extend_from_slice(&mut self, slice: &[T]) {
38 |         self.extend_from_slice(slice);
39 |     }
40 | 
41 |     fn clear(&mut self) {
42 |         self.clear();
43 |     }
44 | }
45 | 
--------------------------------------------------------------------------------
/crates/trees/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "semaphore-rs-trees"
3 | version.workspace = true
4 | edition.workspace = true
5 | homepage.workspace = true
6 | license.workspace = true
7 | repository.workspace = true
8 | authors.workspace = true
9 | description.workspace = true
10 | keywords.workspace = true
11 | categories.workspace = true
12 | 
13 | [dependencies]
14 | # Internal
15 | semaphore-rs-hasher.workspace = true
16 | semaphore-rs-storage.workspace = true
17 | semaphore-rs-ark-circom.workspace = true
18 | 
19 | # 3rd Party
20 | bytemuck.workspace = true
21 | color-eyre.workspace = true
22 | derive-where.workspace = true
23 | hex.workspace = true
24 | hex-literal.workspace = true
25 | itertools.workspace = true
26 | mmap-rs.workspace = true
27 | once_cell.workspace = true
28 | rayon.workspace = true
29 | ruint.workspace = true
30 | serde.workspace = true
31 | thiserror.workspace = true
32 | tiny-keccak = { workspace = true, features = ["sha3"] }
33 | 
34 | # Ark
35 | ark-bn254.workspace = true
36 | ark-ec.workspace = true
37 | ark-ff.workspace = true
38 | ark-groth16.workspace = true
39 | ark-relations.workspace = true
40 | ark-std.workspace = true
41 | 
42 | [dev-dependencies]
43 | semaphore-rs-poseidon.workspace = true
44 | semaphore-rs-keccak.workspace = true
45 | 
46 | rand.workspace = true
47 | serial_test.workspace = true
48 | tempfile.workspace = true
49 | test-case.workspace = true
50 | 
--------------------------------------------------------------------------------
/crates/trees/src/imt/mod.rs:
--------------------------------------------------------------------------------
1 | //! Implements basic binary Merkle trees
2 | 
3 | use std::fmt::Debug;
4 | use std::iter::{once, repeat_n, successors};
5 | 
6 | use bytemuck::Pod;
7 | use derive_where::derive_where;
8 | use semaphore_rs_hasher::Hasher;
9 | 
10 | use crate::proof::{Branch, InclusionProof};
11 | 
12 | /// Merkle tree with all leaf and intermediate hashes stored
13 | #[derive_where(Clone; <H as Hasher>::Hash: Clone)]
14 | #[derive_where(PartialEq; <H as Hasher>::Hash: PartialEq)]
15 | #[derive_where(Eq; <H as Hasher>::Hash: Eq)]
16 | #[derive_where(Debug; <H as Hasher>::Hash: Debug)]
17 | pub struct MerkleTree<H>
18 | where
19 |     H: Hasher,
20 | {
21 |     /// Depth of the tree, # of layers including leaf layer
22 |     depth: usize,
23 | 
24 |     /// Hash value of empty subtrees of given depth, starting at leaf level
25 |     empty: Vec<H::Hash>,
26 | 
27 |     /// Hash values of tree nodes and leaves, breadth first order
28 |     nodes: Vec<H::Hash>,
29 | }
30 | 
31 | /// For a given node index, return the parent node index
32 | /// Returns None if there is no parent (root node)
33 | const fn parent(index: usize) -> Option<usize> {
34 |     if index <= 1 {
35 |         None
36 |     } else {
37 |         Some(index >> 1)
38 |     }
39 | }
40 | 
41 | /// For a given node index, return index of the first (left) child.
42 | const fn left_child(index: usize) -> usize {
43 |     index << 1
44 | }
45 | 
46 | const fn depth(index: usize) -> usize {
47 |     // Nodes are stored in 1-based breadth-first order (index 1 is the root),
48 |     // so a node's depth is the floor of the base-2 logarithm of its index.
49 |     if index <= 1 {
50 |         return 0;
51 |     }
52 | 
53 |     index.ilog2() as usize
54 | }
55 | 
56 | impl<H> MerkleTree<H>
57 | where
58 |     H: Hasher,
59 |     <H as Hasher>::Hash: Clone + Copy + Pod + Eq + Debug,
60 | {
61 |     /// Creates a new `MerkleTree`
62 |     /// * `depth` - The depth of the tree, including the root. This is 1 greater
63 |     ///   than the `treeLevels` argument to the Semaphore contract.
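    ///
    /// A sketch using the `Keccak256` hasher that the tests below also use:
    ///
    /// ```
    /// use semaphore_rs_keccak::keccak::Keccak256;
    /// use semaphore_rs_trees::imt::MerkleTree;
    ///
    /// // Depth-3 tree of all-zero leaves, then overwrite leaf 0.
    /// let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);
    /// tree.set(0, [1; 32]);
    /// let proof = tree.proof(0).unwrap();
    /// assert!(tree.verify([1; 32], &proof));
    /// ```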
64 |     pub fn new(depth: usize, initial_leaf: H::Hash) -> Self {
65 |         // Compute empty node values, leaf to root
66 |         let empty = successors(Some(initial_leaf), |prev| Some(H::hash_node(prev, prev)))
67 |             .take(depth + 1)
68 |             .collect::<Vec<_>>();
69 | 
70 |         // Compute node values
71 |         let first_node = std::iter::once(initial_leaf);
72 |         let nodes = empty
73 |             .iter()
74 |             .rev()
75 |             .enumerate()
76 |             .flat_map(|(depth, hash)| repeat_n(hash, 1 << depth))
77 |             .cloned();
78 | 
79 |         let nodes = first_node.chain(nodes).collect();
80 | 
81 |         Self {
82 |             depth,
83 |             empty,
84 |             nodes,
85 |         }
86 |     }
87 | 
88 |     #[must_use]
89 |     pub fn num_leaves(&self) -> usize {
90 |         1 << self.depth
91 |     }
92 | 
93 |     #[must_use]
94 |     pub fn root(&self) -> H::Hash {
95 |         self.nodes[1]
96 |     }
97 | 
98 |     pub fn set(&mut self, leaf: usize, hash: H::Hash) {
99 |         self.set_range(leaf, once(hash));
100 |     }
101 | 
102 |     pub fn set_range<I: IntoIterator<Item = H::Hash>>(&mut self, start: usize, hashes: I) {
103 |         let index = self.num_leaves() + start;
104 | 
105 |         let mut count = 0;
106 |         // TODO: Error/panic when hashes is longer than available leaves
107 |         for (leaf, hash) in self.nodes[index..].iter_mut().zip(hashes) {
108 |             *leaf = hash;
109 |             count += 1;
110 |         }
111 | 
112 |         if count != 0 {
113 |             self.update_nodes(index, index + (count - 1));
114 |         }
115 |     }
116 | 
117 |     fn update_nodes(&mut self, start: usize, end: usize) {
118 |         debug_assert_eq!(depth(start), depth(end));
119 |         if let (Some(start), Some(end)) = (parent(start), parent(end)) {
120 |             for parent in start..=end {
121 |                 let child = left_child(parent);
122 |                 self.nodes[parent] = H::hash_node(&self.nodes[child], &self.nodes[child + 1]);
123 |             }
124 |             self.update_nodes(start, end);
125 |         }
126 |     }
127 | 
128 |     #[must_use]
129 |     pub fn proof(&self, leaf: usize) -> Option<InclusionProof<H>> {
130 |         if leaf >= self.num_leaves() {
131 |             return None;
132 |         }
133 |         let mut index = self.num_leaves() + leaf;
134 |         let mut path = Vec::with_capacity(self.depth);
135 |         while let Some(parent) = parent(index) {
136 |             // Add proof for node at index to parent
137 |             path.push(match index & 1 {
138 |                 1 => Branch::Right(self.nodes[index - 1]),
139 |                 0 => Branch::Left(self.nodes[index + 1]),
140 |                 _ => unreachable!(),
141 |             });
142 |             index = parent;
143 |         }
144 |         Some(InclusionProof(path))
145 |     }
146 | 
147 |     #[must_use]
148 |     pub fn verify(&self, hash: H::Hash, proof: &InclusionProof<H>) -> bool {
149 |         proof.root(hash) == self.root()
150 |     }
151 | 
152 |     #[must_use]
153 |     pub fn leaves(&self) -> &[H::Hash] {
154 |         &self.nodes[self.num_leaves()..]
155 |     }
156 | }
157 | 
158 | impl<H: Hasher> InclusionProof<H> {
159 |     /// Compute the leaf index for this proof
160 |     #[must_use]
161 |     pub fn leaf_index(&self) -> usize {
162 |         self.0.iter().rev().fold(0, |index, branch| match branch {
163 |             Branch::Left(_) => index << 1,
164 |             Branch::Right(_) => (index << 1) + 1,
165 |         })
166 |     }
167 | 
168 |     /// Compute the Merkle root given a leaf hash
169 |     #[must_use]
170 |     pub fn root(&self, hash: H::Hash) -> H::Hash {
171 |         self.0.iter().fold(hash, |hash, branch| match branch {
172 |             Branch::Left(sibling) => H::hash_node(&hash, sibling),
173 |             Branch::Right(sibling) => H::hash_node(sibling, &hash),
174 |         })
175 |     }
176 | }
177 | 
178 | #[cfg(test)]
179 | pub mod test {
180 |     use hex_literal::hex;
181 |     use ruint::aliases::U256;
182 |     use semaphore_rs_keccak::keccak::Keccak256;
183 |     use semaphore_rs_poseidon::Poseidon;
184 |     use test_case::test_case;
185 | 
186 |     use super::*;
187 | 
188 |     #[test_case(0 => None)]
189 |     #[test_case(1 => None)]
190 |     #[test_case(2 => Some(1))]
191 |     #[test_case(3 => Some(1))]
192 |     #[test_case(4 => Some(2))]
193 |     #[test_case(5 => Some(2))]
194 |     #[test_case(6 => Some(3))]
195 |     #[test_case(27 => Some(13))]
196 |     fn parent_of(index: usize) -> Option<usize> {
197 |         parent(index)
198 |     }
199 | 
200 |     #[test_case(0 => 0 ; "Nonsense case")]
201 |     #[test_case(1 => 2)]
202 |     #[test_case(2 => 4)]
203 |     #[test_case(3 => 6)]
204 |     fn left_child_of(index: usize) -> usize {
205 |         left_child(index)
206 |     }
207 | 
208 |     #[test_case(0 => 0)]
209 |     #[test_case(1 => 0)]
210 |     #[test_case(2 => 1)]
211 |     #[test_case(3 => 1)]
212 |     #[test_case(6 => 2)]
213 |     fn depth_of(index: usize) -> usize {
214 |         depth(index)
215 |     }
216 | 
217 |     #[test_case(2 => hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30"))]
218 |     fn empty_keccak(depth: usize) -> [u8; 32] {
219 |         let tree = MerkleTree::<Keccak256>::new(depth, [0; 32]);
220 | 
221 |         tree.root()
222 |     }
223 | 
224 |     #[test]
225 |     fn simple_poseidon() {
226 |         let mut tree = MerkleTree::<Poseidon>::new(10, U256::ZERO);
227 | 
228 |         let expected_root = ruint::uint!(
229 |             12413880268183407374852357075976609371175688755676981206018884971008854919922_U256
230 |         );
231 |         assert_eq!(tree.root(), expected_root);
232 | 
233 |         tree.set(0, ruint::uint!(1_U256));
234 | 
235 |         let expected_root = ruint::uint!(
236 |             467068234150758165281816522946040748310650451788100792957402532717155514893_U256
237 |         );
238 |         assert_eq!(tree.root(), expected_root);
239 |     }
240 | }
241 | 
--------------------------------------------------------------------------------
/crates/trees/src/lib.rs:
--------------------------------------------------------------------------------
1 | pub mod cascading;
2 | pub mod imt;
3 | pub mod lazy;
4 | pub mod proof;
5 | 
6 | pub use proof::{Branch, InclusionProof};
7 | 
--------------------------------------------------------------------------------
/crates/trees/src/proof.rs:
--------------------------------------------------------------------------------
1 | use std::fmt::Debug;
2 | 
3 | use derive_where::derive_where;
4 | use semaphore_rs_hasher::Hasher;
5 | use serde::{Deserialize, Serialize};
6 | 
7 | /// Merkle proof path, bottom to top.
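/// Element `k` of the path is the sibling hash at height `k` above the leaf.
/// `Branch::Left(h)` records that the node on the path is a left child whose
/// right sibling is `h`; `leaf_index` (implemented in the `imt` module)
/// recovers the leaf position from this Left/Right pattern alone.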
8 | #[derive_where(Clone; <H as Hasher>::Hash: Clone)]
9 | #[derive_where(PartialEq; <H as Hasher>::Hash: PartialEq)]
10 | #[derive_where(Eq; <H as Hasher>::Hash: Eq)]
11 | #[derive_where(Debug; <H as Hasher>::Hash: Debug)]
12 | pub struct InclusionProof<H>(pub Vec<Branch<H::Hash>>)
13 | where
14 |     H: Hasher;
15 | 
16 | /// Element of a Merkle proof
17 | #[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
18 | pub enum Branch<T> {
19 |     /// Left branch taken, value is the right sibling hash.
20 |     Left(T),
21 | 
22 |     /// Right branch taken, value is the left sibling hash.
23 |     Right(T),
24 | }
25 | 
26 | impl<H> Serialize for InclusionProof<H>
27 | where
28 |     H: Hasher,
29 |     H::Hash: Serialize,
30 | {
31 |     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
32 |     where
33 |         S: serde::Serializer,
34 |     {
35 |         self.0.serialize(serializer)
36 |     }
37 | }
38 | 
39 | impl<'de, H> Deserialize<'de> for InclusionProof<H>
40 | where
41 |     H: Hasher,
42 |     H::Hash: Deserialize<'de>,
43 | {
44 |     fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
45 |     where
46 |         D: serde::Deserializer<'de>,
47 |     {
48 |         let branches = Vec::deserialize(deserializer)?;
49 |         Ok(InclusionProof(branches))
50 |     }
51 | }
52 | 
53 | impl<T> Branch<T> {
54 |     /// Get the inner value
55 |     #[must_use]
56 |     pub fn into_inner(self) -> T {
57 |         match self {
58 |             Self::Left(sibling) => sibling,
59 |             Self::Right(sibling) => sibling,
60 |         }
61 |     }
62 | }
63 | 
64 | impl<T: Debug> Debug for Branch<T> {
65 |     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
66 |         match self {
67 |             Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
68 |             Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
69 |         }
70 |     }
71 | }
72 | 
--------------------------------------------------------------------------------
/crates/trees/tests/equivalent.rs:
--------------------------------------------------------------------------------
1 | use rand::{thread_rng, Rng};
2 | use ruint::aliases::U256;
3 | use semaphore_rs_poseidon::Poseidon;
4 | use semaphore_rs_trees::cascading::CascadingMerkleTree;
5 | use semaphore_rs_trees::imt::MerkleTree;
6 | use semaphore_rs_trees::lazy::{Canonical, LazyMerkleTree};
7 | 
8 | const DEPTH: usize = 20;
9 | const DENSE_PREFIX: usize = 16;
10 | 
11 | const NUM_LEAVES: usize = 100;
12 | 
13 | type HashType = Poseidon;
14 | const EMPTY_VALUE: U256 = U256::ZERO;
15 | 
16 | #[test]
17 | fn equivalent() {
18 |     let mut lazy: LazyMerkleTree<HashType, Canonical> =
19 |         LazyMerkleTree::<HashType, Canonical>::new_with_dense_prefix(
20 |             DEPTH,
21 |             DENSE_PREFIX,
22 |             &EMPTY_VALUE,
23 |         );
24 |     let mut lazy_derived = lazy.derived();
25 |     let mut imt: MerkleTree<HashType> = MerkleTree::new(DEPTH, EMPTY_VALUE);
26 |     let mut cascading: CascadingMerkleTree<HashType> =
27 |         CascadingMerkleTree::new(vec![], DEPTH, &EMPTY_VALUE);
28 | 
29 |     assert_eq!(lazy.root(), cascading.root());
30 |     assert_eq!(lazy.root(), imt.root());
31 | 
32 |     let mut rng = thread_rng();
33 | 
34 |     let random_leaves = (0..NUM_LEAVES)
35 |         .map(|_| {
36 |             let mut limbs = [0u64; 4];
37 |             for limb in limbs.iter_mut() {
38 |                 *limb = rng.gen();
39 |             }
40 |             // zero last to fit in field
41 |             limbs[3] &= 0x0FFFFFFFFFFFFFFF;
42 | 
43 |             U256::from_limbs(limbs)
44 |         })
45 |         .collect::<Vec<_>>();
46 | 
47 |     for (i, leaf) in random_leaves.iter().enumerate() {
48 |         lazy_derived = lazy_derived.update(i, leaf);
49 |         imt.set(i, *leaf);
50 |         cascading.push(*leaf).unwrap();
51 |     }
52 | 
53 |     // Lazy & IMT both return the total (i.e. max) number of leaves
54 |     assert_eq!(lazy.leaves().count(), lazy_derived.leaves().count());
55 |     assert_eq!(lazy.leaves().count(), imt.num_leaves());
56 | 
57 |     // Cascading returns the current number of leaves
58 |     assert_eq!(cascading.num_leaves(), NUM_LEAVES);
59 | 
60 |     assert_eq!(lazy_derived.root(), cascading.root());
61 |     assert_eq!(lazy_derived.root(), imt.root());
62 | 
63 |     // Mutably update the canonical lazy tree
64 |     for (i, leaf) in random_leaves.iter().enumerate() {
65 |         lazy = lazy.update_with_mutation(i, leaf);
66 |     }
67 | 
68 |     assert_eq!(lazy.root(), cascading.root());
69 | 
70 |     for (i, leaf) in random_leaves.iter().enumerate() {
71 |         let cascading_proof = cascading.proof(i);
72 |         let lazy_proof = lazy.proof(i);
73 |         let imt_proof = imt.proof(i).unwrap();
74 | 
75 |         assert_eq!(cascading_proof, lazy_proof);
76 |         assert_eq!(cascading_proof, imt_proof);
77 | 
78 |         assert!(cascading.verify(*leaf, &cascading_proof));
79 |         assert!(lazy.verify(*leaf, &cascading_proof));
80 |         assert!(imt.verify(*leaf, &cascading_proof));
81 |     }
82 | }
83 | 
--------------------------------------------------------------------------------
/crates/utils/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "semaphore-rs-utils"
3 | version.workspace = true
4 | edition.workspace = true
5 | homepage.workspace = true
6 | license.workspace = true
7 | repository.workspace = true
8 | authors.workspace = true
9 | description.workspace = true
10 | keywords.workspace = true
11 | categories.workspace = true
12 | 
13 | [dependencies]
14 | serde.workspace = true
15 | tiny-keccak.workspace = true
16 | hex.workspace = true
17 | 
18 | [dev-dependencies]
19 | serde_json.workspace = true
20 | bincode.workspace = true
21 | 
22 | 
--------------------------------------------------------------------------------
/crates/utils/src/lib.rs:
--------------------------------------------------------------------------------
1 | use core::{
2 |     fmt::{Formatter, Result as FmtResult},
3 |     str,
4 | };
5 | use serde::{
6 |     de::{Error as DeError, Visitor},
7 |     Deserializer, Serializer,
8 | };
9 | use tiny_keccak::{Hasher as _, Keccak};
10 | 
11 | pub fn keccak256(bytes: &[u8]) -> [u8; 32] {
12 |     let mut output = [0; 32];
13 |     let mut hasher = Keccak::v256();
14 |     hasher.update(bytes);
15 |     hasher.finalize(&mut output);
16 |     output
17 | }
18 | 
19 | pub fn bytes_to_hex<const N: usize, const M: usize>(bytes: &[u8; N]) -> [u8; M] {
20 |     // TODO: Replace `M` with a const expression once it's stable.
21 |     debug_assert_eq!(M, 2 * N + 2);
22 |     let mut result = [0u8; M];
23 |     result[0] = b'0';
24 |     result[1] = b'x';
25 |     hex::encode_to_slice(&bytes[..], &mut result[2..]).expect("the buffer is correctly sized");
26 |     result
27 | }
28 | 
29 | /// Helper to serialize byte arrays
30 | pub fn serialize_bytes<const N: usize, const M: usize, S: Serializer>(
31 |     serializer: S,
32 |     bytes: &[u8; N],
33 | ) -> Result<S::Ok, S::Error> {
34 |     // TODO: Replace `M` with a const expression once it's stable.
35 |     debug_assert_eq!(M, 2 * N + 2);
36 |     if serializer.is_human_readable() {
37 |         // Write as a 0x prefixed lower-case hex string
38 |         let buffer = bytes_to_hex::<N, M>(bytes);
39 |         let string = str::from_utf8(&buffer).expect("the buffer is valid UTF-8");
40 |         serializer.serialize_str(string)
41 |     } else {
42 |         // Write as bytes directly
43 |         serializer.serialize_bytes(&bytes[..])
44 |     }
45 | }
46 | 
47 | /// Helper to deserialize byte arrays from hex strings
48 | ///
49 | /// Inputs that do not decode to exactly `N` bytes are rejected with a `hex::FromHexError`.
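///
/// ```
/// let bytes = semaphore_rs_utils::bytes_from_hex::<4>("0xdeadbeef").unwrap();
/// assert_eq!(bytes, [0xde, 0xad, 0xbe, 0xef]);
/// assert!(semaphore_rs_utils::bytes_from_hex::<4>("0xbeef").is_err());
/// ```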
50 | pub fn bytes_from_hex<const N: usize>(s: &str) -> Result<[u8; N], hex::FromHexError> {
51 |     let str = trim_hex_prefix(s);
52 |     let mut result = [0_u8; N];
53 |     hex::decode_to_slice(str, &mut result)?;
54 |     Ok(result)
55 | }
56 | 
57 | /// Helper function to remove an optional `0x` prefix from hex strings.
58 | fn trim_hex_prefix(str: &str) -> &str {
59 |     str.trim_start_matches("0x").trim_start_matches("0X")
60 | }
61 | 
62 | /// Helper to deserialize byte arrays.
63 | pub fn deserialize_bytes<'de, const N: usize, D: Deserializer<'de>>(
64 |     deserializer: D,
65 | ) -> Result<[u8; N], D::Error> {
66 |     if deserializer.is_human_readable() {
67 |         struct StrVisitor<const N: usize>;
68 |         impl<const N: usize> Visitor<'_> for StrVisitor<N> {
69 |             type Value = [u8; N];
70 | 
71 |             fn expecting(&self, formatter: &mut Formatter) -> FmtResult {
72 |                 write!(formatter, "a {N} byte hex string")
73 |             }
74 | 
75 |             fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
76 |             where
77 |                 E: DeError,
78 |             {
79 |                 bytes_from_hex(value).map_err(|e| E::custom(format!("Error in hex: {e}")))
80 |             }
81 |         }
82 |         deserializer.deserialize_str(StrVisitor)
83 |     } else {
84 |         struct ByteVisitor<const N: usize>;
85 |         impl<const N: usize> Visitor<'_> for ByteVisitor<N> {
86 |             type Value = [u8; N];
87 | 
88 |             fn expecting(&self, formatter: &mut Formatter) -> FmtResult {
89 |                 write!(formatter, "{N} bytes of binary data")
90 |             }
91 | 
92 |             fn visit_bytes<E>(self, value: &[u8]) -> Result<Self::Value, E>
93 |             where
94 |                 E: DeError,
95 |             {
96 |                 if value.len() != N {
97 |                     return Err(E::invalid_length(value.len(), &self));
98 |                 }
99 |                 let mut result = [0_u8; N];
100 |                 result.copy_from_slice(value);
101 |                 Ok(result)
102 |             }
103 |         }
104 |         deserializer.deserialize_bytes(ByteVisitor)
105 |     }
106 | }
107 | 
108 | #[cfg(test)]
109 | mod test {
110 |     use super::*;
111 | 
112 |     #[test]
113 |     fn test_serialize_bytes_hex() {
114 |         let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
115 |         let mut ser = serde_json::Serializer::new(Vec::new());
116 |         serialize_bytes::<16, 34, _>(&mut ser, &bytes).unwrap();
117 |         let json = ser.into_inner();
118 |         assert_eq!(json, b"\"0x0102030405060708090a0b0c0d0e0f10\"");
119 |     }
120 | 
121 |     #[test]
122 |     fn test_serialize_bytes_bin() {
123 |         let bytes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16];
124 |         let mut bin: Vec<u8> = Vec::new();
125 |         {
126 |             let mut ser = bincode::Serializer::new(&mut bin, bincode::options());
127 |             serialize_bytes::<16, 34, _>(&mut ser, &bytes).unwrap();
128 |         }
129 |         // Bincode appears to prefix with a length.
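        // With `bincode::options()` the default integer encoding is varint,
        // so the length 16 fits in the single leading byte below.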
130 | assert_eq!( 131 | bin, 132 | [16, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16] 133 | ); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2", 3 | "ignorePaths": [], 4 | "dictionaryDefinitions": [], 5 | "dictionaries": [], 6 | "words": [ 7 | "biguint", 8 | "chacha", 9 | "circom", 10 | "groth", 11 | "hasher", 12 | "keccak", 13 | "merkle", 14 | "mmaped", 15 | "modpow", 16 | "mulmod", 17 | "proptest", 18 | "Repr", 19 | "Seedable", 20 | "snarkfiles", 21 | "thiserror", 22 | "zkey" 23 | ], 24 | "ignoreWords": [], 25 | "import": [] 26 | } 27 | -------------------------------------------------------------------------------- /mit-license.md: -------------------------------------------------------------------------------- 1 | # The MIT License (MIT) 2 | 3 | Copyright © 2021-2023 Worldcoin Foundation 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 8 | 9 | **The Software is provided “as is”, without warranty of any kind, express or implied, including but not limited to the warranties of merchantability, fitness for a particular purpose and noninfringement. In no event shall the authors or copyright holders be liable for any claim, damages or other liability, whether in an action of contract, tort or otherwise, arising from, out of or in connection with the Software or the use or other dealings in the Software.** 10 | -------------------------------------------------------------------------------- /publish_all.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cargo publish -p semaphore-rs-utils 6 | cargo publish -p semaphore-rs-ark-circom 7 | cargo publish -p semaphore-rs-proof 8 | cargo publish -p semaphore-rs-ark-zkey 9 | cargo publish -p semaphore-rs-hasher 10 | cargo publish -p semaphore-rs-poseidon 11 | cargo publish -p semaphore-rs-keccak 12 | cargo publish -p semaphore-rs-storage 13 | cargo publish -p semaphore-rs-trees 14 | cargo publish -p semaphore-rs-depth-config 15 | cargo publish -p semaphore-rs-depth-macros 16 | cargo publish -p semaphore-rs-witness 17 | cargo publish -p semaphore-rs 18 | 19 | -------------------------------------------------------------------------------- /supply-chain/audits.toml: -------------------------------------------------------------------------------- 1 | 2 | # cargo-vet audits file 3 | 4 | [audits] 5 | 6 | [[trusted.aho-corasick]] 7 | criteria = "safe-to-deploy" 8 | user-id = 189 # Andrew Gallant (BurntSushi) 9 | start = "2019-03-28" 10 | end = "2024-06-15" 11 | 12 | [[trusted.byteorder]] 13 | criteria = "safe-to-deploy" 14 | user-id = 189 # Andrew Gallant (BurntSushi) 15 | start = "2019-06-09" 16 | end = "2024-06-15" 17 | 18 | [[trusted.csv]] 19 | criteria = "safe-to-deploy" 20 | user-id = 189 # Andrew Gallant (BurntSushi) 21 | start = "2019-04-05" 22 | end = "2024-06-15" 23 | 
24 | [[trusted.csv-core]] 25 | criteria = "safe-to-deploy" 26 | user-id = 189 # Andrew Gallant (BurntSushi) 27 | start = "2019-06-26" 28 | end = "2024-06-15" 29 | 30 | [[trusted.memchr]] 31 | criteria = "safe-to-deploy" 32 | user-id = 189 # Andrew Gallant (BurntSushi) 33 | start = "2019-07-07" 34 | end = "2024-06-15" 35 | 36 | [[trusted.proc-macro2]] 37 | criteria = "safe-to-deploy" 38 | user-id = 3618 # David Tolnay (dtolnay) 39 | start = "2019-04-23" 40 | end = "2024-06-14" 41 | 42 | [[trusted.regex]] 43 | criteria = "safe-to-deploy" 44 | user-id = 189 # Andrew Gallant (BurntSushi) 45 | start = "2019-02-27" 46 | end = "2024-06-15" 47 | 48 | [[trusted.regex-automata]] 49 | criteria = "safe-to-deploy" 50 | user-id = 189 # Andrew Gallant (BurntSushi) 51 | start = "2019-02-25" 52 | end = "2024-06-15" 53 | 54 | [[trusted.regex-syntax]] 55 | criteria = "safe-to-deploy" 56 | user-id = 189 # Andrew Gallant (BurntSushi) 57 | start = "2019-03-30" 58 | end = "2024-06-15" 59 | 60 | [[trusted.same-file]] 61 | criteria = "safe-to-deploy" 62 | user-id = 189 # Andrew Gallant (BurntSushi) 63 | start = "2019-07-16" 64 | end = "2024-06-15" 65 | 66 | [[trusted.serde]] 67 | criteria = "safe-to-deploy" 68 | user-id = 3618 # David Tolnay (dtolnay) 69 | start = "2019-03-01" 70 | end = "2024-06-14" 71 | 72 | [[trusted.serde_derive]] 73 | criteria = "safe-to-deploy" 74 | user-id = 3618 # David Tolnay (dtolnay) 75 | start = "2019-03-01" 76 | end = "2024-06-14" 77 | 78 | [[trusted.ucd-trie]] 79 | criteria = "safe-to-deploy" 80 | user-id = 189 # Andrew Gallant (BurntSushi) 81 | start = "2019-07-21" 82 | end = "2024-06-15" 83 | 84 | [[trusted.walkdir]] 85 | criteria = "safe-to-deploy" 86 | user-id = 189 # Andrew Gallant (BurntSushi) 87 | start = "2019-06-09" 88 | end = "2024-06-15" 89 | 90 | [[trusted.winapi-util]] 91 | criteria = "safe-to-deploy" 92 | user-id = 189 # Andrew Gallant (BurntSushi) 93 | start = "2020-01-11" 94 | end = "2024-06-15" 95 | --------------------------------------------------------------------------------