├── VERSION
├── .env
├── groth16-framework
├── src
│   ├── prover
│   │   └── mod.rs
│   ├── evm
│   │   ├── mod.rs
│   │   ├── utils.rs
│   │   └── executor.rs
│   ├── verifier
│   │   ├── mod.rs
│   │   ├── groth16.rs
│   │   └── evm.rs
│   ├── proof.rs
│   ├── utils.rs
│   └── test_utils.rs
├── groth16_query
│   ├── query_output.json
│   └── query_input.json
├── tests
│   ├── common
│   │   ├── mod.rs
│   │   ├── io.rs
│   │   ├── utils.rs
│   │   └── context.rs
│   ├── group_hashing.rs
│   └── keccak.rs
└── Cargo.toml
├── ryhope
├── src
│   ├── tests
│   │   ├── mod.rs
│   │   └── example.rs
│   └── error.rs
├── Cargo.toml
└── README.org
├── rust-toolchain.toml
├── mp2-v1
├── src
│   ├── query
│   │   └── mod.rs
│   ├── values_extraction
│   │   └── gadgets
│   │   │   └── mod.rs
│   ├── contract_extraction
│   │   └── mod.rs
│   ├── final_extraction
│   │   ├── mod.rs
│   │   └── simple_circuit.rs
│   ├── length_extraction
│   │   └── mod.rs
│   ├── lib.rs
│   ├── indexing
│   │   ├── block.rs
│   │   ├── index.rs
│   │   └── mod.rs
│   └── block_extraction
│   │   ├── public_inputs.rs
│   │   └── mod.rs
├── rustfmt.toml
├── test-contracts
│   └── foundry.toml
├── tests
│   ├── README.md
│   └── common
│   │   ├── bindings
│   │   └── mod.rs
│   │   ├── cases
│   │   └── mod.rs
│   │   ├── block_extraction.rs
│   │   ├── values_extraction.rs
│   │   ├── benchmarker.rs
│   │   ├── length_extraction.rs
│   │   └── ivc.rs
├── README.md
└── Cargo.toml
├── verifiable-db
├── src
│   ├── results_tree
│   │   ├── binding
│   │   │   └── mod.rs
│   │   └── mod.rs
│   ├── row_tree
│   │   └── mod.rs
│   ├── ivc
│   │   └── mod.rs
│   ├── query
│   │   ├── mod.rs
│   │   └── universal_circuit
│   │   │   └── mod.rs
│   ├── lib.rs
│   └── cells_tree
│   │   └── empty_node.rs
├── rustfmt.toml
└── Cargo.toml
├── .envrc
├── mp2-test
├── src
│   ├── lib.rs
│   ├── log.rs
│   ├── utils.rs
│   ├── eth.rs
│   ├── mpt_sequential.rs
│   └── circuit.rs
└── Cargo.toml
├── devenv.yaml
├── gnark-utils
├── Cargo.toml
├── lib
│   ├── proof.go
│   └── deserialize.go
├── src
│   ├── utils.rs
│   ├── verify.rs
│   ├── compile.rs
│   ├── prove.rs
│   └── lib.rs
├── go.mod
└── build.rs
├── .cargo
└── config.toml
├── .gitignore
├── custom-hooks
├── README.md
└── pre-commit
├── inspect
├── Cargo.toml
└── src
│   ├── main.rs
│   └── index.rs
├── .github
├── changelog.sh
└── workflows
│   ├── release.yml
│   ├── prepare-release.yml
│   ├── pr.yml
│   ├── doc.yml
│   └── rust.yml
├── mp2-common
├── src
│   ├── merkle_tree
│   │   └── mod.rs
│   ├── public_inputs.rs
│   ├── group_hashing
│   │   ├── utils.rs
│   │   ├── curve_add.rs
│   │   └── sswu_value.rs
│   ├── mpt_sequential
│   │   ├── leaf_or_extension.rs
│   │   └── utils.rs
│   ├── types.rs
│   └── hash.rs
└── Cargo.toml
├── recursion-framework
├── Cargo.toml
└── src
│   ├── lib.rs
│   └── universal_verifier_gadget
│   └── mod.rs
├── parsil
├── Cargo.toml
├── src
│   ├── parser.rs
│   ├── lib.rs
│   ├── errors.rs
│   ├── dsl.rs
│   ├── queries.rs
│   └── placeholders.rs
└── tests
│   └── context.json
├── devenv.nix
├── Cargo.toml
└── devenv.lock

/VERSION:
--------------------------------------------------------------------------------
3.0.0
--------------------------------------------------------------------------------
/.env:
--------------------------------------------------------------------------------
# Use 5432 by default
PGSQL_PORT=
--------------------------------------------------------------------------------
/groth16-framework/src/prover/mod.rs:
--------------------------------------------------------------------------------
pub mod groth16;
--------------------------------------------------------------------------------
/ryhope/src/tests/mod.rs:
--------------------------------------------------------------------------------
mod example;
mod trees;
--------------------------------------------------------------------------------
/groth16-framework/src/evm/mod.rs:
--------------------------------------------------------------------------------
pub mod executor;
pub mod utils;
--------------------------------------------------------------------------------
/rust-toolchain.toml:
--------------------------------------------------------------------------------
[toolchain]
channel = "nightly-2025-05-22"
--------------------------------------------------------------------------------
/groth16-framework/src/verifier/mod.rs:
--------------------------------------------------------------------------------
pub mod evm;
pub mod groth16;
--------------------------------------------------------------------------------
/mp2-v1/src/query/mod.rs:
--------------------------------------------------------------------------------
pub mod batching_planner;
pub mod planner;
--------------------------------------------------------------------------------
/verifiable-db/src/results_tree/binding/mod.rs:
--------------------------------------------------------------------------------
pub(crate) mod binding_results;
pub(crate) mod public_inputs;
--------------------------------------------------------------------------------
/mp2-v1/src/values_extraction/gadgets/mod.rs:
--------------------------------------------------------------------------------
pub mod column_gadget;
pub mod column_info;
pub mod metadata_gadget;
--------------------------------------------------------------------------------
/mp2-v1/rustfmt.toml:
--------------------------------------------------------------------------------
edition = "2021"

imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
reorder_imports = true
--------------------------------------------------------------------------------
/verifiable-db/rustfmt.toml:
--------------------------------------------------------------------------------
edition = "2021"

imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
reorder_imports = true
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
source_url "https://raw.githubusercontent.com/cachix/devenv/95f329d49a8a5289d31e0982652f7058a189bfca/direnvrc" "sha256-d+8cBpDfDBj41inrADaJt+bDWhOktwslgoP5YiGJ1v0="

use devenv
--------------------------------------------------------------------------------
/mp2-test/src/lib.rs:
--------------------------------------------------------------------------------
//! Utility functions and gadgets for testing

pub mod cells_tree;
pub mod circuit;
pub mod eth;
pub mod log;
pub mod mpt_sequential;
pub mod utils;
--------------------------------------------------------------------------------
/devenv.yaml:
--------------------------------------------------------------------------------
inputs:
  fenix:
    url: github:nix-community/fenix
    inputs:
      nixpkgs:
        follows: nixpkgs
  nixpkgs:
    url: github:cachix/devenv-nixpkgs/rolling
--------------------------------------------------------------------------------
/gnark-utils/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "gnark-utils"
version = "3.0.0"
edition = "2021"

[build-dependencies]
glob.workspace = true
gobuild.workspace = true

[dependencies]
anyhow.workspace = true
base64.workspace = true
--------------------------------------------------------------------------------
/verifiable-db/src/row_tree/mod.rs:
--------------------------------------------------------------------------------
mod api;
mod full_node;
mod leaf;
mod partial_node;
mod public_inputs;
mod secondary_index_cell;

pub use api::{extract_hash_from_proof, CircuitInput, PublicParameters};
pub use public_inputs::PublicInputs;
--------------------------------------------------------------------------------
/.cargo/config.toml:
--------------------------------------------------------------------------------
[build]
rustflags = ["-C", "target-cpu=native"]

# Used to compile the Go code of gnark-utils on macOS.
[target.'cfg(target_os="macos")']
rustflags = [
    "-C",
    "target-cpu=native",
    "-C",
    "link-args=-framework CoreFoundation -framework Security",
]
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*.csv
.DS_Store
/.direnv/
/.idea/
/mp2-v1/mp2.params
/mp2-v1/test-contracts/broadcast
/mp2-v1/test-contracts/cache
/mp2-v1/test-contracts/out
/target

# Devenv
.devenv*
devenv.local.nix

# direnv
.direnv

# pre-commit
.pre-commit-config.yaml
--------------------------------------------------------------------------------
/verifiable-db/src/ivc/mod.rs:
--------------------------------------------------------------------------------
mod api;
mod circuit;
pub(crate) mod public_inputs;

pub use api::{CircuitInput, PublicParameters};
pub use circuit::add_provable_data_commitment_prefix;
use plonky2::iop::target::Target;
pub use public_inputs::PublicInputs;
pub const NUM_IO: usize = PublicInputs::<Target>::TOTAL_LEN;
--------------------------------------------------------------------------------
/groth16-framework/groth16_query/query_output.json:
--------------------------------------------------------------------------------
{"total_matched_rows":2831132460,"rows":[["0x4d6e3f547bdf26efbe72df4a3777834935922dabd5fac7f82545f1c0","0xa8bfa72c","0x74680b0ee5f39d1dee18e2c2e4fbf543fcaef873ea8d7b25df5ea7c72bda7d52","0xbed2c6d5dbf91eaa1d633c519dffaa9fbce9326c41cb57c7d9d7b0ad5e822ae5","0x52ecc550d5b51ab42dd0f7cf9dff74e29dd89d9f6f31c068e7f1a562ddb8cd02"]]}
--------------------------------------------------------------------------------
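As an aside, a minimal sketch of consuming the query output file above — assuming only `serde`/`serde_json`, and using an illustrative struct rather than the crate's own `TestQueryOutput` (defined further down in `groth16-framework/tests/common/io.rs`, which additionally carries an `error` field and fixed-size rows):

```rust
use serde::Deserialize;

// Illustrative mirror of query_output.json (not the crate's own type).
#[derive(Debug, Deserialize)]
struct QueryOutput {
    total_matched_rows: u32,
    rows: Vec<Vec<String>>, // each row is a list of hex-encoded U256 words
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let raw = std::fs::read_to_string("groth16-framework/groth16_query/query_output.json")?;
    let out: QueryOutput = serde_json::from_str(&raw)?;
    println!("{} matched rows, {} returned", out.total_matched_rows, out.rows.len());
    Ok(())
}
```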
/mp2-v1/test-contracts/foundry.toml:
--------------------------------------------------------------------------------
[profile.default]
src = "src"
out = "out"
broadcast = "broadcast"
script = "script"
libs = ["lib"]
optimizer = true
optimizer_runs = 200
# The highest Solidity version supported on macOS for now
solc-version = "0.8.24"
evm_version = "cancun"
verbosity = 4

[fmt]
line_length = 80
--------------------------------------------------------------------------------
/mp2-v1/tests/README.md:
--------------------------------------------------------------------------------
## Integration test

## Local test contract

The local test contracts are managed with [Foundry](https://github.com/foundry-rs/foundry),
which must be installed as a prerequisite.

The test contracts are located in `mp2-v1/test-contracts`.

Run `make bindings` to regenerate the Rust bindings if the test contracts are changed.
--------------------------------------------------------------------------------
/mp2-v1/tests/common/bindings/mod.rs:
--------------------------------------------------------------------------------
pub mod simple {
    use alloy::sol;

    sol!(
        // solc --optimize --abi --bin ./mp2-v1/test-contracts/src/Simple.sol -o mp2-v1/test-contracts/src/
        #[sol(rpc, bytecode = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/test-contracts/src/Simple.bin")))]
        Simple,
        "./test-contracts/src/Simple.abi"
    );
}
--------------------------------------------------------------------------------
/gnark-utils/lib/proof.go:
--------------------------------------------------------------------------------
// Groth16 proof struct

package main

import (
	"github.com/ethereum/go-ethereum/common/hexutil"
)

type Groth16Proof struct {
	Inputs           []string      `json:"inputs"`
	Proofs           []string      `json:"proofs"`
	RawProof         hexutil.Bytes `json:"raw_proof"`
	RawPublicWitness hexutil.Bytes `json:"raw_public_witness"`
}
--------------------------------------------------------------------------------
/custom-hooks/README.md:
--------------------------------------------------------------------------------
## Hook Installation

To install the hooks in this folder, navigate from the root directory to `./.git/hooks`. Then replace the corresponding file with the one in this folder. If you haven't installed any hooks before, the existing file will be called `<hook-name>.sample` instead of just `<hook-name>`. You may also have to make the files inside `./.git/hooks` executable by running `chmod +x ./.git/hooks/*` from the project root.
--------------------------------------------------------------------------------
/custom-hooks/pre-commit:
--------------------------------------------------------------------------------
#!/bin/sh

set -eu

if cargo fmt --all -- --check
then
    echo "cargo fmt OK"
else
    echo "There are some code style issues."
    echo "Run cargo fmt first."
    exit 1
fi

if cargo clippy --all-targets -- -D warnings
then
    echo "cargo clippy OK"
else
    echo "There are some clippy issues."
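    # Clippy lints are denied (-D warnings), so any warning fails the hook.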
    exit 1
fi

exit 0
--------------------------------------------------------------------------------
/groth16-framework/groth16_query/query_input.json:
--------------------------------------------------------------------------------
{"query_limit":0,"query_offset":0,"min_block_number":42,"max_block_number":76,"block_hash":"0x63f6a00302556a531b4f409ad0b4589b2294759629ffc207e6cb28c243f44c16","computational_hash":"0xff8d479e652a80dc4e4089d85675950cb1e5f5d639ec35ec01286f22e4445aa2","user_placeholders":["0x4dc1b24317f7d31d58d397aa3b3c10cdc4c827dcfd6742928525d125ee8232f3","0x1925416df8d89a48383eaef634817e1e2fabe893ea3a0cef52752604554d3ec5"]}
--------------------------------------------------------------------------------
/inspect/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "inspect"
version = "3.0.0"
edition = "2021"

[dependencies]
anyhow.workspace = true
clap.workspace = true
colored.workspace = true
dialoguer.workspace = true
hex.workspace = true
itertools.workspace = true
serde.workspace = true
tabled.workspace = true
tokio.workspace = true

ryhope = { path = "../ryhope" , version = "3.0.0" }
mp2_v1 = { path = "../mp2-v1" , version = "3.0.0" }
--------------------------------------------------------------------------------
/.github/changelog.sh:
--------------------------------------------------------------------------------
#!/bin/sh
#
# This is a hack to palliate the fact that cargo-release runs the pre-release
# hook once for each crate, instead of only once for the whole workspace.
# Calling git-cliff multiple times with the same argument is idempotent, so we
# call it with settings generating the workspace-level changelog once for every
# crate.
git-cliff -o $WORKSPACE_ROOT/CHANGELOG.md --tag $NEW_VERSION -w $WORKSPACE_ROOT
echo $NEW_VERSION > $WORKSPACE_ROOT/VERSION
--------------------------------------------------------------------------------
/groth16-framework/tests/common/mod.rs:
--------------------------------------------------------------------------------
//! Common structs and functions used for integration tests

use verifiable_db::test_utils::MAX_NUM_ITEMS_PER_OUTPUT;

mod context;
mod io;
mod query;
pub mod utils;

pub(crate) use context::TestContext;
pub(crate) use io::{TestQueryInput, TestQueryOutput};

pub(crate) const NUM_PREPROCESSING_IO: usize = verifiable_db::ivc::NUM_IO;
pub(crate) const NUM_QUERY_IO: usize = verifiable_db::query::pi_len::<MAX_NUM_ITEMS_PER_OUTPUT>();
--------------------------------------------------------------------------------
/mp2-test/src/log.rs:
--------------------------------------------------------------------------------
use log::{log_enabled, Level, LevelFilter};
use std::{env, io::Write};

/// Sets RUST_LOG=debug and initializes the logger
/// if it hasn't been enabled already.
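/// Subsequent calls are no-ops: initialization is guarded by `log_enabled!`
/// and uses `try_init`, so an already-configured logger is left untouched.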
pub fn init_logging() {
    if !log_enabled!(Level::Debug) {
        env::set_var("RUST_LOG", "debug");
        let _ = env_logger::builder()
            .format(|buf, record| writeln!(buf, " {}", record.args()))
            .try_init();
        log::set_max_level(LevelFilter::Debug);
    }
}
--------------------------------------------------------------------------------
/verifiable-db/src/query/mod.rs:
--------------------------------------------------------------------------------
use plonky2::iop::target::Target;
use public_inputs::PublicInputsQueryCircuits;

pub mod api;
pub(crate) mod circuits;
pub mod computational_hash_ids;
pub mod merkle_path;
pub(crate) mod output_computation;
pub mod public_inputs;
pub(crate) mod row_chunk_gadgets;
pub mod universal_circuit;
pub mod utils;

pub const fn pi_len<const S: usize>() -> usize {
    PublicInputsQueryCircuits::<Target, S>::total_len()
}
--------------------------------------------------------------------------------
/mp2-v1/README.md:
--------------------------------------------------------------------------------
This crate uses artifacts that are built for the `mp2-v1/test-contracts/src/Simple.sol`
contract:

* `mp2-v1/test-contracts/src/Simple.abi`
* `mp2-v1/test-contracts/src/Simple.bin`.

They were generated with the following version of `solc`:

```
solc --version
solc, the solidity compiler commandline interface
Version: 0.8.30+commit.73712a01.Darwin.appleclang
```

To regenerate these files, run:

```
solc --optimize --abi --bin ./mp2-v1/test-contracts/src/Simple.sol -o ./mp2-v1/test-contracts/src/
```
--------------------------------------------------------------------------------
/gnark-utils/src/utils.rs:
--------------------------------------------------------------------------------
/// Utility functions
use crate::go;
use anyhow::{bail, Result};
use std::ffi::CStr;
use std::os::raw::c_char;

/// Convert a C string-pointer result to a Rust result. It's OK if the
/// pointer is null.
pub fn handle_c_result(result: *const c_char) -> Result<()> {
    if result.is_null() {
        return Ok(());
    }

    let c_result = unsafe { CStr::from_ptr(result) };
    let error = c_result.to_str()?.to_string();

    unsafe { go::FreeString(c_result.as_ptr()) };

    bail!(error);
}
--------------------------------------------------------------------------------
/mp2-common/src/merkle_tree/mod.rs:
--------------------------------------------------------------------------------
//! Circuits for proving Merkle Tree nodes recursively.

mod state_tree;

/// The trait of digest tree circuit
/// With this trait, both the arity circuit and multiset hashing circuit could
/// be reused in the same benchmark and testing functions.
pub trait DigestTreeCircuit<T> {
    /// Create a circuit instance for a leaf of Merkle tree.
    fn new_leaf(value: [u8; 32]) -> Self;

    /// Create a circuit instance for a branch of Merkle tree.
    fn new_branch(children: Vec<T>) -> Self;
}

pub use state_tree::StateTreeWires;
--------------------------------------------------------------------------------
/mp2-test/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "mp2_test"
version = "3.0.0"
edition = "2021"

[dependencies]
alloy.workspace = true
anyhow.workspace = true
env_logger.workspace = true
eth_trie.workspace = true
log.workspace = true
plonky2.workspace = true
plonky2_ecgfp5.workspace = true
rand.workspace = true
serde.workspace = true

mp2_common = { path = "../mp2-common" , version = "3.0.0" }
recursion_framework = { path = "../recursion-framework" , version = "3.0.0" }
ryhope = { path = "../ryhope" , version = "3.0.0" }

[features]
ci = []
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Create Release

on:
  push:
    branches:
      - main

jobs:
  create-release:
    runs-on: ubuntu-latest
    if: startsWith( github.event.head_commit.message, 'release:' )
    steps:
      - uses: actions/checkout@v4
      - name: Get version
        id: set-tag
        run: echo "tag=$(cat VERSION)" >> $GITHUB_OUTPUT
      - uses: ncipollo/release-action@v1
        with:
          allowUpdates: true
          omitBody: true
          commit: "main"
          tag: ${{ steps.set-tag.outputs.tag }}
--------------------------------------------------------------------------------
/recursion-framework/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "recursion_framework"
version = "3.0.0"
edition = "2021"

[dependencies]
anyhow.workspace = true
log.workspace = true
plonky2.workspace = true
serde.workspace = true
poseidon2_plonky2.workspace = true

mp2_common = { path = "../mp2-common" , version = "3.0.0" }

[dev-dependencies]
bincode.workspace = true
env_logger.workspace = true
plonky2_monolith.workspace = true
rstest.workspace = true
serial_test.workspace = true

[features]
original_poseidon = ["mp2_common/original_poseidon"]
--------------------------------------------------------------------------------
/gnark-utils/src/verify.rs:
--------------------------------------------------------------------------------
//! Initialize the verifier and verify the proofs.

use crate::{go, utils::handle_c_result};
use anyhow::Result;
use std::ffi::CString;

/// Initialize the verifier.
pub fn init_verifier(asset_dir: &str) -> Result<()> {
    let asset_dir = CString::new(asset_dir)?;

    let result = unsafe { go::InitVerifier(asset_dir.as_ptr()) };

    handle_c_result(result)
}

/// Verify the proof.
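/// The `proof` argument is the JSON-serialized `Groth16Proof`; the
/// groth16-framework verifier produces it with `serde_json::to_string`
/// before calling in here.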
pub fn verify(proof: &str) -> Result<()> {
    let proof = CString::new(proof)?;

    let result = unsafe { go::Verify(proof.as_ptr()) };

    handle_c_result(result)
}
--------------------------------------------------------------------------------
/mp2-v1/src/contract_extraction/mod.rs:
--------------------------------------------------------------------------------
use alloy::primitives::Address;
use mp2_common::{
    group_hashing::map_to_curve_point,
    utils::{Endianness, Packer, ToFields},
};
use plonky2_ecgfp5::curve::curve::Point as Digest;

mod api;
mod branch;
mod extension;
mod leaf;
mod public_inputs;

pub fn compute_metadata_digest(contract_addr: &Address) -> Digest {
    let packed_contract_address = contract_addr.as_slice().pack(Endianness::Big).to_fields();

    map_to_curve_point(&packed_contract_address)
}

pub use api::{build_circuits_params, generate_proof, CircuitInput, PublicParameters};
pub use public_inputs::PublicInputs;
--------------------------------------------------------------------------------
/groth16-framework/src/verifier/groth16.rs:
--------------------------------------------------------------------------------
//! The verifier used to verify the Groth16 proof.

use crate::proof::Groth16Proof;
use anyhow::Result;

/// Groth16 verifier
#[derive(Debug)]
pub struct Groth16Verifier;

impl Groth16Verifier {
    pub fn new(asset_dir: &str) -> Result<Self> {
        gnark_utils::init_verifier(asset_dir)?;

        Ok(Self)
    }

    /// Verify the proof. Returns Ok if it verifies successfully; otherwise
    /// returns an error.
    pub fn verify(&self, proof: &Groth16Proof) -> Result<()> {
        let proof = serde_json::to_string(proof)?;

        gnark_utils::verify(&proof)
    }
}
--------------------------------------------------------------------------------
/ryhope/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "ryhope"
version = "3.0.0"
edition = "2021"

[lib]
name = "ryhope"
path = "src/lib.rs"

[dependencies]
anyhow.workspace = true
bb8-postgres.workspace = true
bb8.workspace = true
delegate.workspace = true
futures.workspace = true
hex.workspace = true
itertools.workspace = true
log.workspace = true
postgres-types.workspace = true
serde.workspace = true
serde_json.workspace = true
thiserror.workspace = true
tokio-postgres.workspace = true
tokio.workspace = true
tracing.workspace = true

[dev-dependencies]
rand.workspace = true
sha256.workspace = true
simple_logger.workspace = true
--------------------------------------------------------------------------------
/parsil/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "parsil"
version = "3.0.0"
edition = "2021"

[lib]
name = "parsil"
path = "src/lib.rs"

[[bin]]
name = "parsil"
path = "src/main.rs"
required-features = ["cli"]

[dependencies]
alloy.workspace = true
anyhow.workspace = true
camelpaste.workspace = true
clap = { workspace = true, optional = true }
log.workspace = true
serde.workspace = true
serde_json.workspace = true
sqlparser.workspace = true
stderrlog = { workspace = true, optional = true }
thiserror.workspace = true

ryhope = { path = "../ryhope" , version = "3.0.0" }
verifiable-db = { path = "../verifiable-db" , version = "3.0.0" }

[features]
cli = ["dep:stderrlog", "dep:clap"]
--------------------------------------------------------------------------------
/groth16-framework/src/proof.rs:
--------------------------------------------------------------------------------
//! The Groth16 proof struct

use serde::{Deserialize, Serialize};

/// Groth16 proof
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct Groth16Proof {
    /// The proofs item is an array of [U256; 8], which should be passed to the
    /// `verifyProof` function of the Solidity verifier contract.
    pub proofs: Vec<String>,
    /// The inputs item is an array of [U256; 3], which should be passed to the
    /// `verifyProof` function of the Solidity verifier contract.
    pub inputs: Vec<String>,
    /// The original raw proof data, used for off-chain verification.
    pub raw_proof: String,
    /// The original raw public witness data, used for off-chain verification.
    pub raw_public_witness: String,
}
--------------------------------------------------------------------------------
/mp2-v1/src/final_extraction/mod.rs:
--------------------------------------------------------------------------------
pub(crate) mod api;
mod base_circuit;
mod dummy_circuit;
mod lengthed_circuit;
mod merge_circuit;
mod public_inputs;
mod simple_circuit;

pub use api::{CircuitInput, OffChainRootOfTrust, PublicParameters};
pub use public_inputs::PublicInputs;

pub(crate) use base_circuit::BaseCircuitProofInputs;
pub(crate) use dummy_circuit::DummyCircuit;
pub(crate) use lengthed_circuit::LengthedCircuitInput as LengthedCircuit;
pub(crate) use merge_circuit::MergeCircuitInput as MergeCircuit;
pub(crate) use simple_circuit::SimpleCircuitInput as SimpleCircuit;

/// The prefix to ensure the metadata digest will keep track of whether
/// we use this dummy circuit or not
pub(crate) const DUMMY_METADATA_DIGEST_PREFIX: &[u8] = b"DUMMY_EXTRACTION";
--------------------------------------------------------------------------------
/groth16-framework/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "groth16_framework"
version = "3.0.0"
edition = "2021"

[dependencies]
alloy.workspace = true
anyhow.workspace = true
hex.workspace = true
log.workspace = true
plonky2.workspace = true
plonky2x.workspace = true
revm.workspace = true
serde.workspace = true
serde_json.workspace = true

gnark-utils = { path = "../gnark-utils" , version = "3.0.0" }
mp2_common = { path = "../mp2-common" , version = "3.0.0" }

[dev-dependencies]
env_logger.workspace = true
itertools.workspace = true
rand.workspace = true
serial_test.workspace = true
sha2.workspace = true
mp2_test = { path = "../mp2-test" , version = "3.0.0" }

recursion_framework = { path = "../recursion-framework" , version = "3.0.0" }
verifiable-db = { path = "../verifiable-db" , version = "3.0.0" }
--------------------------------------------------------------------------------
/mp2-v1/tests/common/cases/mod.rs:
--------------------------------------------------------------------------------
//! Define test cases

use contract::Contract;
use mp2_v1::values_extraction::identifier_for_mapping_key_column;
use table_source::{ContractExtractionArgs, TableSource};

use super::table::Table;

pub mod contract;
pub mod indexing;
pub mod query;
pub mod slot_info;
pub mod table_source;

/// Test case definition
pub(crate) struct TableIndexing {
    pub(crate) table: Table,
    pub(crate) contract: Option<Contract>, // might be meaningless for off-chain data
    pub(crate) contract_extraction: Option<ContractExtractionArgs>, // might be meaningless for off-chain data
    pub(crate) source: TableSource,
    // The column over which we can run queries like `y > 64`. It is not the
    // address column, which we always assume to be the secondary index.
    pub(crate) value_column: String,
}
--------------------------------------------------------------------------------
/mp2-test/src/utils.rs:
--------------------------------------------------------------------------------
use alloy::primitives::U256;
use plonky2::hash::hash_types::{HashOut, RichField};
use plonky2_ecgfp5::curve::curve::{Point, WeierstrassPoint};
use rand::{
    distributions::{Distribution, Standard},
    thread_rng, Rng,
};

/// Generate a random vector.
pub fn random_vector<T>(size: usize) -> Vec<T>
where
    Standard: Distribution<T>,
{
    (0..size).map(|_| thread_rng().gen::<T>()).collect()
}

pub fn weierstrass_to_point(w: &WeierstrassPoint) -> Point {
    let p = Point::decode(w.encode()).expect("input weierstrass point invalid");
    assert_eq!(&p.to_weierstrass(), w);
    p
}

/// Generate a random Uint256.
pub fn gen_random_u256<R: Rng>(rng: &mut R) -> U256 {
    U256::from_limbs(rng.gen())
}

pub fn gen_random_field_hash<F: RichField>() -> HashOut<F> {
    HashOut::from(F::rand_array())
}
--------------------------------------------------------------------------------
/mp2-test/src/eth.rs:
--------------------------------------------------------------------------------
use std::env;

/// Get the Sepolia test URL.
pub fn get_sepolia_url() -> String {
    #[cfg(feature = "ci")]
    let url = env::var("CI_SEPOLIA").expect("CI_SEPOLIA env var not set");
    #[cfg(not(feature = "ci"))]
    let url =
        env::var("CI_SEPOLIA").unwrap_or("https://ethereum-sepolia-rpc.publicnode.com".to_string());
    url.to_string()
}

/// Get the Mainnet test URL.
pub fn get_mainnet_url() -> String {
    #[cfg(feature = "ci")]
    let url = std::env::var("CI_ETH").expect("CI_ETH env var not set");
    #[cfg(not(feature = "ci"))]
    let url = "https://eth.llamarpc.com";
    log::info!("Using mainnet url {url}");
    url.to_string()
}

pub fn get_holesky_url() -> String {
    let default_rpc_url = "https://rpc.holesky.ethpandaops.io".to_string();
    let url = env::var("CI_HOLESKY").unwrap_or(default_rpc_url);
    url.to_string()
}
--------------------------------------------------------------------------------
/gnark-utils/src/compile.rs:
--------------------------------------------------------------------------------
//! Compile and generate asset files from the circuit data.

use crate::{go, utils::handle_c_result};
use anyhow::Result;
use std::ffi::CString;

/// Compile the circuit data and generate the asset files of `r1cs.bin`,
/// `pk.bin`, `vk.bin` and `Verifier.sol`.
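/// Both circuit-data arguments are expected to be the serialized plonky2
/// circuit data (`common_circuit_data` and `verifier_only_circuit_data`),
/// passed through to the Go side as C strings.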
pub fn compile_and_generate_assets(
    common_circuit_data: &str,
    verifier_only_circuit_data: &str,
    dst_asset_dir: &str,
) -> Result<()> {
    let [common_circuit_data, verifier_only_circuit_data, dst_asset_dir] = [
        common_circuit_data,
        verifier_only_circuit_data,
        dst_asset_dir,
    ]
    .map(CString::new);

    let result = unsafe {
        go::CompileAndGenerateAssets(
            common_circuit_data?.as_ptr(),
            verifier_only_circuit_data?.as_ptr(),
            dst_asset_dir?.as_ptr(),
        )
    };

    handle_c_result(result)
}
--------------------------------------------------------------------------------
/mp2-common/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "mp2_common"
version = "3.0.0"
edition = "2021"

[dependencies]
alloy.workspace = true
anyhow.workspace = true
bincode.workspace = true
derive_more.workspace = true
eth_trie.workspace = true
ethereum-types.workspace = true
hashbrown.workspace = true
hex.workspace = true
itertools.workspace = true
log.workspace = true
num.workspace = true
plonky2.workspace = true
plonky2_crypto.workspace = true
plonky2_ecdsa.workspace = true
plonky2_ecgfp5.workspace = true
poseidon2_plonky2.workspace = true
rand.workspace = true
rlp.workspace = true
serde.workspace = true
sha3.workspace = true

[dev-dependencies]
hex.workspace = true
rand.workspace = true
rstest.workspace = true
tokio.workspace = true

mp2_test = { path = "../mp2-test" , version = "3.0.0" }

[features]
ci = ["mp2_test/ci"]
original_poseidon = []
--------------------------------------------------------------------------------
/parsil/src/parser.rs:
--------------------------------------------------------------------------------
use anyhow::*;
use log::*;
use sqlparser::{
    ast::{Query, Statement},
    dialect::GenericDialect,
    parser::Parser,
};

const DIALECT: GenericDialect = GenericDialect {};

pub fn parse(req: &str) -> Result<Query> {
    debug!("Parsing `{req}`");
    let mut parsed =
        Parser::parse_sql(&DIALECT, req).with_context(|| format!("trying to parse `{req}`"))?;

    ensure!(
        parsed.len() == 1,
        "expected 1 statement, found {}",
        parsed.len()
    );

    if let Statement::Query(ref mut query) = &mut parsed[0] {
        Ok(*query.clone())
    } else {
        bail!("expected query, found `{}`", parsed[0])
    }
}

#[cfg(test)]
mod test {
    use super::parse;

    #[test]
    fn test_parsing_query_underscore() {
        let req = "SELECT AVG(value / _totalSupply) FROM myTable;";
        parse(req).unwrap();
    }
}
--------------------------------------------------------------------------------
/.github/workflows/prepare-release.yml:
--------------------------------------------------------------------------------
name: Open a release PR
on:
  workflow_dispatch:
    inputs:
      bump:
        description: Release Level
        required: true
        type: choice
        options:
          - patch
          - minor
          - major

jobs:
  make-release-pr:
    permissions:
      id-token: write
      pull-requests: write
      contents: write
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: chainguard-dev/actions/setup-gitsign@main
      - name: Install tooling
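        # cargo-release drives the version bumps and git-cliff renders the
        # changelog (invoked from .github/changelog.sh during the release PR).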
        uses: taiki-e/install-action@v2
        with:
          tool: cargo-release,git-cliff
      - uses: cargo-bins/release-pr@v2
        with:
          version: ${{ inputs.bump }}
          github-token: ${{ secrets.GITHUB_TOKEN }}
          # Keep all the crates versions in sync
          crate-release-all: true
          pr-meta-comment: false
          pr-merge-strategy: "squash"
--------------------------------------------------------------------------------
/groth16-framework/tests/common/io.rs:
--------------------------------------------------------------------------------
//! Testing query input and output structs

use super::MAX_NUM_ITEMS_PER_OUTPUT;
use alloy::primitives::{B256, U256};
use serde::{Deserialize, Serialize};

/// Testing query input used to check with the public inputs in Solidity function
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct TestQueryInput {
    // Query limit parameter
    pub(crate) query_limit: u32,
    // Query offset parameter
    pub(crate) query_offset: u32,
    // Minimum block number
    pub(crate) min_block_number: u32,
    // Maximum block number
    pub(crate) max_block_number: u32,
    // Block hash
    pub(crate) block_hash: B256,
    // Computational hash
    pub(crate) computational_hash: B256,
    // User placeholder values
    pub(crate) user_placeholders: Vec<U256>,
}

/// Testing query output returned from the Solidity function
#[derive(Debug, Serialize, Deserialize)]
pub(crate) struct TestQueryOutput {
    pub(crate) total_matched_rows: u32,
    pub(crate) rows: Vec<[U256; MAX_NUM_ITEMS_PER_OUTPUT]>,
    pub(crate) error: u32,
}
--------------------------------------------------------------------------------
/verifiable-db/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "verifiable-db"
version = "3.0.0"
edition = "2021"

[dependencies]
num.workspace = true
alloy.workspace = true
anyhow.workspace = true
bincode.workspace = true
derive_more = { workspace = true, features = [ "into" ] }
git-version.workspace = true
itertools.workspace = true
log.workspace = true
plonky2.workspace = true
plonky2_crypto.workspace = true
plonky2_ecdsa.workspace = true
plonky2_ecgfp5.workspace = true
poseidon2_plonky2.workspace = true
rand.workspace = true
serde.workspace = true

mp2_common = { path = "../mp2-common" , version = "3.0.0" }
recursion_framework = { path = "../recursion-framework" , version = "3.0.0" }
ryhope = { path = "../ryhope" , version = "3.0.0" }
mp2_test = { path = "../mp2-test" , version = "3.0.0" }

[dev-dependencies]
futures.workspace = true
rand.workspace = true
serial_test.workspace = true
tokio.workspace = true

[features]
original_poseidon = ["mp2_common/original_poseidon"]
results_tree = [] # temporary features to disable compiling results_tree code by default, as it is still WiP
--------------------------------------------------------------------------------
/.github/workflows/pr.yml:
--------------------------------------------------------------------------------
name: "Lint PR"

on:
  pull_request:
    types:
      - opened
      - edited
      - synchronize
      - reopened

permissions:
  pull-requests: write

jobs:
  main:
    name: Validate PR title
    runs-on: ubuntu-latest
    steps:
      - uses: amannn/action-semantic-pull-request@v5
        id: lint_pr_title
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - uses: marocchino/sticky-pull-request-comment@v2
        if: always() && (steps.lint_pr_title.outputs.error_message != null)
        with:
          header: pr-title-lint-error
          message: |
            Please ensure that the PR title matches the [Conventional Commits specification](https://www.conventionalcommits.org/en/v1.0.0/).

            Details:

            ```
            ${{ steps.lint_pr_title.outputs.error_message }}
            ```

      # Delete a previous comment when the issue has been resolved
      - if: ${{ steps.lint_pr_title.outputs.error_message == null }}
        uses: marocchino/sticky-pull-request-comment@v2
        with:
          header: pr-title-lint-error
          delete: true
--------------------------------------------------------------------------------
/gnark-utils/go.mod:
--------------------------------------------------------------------------------
module main

go 1.20

require (
	github.com/consensys/gnark v0.9.1
	github.com/consensys/gnark-crypto v0.12.2-0.20231013160410-1f65e75b6dfb
	github.com/pkg/errors v0.9.1
	github.com/rs/zerolog v1.32.0
	github.com/succinctlabs/gnark-plonky2-verifier v0.1.0
)

require (
	github.com/bits-and-blooms/bitset v1.10.0 // indirect
	github.com/blang/semver/v4 v4.0.0 // indirect
	github.com/consensys/bavard v0.1.13 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/ethereum/go-ethereum v1.13.14 // indirect
	github.com/fxamacker/cbor/v2 v2.5.0 // indirect
	github.com/google/pprof v0.0.0-20230817174616-7a8ec2ada47b // indirect
	github.com/holiman/uint256 v1.2.4 // indirect
	github.com/mattn/go-colorable v0.1.13 // indirect
	github.com/mattn/go-isatty v0.0.19 // indirect
	github.com/mmcloughlin/addchain v0.4.0 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/stretchr/testify v1.8.4 // indirect
	github.com/x448/float16 v0.8.4 // indirect
	golang.org/x/crypto v0.17.0 // indirect
	golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // indirect
	golang.org/x/sys v0.16.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
	rsc.io/tmplfunc v0.0.3 // indirect
)
--------------------------------------------------------------------------------
/gnark-utils/build.rs:
--------------------------------------------------------------------------------
//! Compile Go functions

use std::{
    env,
    io::{self, Write},
};

fn main() {
    let lib_name = "go-gnark-utils";
    let out_dir = env::var("OUT_DIR").unwrap();

    if let Err(e) = gobuild::Build::new()
        .files(
            glob::glob("./lib/*.go")
                .unwrap()
                .map(|p| p.unwrap().to_string_lossy().to_string()),
        )
        .try_compile(lib_name)
    {
        if format!("{e}").starts_with("Failed to find tool.") {
            fail(" Failed to find Go. Please install Go 1.20.".to_string());
        } else {
            fail(format!("{e}"));
        }
    }

    // Files the lib depends on; changes to them should trigger a recompile.
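    // go.mod is watched too: bumping the Go dependencies must also trigger
    // a rebuild of the static library.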
    println!("cargo:rerun-if-changed=go.mod");
    let dep_files = glob::glob("./lib/*.go").unwrap().filter_map(|v| v.ok());
    for file in dep_files {
        println!("cargo:rerun-if-changed={}", file.to_str().unwrap());
    }

    // Links
    println!("cargo:rustc-link-search=native={out_dir}");
    println!("cargo:rustc-link-lib=static={lib_name}");
}

fn fail(message: String) {
    let _ = writeln!(
        io::stderr(),
        "\n\nError while building gnark-utils: {message}\n\n"
    );
    std::process::exit(1);
}
--------------------------------------------------------------------------------
/.github/workflows/doc.yml:
--------------------------------------------------------------------------------
name: Generate cargo docs

# Only generate docs on main
on:
  push:
    branches: [ "main" ]

jobs:
  docs:
    name: Docs
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Setup Rust
        uses: dtolnay/rust-toolchain@stable

      - name: Configure cache
        uses: Swatinem/rust-cache@v2

      - name: Setup pages
        id: pages
        uses: actions/configure-pages@v4

      - name: Clean docs folder
        run: cargo clean --doc

      - name: Build docs
        run: cargo doc --no-deps

      - name: Remove lock file
        run: rm target/doc/.lock

      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: target/doc

  deploy:
    name: Deploy
    runs-on: ubuntu-latest
    needs: docs
    # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
    permissions:
      pages: write # to deploy to Pages
      id-token: write # to verify the deployment originates from an appropriate source
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
--------------------------------------------------------------------------------
/groth16-framework/src/verifier/evm.rs:
--------------------------------------------------------------------------------
//! The verifier used to test the Solidity verification.

use crate::{
    evm::{executor::deploy_and_call, utils::compile_solidity},
    utils::read_file,
};
use anyhow::Result;

/// EVM verifier
#[derive(Debug)]
pub struct EVMVerifier {
    /// The compiled deployment code of the Solidity verifier contract
    deployment_code: Vec<u8>,
}

impl EVMVerifier {
    pub fn new(solidity_file_path: &str) -> Result<Self> {
        // Read the Solidity code from file.
        let solidity_code = read_file(solidity_file_path)?;

        // Compile the Solidity code.
        let deployment_code = compile_solidity(&solidity_code);

        Ok(Self { deployment_code })
    }

    /// Verify the calldata with the Solidity verifier contract.
    /// Return the gas_used and the output bytes on success.
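    /// The calldata is expected to be the ABI-encoded call to the contract's
    /// `verifyProof` function, built from the `proofs` and `inputs` items of
    /// a `Groth16Proof` (see `src/proof.rs`).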
    pub fn verify(&self, calldata: Vec<u8>) -> Result<(u64, Vec<u8>)> {
        match deploy_and_call(self.deployment_code.clone(), calldata) {
            Ok(result) => {
                log::debug!(
                    "EVM verification succeeded: gas_used = {}, output = {:?}",
                    result.0,
                    result.1
                );
                Ok(result)
            }
            Err(error) => {
                log::error!("EVM verification failed: {error}");
                Err(error)
            }
        }
    }
}
--------------------------------------------------------------------------------
/groth16-framework/src/evm/utils.rs:
--------------------------------------------------------------------------------
//! EVM utility functions
//! Copied and modified from [snark-verifier](https://github.com/privacy-scaling-explorations/snark-verifier).

use std::{
    io::{ErrorKind, Write},
    process::{Command, Stdio},
};

/// Compile given Solidity `code` into deployment bytecode.
pub fn compile_solidity(code: &[u8]) -> Vec<u8> {
    let mut cmd = match Command::new("solc")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .arg("--bin")
        .arg("-")
        .spawn()
    {
        Ok(cmd) => cmd,
        Err(err) if err.kind() == ErrorKind::NotFound => {
            panic!("Command 'solc' not found");
        }
        Err(err) => {
            panic!("Failed to spawn cmd with command 'solc':\n{err}");
        }
    };

    cmd.stdin.take().unwrap().write_all(code).unwrap();
    let output = cmd.wait_with_output().unwrap().stdout;
    let binary = *split_by_ascii_whitespace(&output).last().unwrap();
    hex::decode(binary).unwrap()
}

fn split_by_ascii_whitespace(bytes: &[u8]) -> Vec<&[u8]> {
    let mut split = Vec::new();
    let mut start = None;
    for (idx, byte) in bytes.iter().enumerate() {
        if byte.is_ascii_whitespace() {
            if let Some(start) = start.take() {
                split.push(&bytes[start..idx]);
            }
        } else if start.is_none() {
            start = Some(idx);
        }
    }
    split
}
--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
on:
  pull_request:
  push:
    branches:
      - main


name: Check, Test, and Format

jobs:
  check-format:
    runs-on: ubuntu-latest
    steps:
      - uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          toolchain: 'nightly-2025-05-22'
          components: rustfmt,clippy
      - uses: actions/setup-go@v5
        with:
          go-version: '1.22'
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: check
        run: cargo check --all --tests
      - name: clippy
        run: cargo clippy --all-targets --all -- -D warnings
      - name: fmt
        run: cargo fmt --all -- --check

  test:
    name: Test Suite
    runs-on: ['nix-128g']
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Generate a random port number
        run: echo PGSQL_PORT=$(shuf -i 5000-30000 -n1) > .env
      - name: Install foundry
        uses: foundry-rs/foundry-toolchain@v1
      - name: Put cargo in PATH
        shell: devenv shell bash -- -e {0}
        run: echo "${HOME}/.cargo/bin" >> $GITHUB_PATH
      - name: Run cargo test
        run: devenv test
        env:
          RUST_LOG: "info"
          RUST_MIN_STACK: 10485760
          CI_RPC_URL: ${{ secrets.CI_RPC_URL }}
          CI_SEPOLIA: ${{ secrets.CI_SEPOLIA }}
          CI_ETH: ${{ secrets.CI_ETH }}
          LPN_PARAMS_DIR: $HOME/ci_params/
          LPN_PARAMS_REBUILD: "true"
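          # The random PGSQL_PORT generated above avoids port collisions
          # between concurrent CI runs; locally the port may be left empty
          # in .env, which defaults to 5432.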
--------------------------------------------------------------------------------
/ryhope/README.org:
--------------------------------------------------------------------------------
* Ryhope - A Forest Library

Ryhope aims at centralizing all the tree-related needs of Lagrange projects. Its
core feature is to juggle all the ways trees are used in these projects.

** Interface
Storage trees are exposed in two different ways to the components using them:
- as a key-value database to the components handling data collection and transformation;
- as tree structures to the ZK system, which needs to prove tree transformation, inclusion, etc.

To this end, two main traits are provided:
- the [[file:src/storage/mod.rs][Storage]] trait presents a KV database-like interface to components just needing to interact with data;
- the [[file:src/tree/mod.rs][TreeTopology]] trait presents a classical tree interface, targeted at the proving system.
** Tree Types
Different tree types are proposed, depending on the pattern of the keys stored:
- [[file:src/tree/sbbst.rs][self-balanced BST]] :: optimal in the case of monotonously increasing sequential keys;
- [[file:src/tree/scapegoat.rs][scapegoat tree]] :: aimed at pseudo-random keys, with no discernible patterns.

More types may be implemented on an on-demand basis.
** Storage
Storage trees may be backed in multiple fashions, adapted to different workflows. For now, the following backends are available:
- [[file:src/storage/memory.rs][in-memory]] :: fit for ephemeral experimental or testing purposes, where simplicity of usage is important and lasting storage does not matter;
- [TODO] PostgreSQL :: designed for production usage, where a drop in performance is an acceptable price to pay for ACID guarantees, parallelism, and long-lasting storage.
--------------------------------------------------------------------------------
/mp2-v1/src/length_extraction/mod.rs:
--------------------------------------------------------------------------------
//! Length extraction circuits
//!
//! # Leaf
//!
//! The leaf extraction circuit derives the MPT key from the length slot and replaces the current
//! key pointer with the supplied witness. Subsequently, the circuit computes the latest hash along
//! such traversal path, and calculates the RLP headers.
//!
//! It exposes as public input a curve point commitment derived from both the length slot and the
//! unconstrained variable slot. The circuit exposes the Keccak hash of the current node (H), the
//! DM commitment, MPT key (K), MPT key pointer for the next tree level (T), and the decoded leaf
//! node length (N).
//!
//! # Extension
//!
//! The extension node circuit accepts a branch child proof as input and extracts the expected
//! branch node value, which serves as the root value in the traversal path up to that point.
//! Subsequently, it navigates through the MPT based on the consumed key nibbles, updating the next
//! tree level accordingly (T).
//!
//! # Branch
//!
//! The branch node traverses the tree until it reaches the MPT root node, represented by a T value
//! of -1. At each level, it returns the new root and an updated T value.

mod api;
mod branch;
mod extension;
mod leaf;
mod public_inputs;

#[cfg(test)]
mod tests;

pub use api::{utils::compute_metadata_digest, LengthCircuitInput, PublicParameters};
pub use branch::{BranchLengthCircuit, BranchLengthWires};
pub use extension::{ExtensionLengthCircuit, ExtensionLengthWires};
pub use leaf::{LeafLengthCircuit, LeafLengthWires};
pub use public_inputs::PublicInputs;
--------------------------------------------------------------------------------
/mp2-test/src/mpt_sequential.rs:
--------------------------------------------------------------------------------
use eth_trie::{EthTrie, MemoryDB, Trie};
use rand::{thread_rng, Rng};
use std::sync::Arc;

/// Simply the maximum number of nibbles a key can have.
const MAX_KEY_NIBBLE_LEN: usize = 64;

/// Generate a random storage trie and a key. The MPT proof corresponding to
/// that key is guaranteed to be of DEPTH length. Each leaf value in the trie
/// is VALUE_LEN bytes long.
/// The returned key is RLP encoded.
pub fn generate_random_storage_mpt<const DEPTH: usize, const VALUE_LEN: usize>(
) -> (EthTrie<MemoryDB>, Vec<u8>) {
    let memdb = Arc::new(MemoryDB::new(true));
    let mut trie = EthTrie::new(Arc::clone(&memdb));
    let mut keys = Vec::new();
    let right_key_idx: usize;
    // loop: insert random elements as long as a randomly selected proof is not of the right length
    loop {
        println!(
            "[+] Random mpt: insertion of {} elements so far...",
            keys.len()
        );
        let key = thread_rng().gen::<[u8; MAX_KEY_NIBBLE_LEN / 2]>().to_vec();
        let random_bytes = (0..VALUE_LEN)
            .map(|_| thread_rng().gen::<u8>())
            .collect::<Vec<u8>>();
        trie.insert(&key, &random_bytes).expect("can't insert");
        keys.push(key.clone());
        trie.root_hash().expect("root hash problem");
        if let Some(idx) = (0..keys.len()).find(|k| {
            let ke = &keys[*k];
            let proof = trie.get_proof(ke).unwrap();
            proof.len() == DEPTH
        }) {
            right_key_idx = idx;
            break;
        }
    }
    (trie, keys[right_key_idx].to_vec())
}
--------------------------------------------------------------------------------
/mp2-v1/tests/common/block_extraction.rs:
--------------------------------------------------------------------------------
use alloy::primitives::U256;
use anyhow::Result;
use mp2_common::{
    eth::BlockUtil,
    proof::deserialize_proof,
    utils::{Endianness, Packer, ToFields},
    C, D, F,
};
use mp2_v1::{api, block_extraction, indexing::block::BlockPrimaryIndex};

use super::TestContext;

impl TestContext {
    pub(crate) async fn prove_block_extraction(&self, bn: BlockPrimaryIndex) -> Result<Vec<u8>> {
        let block = self
            .query_block_at(alloy::eips::BlockNumberOrTag::Number(bn as u64))
            .await;
        let buffer = block.rlp();
        let proof = self.b.bench("indexing::extraction::block", || {
            api::generate_proof(
                self.params(),
                api::CircuitInput::BlockExtraction(
                    block_extraction::CircuitInput::from_block_header(buffer.clone()),
                ),
            )
        })?;

        let pproof = deserialize_proof::<F, C, D>(&proof)?;
        let pi = block_extraction::PublicInputs::from_slice(&pproof.public_inputs);
        let block_number = U256::from(block.header.number).to_fields();
        let block_hash = block
            .header
            .hash
            .as_slice()
            .pack(Endianness::Little)
            .to_fields();
        let prev_block_hash = block
            .header
            .parent_hash
            .as_slice()
            .pack(Endianness::Little)
            .to_fields();

        assert_eq!(pi.block_number_raw(), &block_number);
        assert_eq!(pi.block_hash_raw(), block_hash);
        assert_eq!(pi.prev_block_hash_raw(), prev_block_hash);

        Ok(proof)
    }
}
--------------------------------------------------------------------------------
/gnark-utils/src/prove.rs:
--------------------------------------------------------------------------------
//! Initialize the prover and generate the proofs.

use crate::{go, utils::handle_c_result};
use anyhow::{bail, Result};
use base64::prelude::{Engine, BASE64_STANDARD};
use std::ffi::{CStr, CString};

/// Initialize the prover.
pub fn init_prover(asset_dir: &str) -> Result<()> {
    let asset_dir = CString::new(asset_dir)?;

    let result = unsafe { go::InitProver(asset_dir.as_ptr()) };

    handle_c_result(result)
}

/// Initialize the prover from bytes.
pub fn init_prover_from_bytes(r1cs: Vec<u8>, pk: Vec<u8>) -> Result<()> {
    // Manually drop the large Vecs before calling the Go function below;
    // keeping them alive could cause an out-of-memory error.
    let base64_r1cs = CString::new(BASE64_STANDARD.encode(&r1cs))?;
    drop(r1cs);
    let base64_pk = CString::new(BASE64_STANDARD.encode(&pk))?;
    drop(pk);

    let result = unsafe { go::InitProverFromBytes(base64_r1cs.as_ptr(), base64_pk.as_ptr()) };

    handle_c_result(result)
}

/// Generate the proof.
pub fn prove(verifier_only_circuit_data: &str, proof_with_public_inputs: &str) -> Result<String> {
    let [verifier_only_circuit_data, proof_with_public_inputs] =
        [verifier_only_circuit_data, proof_with_public_inputs].map(CString::new);

    let result = unsafe {
        go::Prove(
            verifier_only_circuit_data?.as_ptr(),
            proof_with_public_inputs?.as_ptr(),
        )
    };

    if result.is_null() {
        bail!("Failed to generate the proof");
    }

    let c_proof = unsafe { CStr::from_ptr(result) };
    let proof = c_proof.to_str()?.to_string();

    unsafe { go::FreeString(c_proof.as_ptr()) };

    Ok(proof)
}
--------------------------------------------------------------------------------
/mp2-v1/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "mp2_v1" # TODO: fix the name to a meaningful one.
3 | version = "3.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | alloy.workspace = true 8 | anyhow.workspace = true 9 | bb8-postgres.workspace = true 10 | bb8.workspace = true 11 | bincode.workspace = true 12 | derive_more.workspace = true 13 | eth_trie.workspace = true 14 | futures.workspace = true 15 | hashbrown.workspace = true 16 | hex.workspace = true 17 | itertools.workspace = true 18 | log.workspace = true 19 | paste.workspace = true 20 | plonky2.workspace = true 21 | plonky2_crypto.workspace = true 22 | plonky2_ecdsa.workspace = true 23 | plonky2_ecgfp5.workspace = true 24 | poseidon2_plonky2.workspace = true 25 | rand.workspace = true 26 | rlp.workspace = true 27 | serde.workspace = true 28 | serde_json.workspace = true 29 | tokio-postgres.workspace = true 30 | tracing.workspace = true 31 | 32 | mp2_common = { path = "../mp2-common" , version = "3.0.0" } 33 | recursion_framework = { path = "../recursion-framework" , version = "3.0.0" } 34 | ryhope = { path = "../ryhope" , version = "3.0.0" } 35 | parsil = { path = "../parsil" , version = "3.0.0" } 36 | verifiable-db = { path = "../verifiable-db" , version = "3.0.0" } 37 | 38 | [dev-dependencies] 39 | alloy.workspace = true 40 | bb8-postgres.workspace = true 41 | bb8.workspace = true 42 | bincode.workspace = true 43 | csv.workspace = true 44 | env_logger.workspace = true 45 | envconfig.workspace = true 46 | futures.workspace = true 47 | hex.workspace = true 48 | jammdb.workspace = true 49 | lazy_static.workspace = true 50 | rand_chacha.workspace = true 51 | serde_json.workspace = true 52 | serial_test.workspace = true 53 | sqlparser.workspace = true 54 | test-log.workspace = true 55 | testfile.workspace = true 56 | tokio-postgres.workspace = true 57 | tokio.workspace = true 58 | 59 | mp2_test = { path = "../mp2-test" , version = "3.0.0" } 60 | parsil = { path = "../parsil" , version = "3.0.0" } 61 | 62 | [features] 63 | original_poseidon = ["mp2_common/original_poseidon"] 64 | -------------------------------------------------------------------------------- /gnark-utils/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Build Go functions and export to Rust 2 | 3 | mod compile; 4 | mod prove; 5 | mod utils; 6 | mod verify; 7 | 8 | pub use compile::compile_and_generate_assets; 9 | pub use prove::{init_prover, init_prover_from_bytes, prove}; 10 | pub use verify::{init_verifier, verify}; 11 | 12 | mod go { 13 | use std::os::raw::c_char; 14 | 15 | extern "C" { 16 | /// Compile and generate the asset files from the circuit data to the 17 | /// specified dir. The generated files are `r1cs.bin`, `pk.bin`, 18 | /// `vk.bin` and `Verifier.sol`. 19 | pub fn CompileAndGenerateAssets( 20 | common_circuit_data: *const c_char, 21 | verifier_only_circuit_data: *const c_char, 22 | dst_asset_dir: *const c_char, 23 | ) -> *const c_char; 24 | 25 | /// Initialize the prover. The asset dir must include `r1cs.bin` and 26 | /// `pk.bin`. 27 | pub fn InitProver(asset_dir: *const c_char) -> *const c_char; 28 | 29 | /// Initialize the prover from bytes. Both `r1cs` and `pk` bytes are 30 | /// encoded into Base64. 31 | pub fn InitProverFromBytes( 32 | base64_r1cs: *const c_char, 33 | base64_pk: *const c_char, 34 | ) -> *const c_char; 35 | 36 | /// Generate the proof from data. The InitProver function must be called 37 | /// before. 38 | pub fn Prove( 39 | verifier_only_circuit_data: *const c_char, 40 | proof_with_public_inputs: *const c_char, 41 | ) -> *const c_char; 42 | 43 | /// Initialize the verifier. 
The asset dir must include `vk.bin`. 44 | pub fn InitVerifier(asset_dir: *const c_char) -> *const c_char; 45 | 46 | /// Verify the proof. Return null if it's verified successfully, 47 | /// otherwise it returns an error string. 48 | pub fn Verify(proof: *const c_char) -> *const c_char; 49 | 50 | /// Free the C String returned from Go to Rust. 51 | pub fn FreeString(s: *const c_char); 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /mp2-v1/tests/common/values_extraction.rs: -------------------------------------------------------------------------------- 1 | //! Test utilities for Values Extraction (C.1) 2 | 3 | use super::{storage_trie::TestStorageTrie, TestContext}; 4 | use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::Provider}; 5 | use itertools::Itertools; 6 | use log::info; 7 | use mp2_common::{mpt_sequential::utils::bytes_to_nibbles, F}; 8 | use mp2_v1::values_extraction::{public_inputs::PublicInputs, StorageSlotInfo}; 9 | use plonky2::field::types::Field; 10 | 11 | impl TestContext { 12 | /// Generate the Values Extraction proof for single or mapping variables. 13 | pub(crate) async fn prove_values_extraction( 14 | &self, 15 | contract_address: &Address, 16 | bn: BlockNumberOrTag, 17 | slots: &[StorageSlotInfo], 18 | ) -> Vec { 19 | // Initialize the test trie. 20 | let mut trie = TestStorageTrie::new(); 21 | info!("Initialized the test storage trie"); 22 | 23 | // Query the slot and add the node path to the trie. 24 | for slot_info in slots { 25 | trie.query_proof_and_add_slot(self, contract_address, bn, slot_info.clone()) 26 | .await; 27 | } 28 | 29 | let chain_id = self.rpc.get_chain_id().await.unwrap(); 30 | let proof_value = trie.prove_value(contract_address, chain_id, self.params(), &self.b); 31 | 32 | // Check the public inputs. 33 | let pi = PublicInputs::new(&proof_value.proof().public_inputs); 34 | assert_eq!(pi.root_hash(), trie.root_hash()); 35 | assert_eq!(pi.n(), F::from_canonical_usize(slots.len())); 36 | { 37 | let exp_key = slots[0].slot().mpt_key_vec(); 38 | let exp_key = bytes_to_nibbles(&exp_key) 39 | .into_iter() 40 | .map(F::from_canonical_u8) 41 | .collect_vec(); 42 | 43 | let (key, ptr) = pi.mpt_key_info(); 44 | assert_eq!(key, exp_key); 45 | assert_eq!(ptr, F::NEG_ONE); 46 | } 47 | 48 | proof_value.serialize().unwrap() 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /mp2-common/src/public_inputs.rs: -------------------------------------------------------------------------------- 1 | //! Common public input types 2 | 3 | use std::ops::Range; 4 | 5 | use crate::types::CBuilder; 6 | 7 | /// Public input range for each item 8 | pub type PublicInputRange = Range; 9 | 10 | /// Public input common trait 11 | pub trait PublicInputCommon { 12 | /// The slices within the public inputs arguments of this structure encompass the following 13 | /// ranges, which correspond to the logical attributes of the circuit. 14 | const RANGES: &'static [PublicInputRange]; 15 | 16 | /// A user-defined function that registers the supplied arguments into [CBuilder]. 17 | /// 18 | /// This function is not intended for use during circuit definition. Instead, please utilize 19 | /// the [PublicInputCommon::register] function for such purposes. 20 | fn register_args(&self, cb: &mut CBuilder); 21 | 22 | /// Registers the provided arguments as public inputs of the circuit. 
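///
/// A hypothetical implementation with a single four-field output could look
/// like this (sketch only; `MyPublicInputs` and its `targets` field are
/// illustrative, not part of this crate):
/// ```ignore
/// impl PublicInputCommon for MyPublicInputs {
///     const RANGES: &'static [PublicInputRange] = &[0..4];
///
///     fn register_args(&self, cb: &mut CBuilder) {
///         // Registers exactly four targets, matching the single 0..4 range.
///         cb.register_public_inputs(&self.targets);
///     }
/// }
/// ```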
23 |     ///
24 |     /// It will perform a validation, asserting that the number of registered public
25 |     /// inputs matches the total length of the defined ranges.
26 |     fn register(&self, cb: &mut CBuilder) {
27 |         let len: usize = Self::RANGES.iter().map(|r| r.end - r.start).sum();
28 |         let initial = cb.num_public_inputs();
29 | 
30 |         self.register_args(cb);
31 | 
32 |         let dif = cb.num_public_inputs() - initial;
33 | 
34 |         // This assertion can be replaced with `debug_assert_eq` in production environments to
35 |         // prevent runtime overhead. The runtime overhead in normal cases is expected to be
36 |         // insignificant, while maintaining this check enhances test robustness.
37 |         //
38 |         // If multiple circuits utilizing the same proving key have overlapping public inputs
39 |         // (i.e., the same target is used as public input in each circuit), potential issues may
40 |         // arise. However, such a scenario is unlikely to occur under normal circumstances.
41 |         assert_eq!(dif, len, "The number of registered public inputs {dif} doesn't match the expected ranges length {len}.");
42 |     }
43 | }
--------------------------------------------------------------------------------
/parsil/src/lib.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Context;
2 | use anyhow::Result;
3 | use executor::TranslatedQuery;
4 | use symbols::ContextProvider;
5 | pub use utils::parse_and_validate;
6 | pub use utils::ParsilSettings;
7 | pub use utils::PlaceholderSettings;
8 | pub use utils::DEFAULT_MAX_BLOCK_PLACEHOLDER;
9 | pub use utils::DEFAULT_MIN_BLOCK_PLACEHOLDER;
10 | use verifiable_db::query::utils::QueryBounds;
11 | 
12 | pub mod assembler;
13 | pub mod bracketer;
14 | pub mod errors;
15 | pub mod executor;
16 | mod expand;
17 | pub mod isolator;
18 | mod parser;
19 | mod placeholders;
20 | pub mod queries;
21 | pub mod symbols;
22 | #[cfg(test)]
23 | mod tests;
24 | pub mod utils;
25 | mod validate;
26 | mod visitor;
27 | 
28 | // required for enforcing that the right number of placeholders is given during a query request
29 | pub use placeholders::gather_placeholders as placeholders_set;
30 | 
31 | /// Given an SQL query textual representation, ensure it satisfies all the
32 | /// criteria imposed by the current proving architecture.
33 | pub fn check<C: ContextProvider>(query: &str, settings: &ParsilSettings<C>) -> Result<()> {
34 |     parse_and_validate(query, settings).map(|_| ())
35 | }
36 | 
37 | /// Generate the SQL queries to fetch the keys and blocks where the conditions on
38 | /// the primary (and potentially secondary) index are satisfied.
39 | pub fn keys_in_index_boundaries<C: ContextProvider>(
40 |     query: &str,
41 |     settings: &ParsilSettings<C>,
42 |     bounds: &QueryBounds,
43 | ) -> Result<TranslatedQuery> {
44 |     let mut q = parse_and_validate(query, settings).context("while validating query")?;
45 |     q = isolator::isolate(&q, settings, bounds).context("while isolating indices")?;
46 |     executor::generate_query_keys(&mut q, settings).context("while generating query keys")
47 | }
48 | 
49 | /// Returns whether the given string is a valid column or table name.
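///
/// # Examples (illustrative)
/// ```ignore
/// assert!(is_valid_name("block_number").is_ok());
/// assert!(is_valid_name("1st_column").is_err()); // must start with a letter
/// assert!(is_valid_name("foo-bar").is_err());    // only alphanumerics and '_'
/// ```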
50 | pub fn is_valid_name(name: &str) -> anyhow::Result<()> {
51 |     anyhow::ensure!(!name.is_empty(), "empty table name");
52 |     anyhow::ensure!(
53 |         name.chars().next().unwrap().is_ascii_alphabetic(),
54 |         "table name must start with a letter"
55 |     );
56 |     anyhow::ensure!(
57 |         name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_'),
58 |         "invalid character in table name"
59 |     );
60 | 
61 |     Ok(())
62 | }
--------------------------------------------------------------------------------
/mp2-v1/tests/common/benchmarker.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | use envconfig::Envconfig;
3 | use log::info;
4 | use std::{fs::File, path::PathBuf};
5 | 
6 | use super::context::TestContextConfig;
7 | 
8 | pub struct Benchmarker {
9 |     csv_path: PathBuf,
10 | }
11 | 
12 | const DEFAULT_BENCH_FILE: &str = "bench.csv";
13 | 
14 | impl Benchmarker {
15 |     pub fn new_from_env() -> Result<Self> {
16 |         let cfg = TestContextConfig::init_from_env()?;
17 |         let path = cfg
18 |             .params_dir
19 |             .expect("we need a config folder to run the integrated test");
20 |         let mut path = PathBuf::from(path);
21 |         path.push(DEFAULT_BENCH_FILE);
22 |         Self::new_from_path(path)
23 |     }
24 | 
25 |     pub fn new_from_path(path: PathBuf) -> Result<Self> {
26 |         if !path.exists() {
27 |             // only write the header if the file doesn't exist yet
28 |             let writer = File::options().create(true).append(true).open(&path)?;
29 |             let mut wtr = csv::Writer::from_writer(writer);
30 |             wtr.write_record(["name", "time"])?;
31 |         }
32 |         info!("Benchmarker set up to write output to {path:?}");
33 |         Ok(Self { csv_path: path })
34 |     }
35 | 
36 |     pub fn bench<T, F>(&self, name: &str, f: F) -> Result<T>
37 |     where
38 |         F: FnOnce() -> Result<T>,
39 |     {
40 |         let now = std::time::Instant::now();
41 |         let output = f()?;
42 |         let elapsed = now.elapsed().as_millis();
43 |         self.write_to_csv(name, elapsed)?;
44 |         Ok(output)
45 |     }
46 | 
47 |     pub fn write_to_csv(&self, name: &str, elapsed: u128) -> Result<()> {
48 |         let writer = File::options().append(true).open(&self.csv_path)?;
49 |         let mut wtr = csv::Writer::from_writer(writer);
50 |         wtr.write_record([name, &elapsed.to_string()])?;
51 |         wtr.flush()?;
52 |         Ok(())
53 |     }
54 | }
55 | 
56 | #[cfg(test)]
57 | mod test {
58 |     use super::Benchmarker;
59 |     use anyhow::Result;
60 |     #[test]
61 |     fn benchmarker() -> Result<()> {
62 |         let path = testfile::generate_name();
63 |         let b = Benchmarker::new_from_path(path)?;
64 |         b.bench("test_fun", || {
65 |             let _total: u32 = (0..10000).sum();
66 |             Ok(())
67 |         })?;
68 |         Ok(())
69 |     }
70 | }
--------------------------------------------------------------------------------
/mp2-v1/tests/common/length_extraction.rs:
--------------------------------------------------------------------------------
1 | use alloy::{eips::BlockNumberOrTag, primitives::Address, providers::Provider};
2 | use log::info;
3 | use mp2_common::{
4 |     eth::StorageSlot, mpt_sequential::utils::bytes_to_nibbles, proof::ProofWithVK, types::GFp,
5 | };
6 | use mp2_v1::{length_extraction::PublicInputs, values_extraction::StorageSlotInfo};
7 | use plonky2::field::types::Field;
8 | 
9 | use crate::common::storage_trie::TestStorageTrie;
10 | 
11 | use super::TestContext;
12 | 
13 | impl TestContext {
14 |     /// Generate the Length Extraction (C.2) proof for single variables.
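    ///
    /// Hypothetical call site (sketch; the contract address, slot info and
    /// expected length value all come from the test setup):
    /// ```ignore
    /// let proof = ctx
    ///     .prove_length_extraction(&contract, BlockNumberOrTag::Latest, slot_info, 2)
    ///     .await;
    /// ```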
15 |     #[allow(dead_code)]
16 |     pub(crate) async fn prove_length_extraction(
17 |         &self,
18 |         contract_address: &Address,
19 |         bn: BlockNumberOrTag,
20 |         slot_info: StorageSlotInfo,
21 |         value: u8,
22 |     ) -> ProofWithVK {
23 |         // Initialize the test trie.
24 |         let mut trie = TestStorageTrie::new();
25 |         info!("Initialized the test storage trie");
26 | 
27 |         let slot = slot_info.slot().slot();
28 | 
29 |         // Query the slot and add the node path to the trie.
30 |         trie.query_proof_and_add_slot(self, contract_address, bn, slot_info)
31 |             .await;
32 |         let chain_id = self.rpc.get_chain_id().await.unwrap();
33 |         let proof = trie.prove_length(contract_address, chain_id, value, self.params(), &self.b);
34 | 
35 |         // Check the public inputs.
36 |         let pi = PublicInputs::from_slice(&proof.proof().public_inputs);
37 |         let root: Vec<_> = trie
38 |             .root_hash()
39 |             .into_iter()
40 |             .map(GFp::from_canonical_u32)
41 |             .collect();
42 | 
43 |         assert_eq!(pi.root_hash_raw(), &root, "root of the trie should match");
44 | 
45 |         {
46 |             let exp_key = StorageSlot::Simple(slot as usize).mpt_key_vec();
47 |             let exp_key: Vec<_> = bytes_to_nibbles(&exp_key)
48 |                 .into_iter()
49 |                 .map(GFp::from_canonical_u8)
50 |                 .collect();
51 | 
52 |             assert_eq!(
53 |                 pi.mpt_key(),
54 |                 exp_key,
55 |                 "MPT key is immutable for the whole path"
56 |             );
57 |             assert_eq!(
58 |                 pi.mpt_key_pointer(),
59 |                 &GFp::NEG_ONE,
60 |                 "at root, pointer should be -1"
61 |             );
62 |         }
63 | 
64 |         proof
65 |     }
66 | }
--------------------------------------------------------------------------------
/devenv.nix:
--------------------------------------------------------------------------------
1 | { pkgs, lib, config, inputs, ... }:
2 | 
3 | let
4 |   # return `s` if it is not empty, `default` otherwise.
5 |   orDefault = s: default: if builtins.stringLength s == 0 then default else s;
6 | in
7 | {
8 |   cachix.enable = false;
9 | 
10 |   # https://devenv.sh/packages/
11 |   packages = [ pkgs.git pkgs.rustup pkgs.figlet pkgs.openssl pkgs.pkg-config pkgs.cargo-limit pkgs.awscli2 pkgs.perl ]
12 |     ++ lib.optionals config.devenv.isTesting [ pkgs.docker ]
13 |     ++ lib.optionals pkgs.stdenv.targetPlatform.isDarwin [
14 |       pkgs.libiconv
15 |       pkgs.darwin.apple_sdk.frameworks.SystemConfiguration
16 |       pkgs.darwin.apple_sdk.frameworks.AppKit
17 |     ];
18 | 
19 |   dotenv.enable = true;
20 | 
21 |   enterShell = ''figlet -f slant "MR2 loaded"'';
22 | 
23 |   # Env. variables
24 |   env = {
25 |     # Rust debugging
26 |     RUST_BACKTRACE = 1;
27 |     RUST_LOG = "debug";
28 | 
29 |     # Required for Rust linking to OpenSSL
30 |     OPENSSL_DEV = pkgs.openssl.dev;
31 | 
32 |     # Make Go dependencies RW
33 |     GOFLAGS = "-modcacherw";
34 | 
35 |     DB_URL = "host=localhost dbname=storage port=${builtins.toString config.env.PGPORT}";
36 |   };
37 | 
38 |   # Use a DB_URL tuned for the dockerized processes.postgres-ci
39 |   enterTest = ''
40 |     cargo test --features ci -- --test-threads 16
41 |   '';
42 | 
43 |   # Spawn a local PgSQL instance iff we are not in test mode (e.g. when running
44 |   # `devenv up`) for development purposes.
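  # For example, with `PGSQL_PORT=6543` set in `.env`, the instance below
  # listens on 6543; with the variable left empty, `orDefault` falls back to
  # the stock 5432.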
45 |   services.postgres = {
46 |     enable = true;
47 |     listen_addresses = "127.0.0.1";
48 |     port = lib.strings.toInt (orDefault config.env.PGSQL_PORT "5432");
49 |     settings = {
50 |       log_connections = false;
51 |       log_statement = "all";
52 |     };
53 |     initialDatabases = [{
54 |       name = "storage";
55 |     }];
56 |   };
57 | 
58 |   scripts = {
59 |     # Open a shell to the DB
60 |     db.exec = "psql storage -h localhost -p ${builtins.toString config.env.PGPORT}";
61 | 
62 |     # Wipe out the database
63 |     reset-db.exec = "rm -rf ${config.env.DEVENV_STATE}/postgres";
64 |   };
65 | 
66 |   languages.go.enable = true;
67 | 
68 |   # https://devenv.sh/pre-commit-hooks/
69 |   ## pre-commit.hooks = {
70 |   ##   # cargo-check.enable = true;
71 |   ##   # check-merge-conflicts.enable = true;
72 |   ##   # clippy.enable = true;
73 |   ##   # commitizen.enable = true;
74 |   ##   # rustfmt.enable = true;
75 |   ## };
76 | }
--------------------------------------------------------------------------------
/groth16-framework/tests/group_hashing.rs:
--------------------------------------------------------------------------------
1 | //! Test the Groth16 proving process for the Group Hashing circuit.
2 | 
3 | use groth16_framework::{
4 |     compile_and_generate_assets, test_utils::test_groth16_proving_and_verification, C,
5 | };
6 | use mp2_common::{group_hashing::CircuitBuilderGroupHashing, proof::serialize_proof, D, F};
7 | use plonky2::{
8 |     field::types::Field,
9 |     iop::witness::{PartialWitness, WitnessWrite},
10 |     plonk::{
11 |         circuit_builder::CircuitBuilder,
12 |         circuit_data::{CircuitConfig, CircuitData},
13 |     },
14 | };
15 | use rand::{thread_rng, Rng};
16 | use serial_test::serial;
17 | 
18 | /// Test proving for the group-hashing circuit.
19 | #[ignore] // Ignored for its long running time in CI.
20 | #[serial]
21 | #[test]
22 | fn test_groth16_proving_for_group_hashing() {
23 |     env_logger::init();
24 | 
25 |     const ASSET_DIR: &str = "groth16_group_hashing";
26 | 
27 |     // Build for the Group Hashing circuit and generate the plonky2 proof.
28 |     let (circuit_data, proof) = plonky2_build_and_prove();
29 | 
30 |     // Generate the asset files.
31 |     compile_and_generate_assets(circuit_data, ASSET_DIR)
32 |         .expect("Failed to generate the asset files");
33 | 
34 |     // Test Groth16 proving, verification and Solidity verification.
35 |     test_groth16_proving_and_verification(ASSET_DIR, &proof);
36 | }
37 | 
38 | /// Build the plonky2 circuit and generate the proof.
39 | fn plonky2_build_and_prove() -> (CircuitData<F, C, D>, Vec<u8>) {
40 |     let config = CircuitConfig::standard_recursion_config();
41 |     let mut cb = CircuitBuilder::<F, D>::new(config);
42 | 
43 |     let inputs = [0; 8].map(|_| cb.add_virtual_target());
44 |     let a = cb.map_to_curve_point(&inputs[..4]);
45 |     let b = cb.map_to_curve_point(&inputs[4..]);
46 |     let _c = cb.add_curve_point(&[a, b]);
47 | 
48 |     // TODO: the public-input fields must be restricted to the Uint32 range for
49 |     // the SHA256 computation in Groth16.
50 |     // Register the public inputs twice for testing.
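    // (Registration is left disabled until the Uint32-range restriction above
    // is handled; as a result, this test circuit is built with no registered
    // public inputs.)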
51 |     // cb.register_curve_public_input(c);
52 |     // cb.register_curve_public_input(c);
53 | 
54 |     let mut pw = PartialWitness::new();
55 |     let mut rng = thread_rng();
56 |     inputs
57 |         .into_iter()
58 |         .zip([0; 8].map(|_| F::from_canonical_u64(rng.gen())))
59 |         .for_each(|(t, v)| pw.set_target(t, v));
60 | 
61 |     let circuit_data = cb.build::<C>();
62 |     let proof = circuit_data.prove(pw).unwrap();
63 |     let proof = serialize_proof(&proof).unwrap();
64 | 
65 |     (circuit_data, proof)
66 | }
--------------------------------------------------------------------------------
/mp2-v1/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Provides the generic APIs to build proving parameters and generate proofs for
2 | //! the Zk-SQL coprocessor by Lagrange.
3 | //!
4 | //! In a nutshell, the Lagrange Zk-SQL coprocessor allows running verifiable SQL queries
5 | //! over tables in the Lagrange verifiable DB. The verifiable DB allows creating
6 | //! tables from blockchain data, together with a proof that the DB was constructed
7 | //! with the same data extracted from the blockchain.
8 | #![allow(incomplete_features)]
9 | #![allow(clippy::large_enum_variant)]
10 | // Add this to allow generic const expressions, e.g. `PAD_LEN(NODE_LEN)`.
11 | #![feature(generic_const_exprs)]
12 | // Add this so we don't need to always specify const generics in generic
13 | // parameters (i.e. use "_")
14 | #![feature(generic_arg_infer)]
15 | use mp2_common::{array::Array, keccak::PACKED_HASH_LEN, mpt_sequential::PAD_LEN};
16 | use plonky2::{
17 |     field::extension::quintic::QuinticExtension,
18 |     plonk::{circuit_builder::CircuitBuilder, config::GenericConfig},
19 | };
20 | use plonky2_crypto::u32::arithmetic_u32::U32Target;
21 | 
22 | pub const MAX_BRANCH_NODE_LEN: usize = 532;
23 | pub const MAX_BRANCH_NODE_LEN_PADDED: usize = PAD_LEN(532);
24 | /// rlp( rlp(max key 32b) + rlp(max value 32b) ) + 1 for compact encoding
25 | /// see test_len()
26 | pub const MAX_EXTENSION_NODE_LEN: usize = 69;
27 | pub const MAX_EXTENSION_NODE_LEN_PADDED: usize = PAD_LEN(69);
28 | pub const MAX_LEAF_NODE_LEN: usize = MAX_EXTENSION_NODE_LEN;
29 | 
30 | pub mod api;
31 | pub mod block_extraction;
32 | pub mod contract_extraction;
33 | pub mod final_extraction;
34 | pub mod indexing;
35 | pub mod length_extraction;
36 | pub mod query;
37 | pub mod values_extraction;
38 | 
39 | pub(crate) const D: usize = 2;
40 | #[cfg(feature = "original_poseidon")]
41 | pub(crate) type C = plonky2::plonk::config::PoseidonGoldilocksConfig;
42 | #[cfg(not(feature = "original_poseidon"))]
43 | pub(crate) type C = poseidon2_plonky2::poseidon2_goldilock::Poseidon2GoldilocksConfig;
44 | pub(crate) type F = <C as GenericConfig<D>>::F;
45 | pub(crate) type CHasher = <C as GenericConfig<D>>::Hasher;
46 | pub(crate) type H = <C as GenericConfig<D>>::Hasher;
47 | pub(crate) type CBuilder = CircuitBuilder<F, D>;
48 | pub(crate) type GFp5 = QuinticExtension<F>;
49 | pub(crate) type OutputHash = Array<U32Target, PACKED_HASH_LEN>;
50 | 
51 | #[cfg(test)]
52 | pub(crate) mod tests {
53 |     /// Testing maximum columns
54 |     pub(crate) const TEST_MAX_COLUMNS: usize = 32;
55 |     /// Testing maximum fields for each EVM word
56 |     pub(crate) const TEST_MAX_FIELD_PER_EVM: usize = 32;
57 | }
--------------------------------------------------------------------------------
/parsil/src/errors.rs:
--------------------------------------------------------------------------------
1 | use sqlparser::ast::{BinaryOperator, UnaryOperator};
2 | use thiserror::Error;
3 | 
4 | #[derive(Error, Debug)]
5 | pub enum ValidationError {
6 |     #[error("query projection must not mix aggregates and scalars")]
7 |     MixedQuery,
8 | 
9 |     #[error("query body should be a SELECT statement")]
10 |     NotASelect,
11 | 
12 |     #[error("`{0}`: unsupported operator")]
13 |     UnsupportedUnaryOperator(UnaryOperator),
14 | 
15 |     #[error("`{0}`: unsupported operator")]
16 |     UnsupportedBinaryOperator(BinaryOperator),
17 | 
18 |     #[error("`{0}`: unknown function")]
19 |     UnknownFunction(String),
20 | 
21 |     #[error("`{0}`: expected {1} argument, found {2}")]
22 |     InvalidArity(String, usize, usize),
23 | 
24 |     #[error("`{0}`: unexpected argument type")]
25 |     InvalidFunctionArgument(String),
26 | 
27 |     #[error("`{0}`: unknown placeholder")]
28 |     UnknownPlaceholder(String),
29 | 
30 |     #[error("`{0}` is not used")]
31 |     MissingPlaceholder(String),
32 | 
33 |     #[error("Too many query parameters. Expected {expected}, got {got}")]
34 |     TooManyParameters { expected: usize, got: usize },
35 | 
36 |     #[error("`{0}`: unsupported immediate value")]
37 |     UnsupportedImmediateValue(String),
38 | 
39 |     #[error("`{0}`: nested selects are not supported")]
40 |     NestedSelect(String),
41 | 
42 |     #[error("`{0}`: set operations are not supported")]
43 |     SetOperation(String),
44 | 
45 |     #[error("`{0}`: mutable queries are not supported")]
46 |     MutableQueries(String),
47 | 
48 |     #[error("{0} unsupported")]
49 |     UnsupportedFeature(String),
50 | 
51 |     #[error("`{0}`: unsupported jointure")]
52 |     UnsupportedJointure(String),
53 | 
54 |     #[error("`{0}`: non-standard SQL")]
55 |     NonStandardSql(String),
56 | 
57 |     #[error("`{0}`: ORDER BY only supports up to {1} criteria")]
58 |     OrderByArity(String, usize),
59 | 
60 |     #[error(
61 |         "ORDER BY criteria must be present in the SELECT expressions; `{0}` not found in SELECT"
62 |     )]
63 |     SpecialOrderBy(String),
64 | 
65 |     #[error("`{0}`: compounded table names unsupported")]
66 |     CompoundTableName(String),
67 | 
68 |     #[error("`{0}`: reserved identifier")]
69 |     ReservedIdentifier(String),
70 | 
71 |     #[error("unable to convert `{0}` to a U256")]
72 |     InvalidInteger(String),
73 | 
74 |     #[error("NULL-related ordering specifiers unsupported")]
75 |     NullRelatedOrdering,
76 | 
77 |     #[error("Clause `{0}` value should be set in the appropriate parameter at execution time")]
78 |     UseInvocationParameter(String),
79 | }
--------------------------------------------------------------------------------
/groth16-framework/tests/keccak.rs:
--------------------------------------------------------------------------------
1 | //! Test the Groth16 proving process for the Keccak circuit.
2 | 
3 | use groth16_framework::{
4 |     compile_and_generate_assets, test_utils::test_groth16_proving_and_verification, C,
5 | };
6 | use mp2_common::{
7 |     array::{Array, Vector, VectorWire},
8 |     keccak::{InputData, KeccakCircuit},
9 |     mpt_sequential::PAD_LEN,
10 |     proof::serialize_proof,
11 |     D, F,
12 | };
13 | use plonky2::{
14 |     field::types::Field,
15 |     iop::{target::Target, witness::PartialWitness},
16 |     plonk::{
17 |         circuit_builder::CircuitBuilder,
18 |         circuit_data::{CircuitConfig, CircuitData},
19 |     },
20 | };
21 | use rand::{thread_rng, Rng};
22 | use serial_test::serial;
23 | use std::array;
24 | 
25 | /// Test proving for the keccak circuit.
26 | #[ignore] // Ignored for its long running time in CI.
27 | #[serial]
28 | #[test]
29 | fn test_groth16_proving_for_keccak() {
30 |     env_logger::init();
31 | 
32 |     const ASSET_DIR: &str = "groth16_keccak";
33 | 
34 |     // Build for the Keccak circuit and generate the plonky2 proof.
35 |     let (circuit_data, proof) = plonky2_build_and_prove();
36 | 
37 |     // Generate the asset files.
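    // As documented in gnark-utils, this writes `r1cs.bin`, `pk.bin`, `vk.bin`
    // and `Verifier.sol` under ASSET_DIR.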
38 | compile_and_generate_assets(circuit_data, ASSET_DIR) 39 | .expect("Failed to generate the asset files"); 40 | 41 | // Test Groth16 proving, verification and Solidity verification. 42 | test_groth16_proving_and_verification(ASSET_DIR, &proof); 43 | } 44 | 45 | /// Build for the plonky2 circuit and generate the proof. 46 | fn plonky2_build_and_prove() -> (CircuitData, Vec) { 47 | let config = CircuitConfig::standard_recursion_config(); 48 | let mut cb = CircuitBuilder::::new(config); 49 | 50 | const REAL_LEN: usize = 10; 51 | const PADDED_LEN: usize = PAD_LEN(REAL_LEN); 52 | 53 | let arr = Array::new(&mut cb); 54 | let v = VectorWire:: { 55 | real_len: cb.constant(F::from_canonical_usize(REAL_LEN)), 56 | arr: arr.clone(), 57 | }; 58 | let k = KeccakCircuit::hash_vector(&mut cb, &v); 59 | 60 | let mut pw = PartialWitness::new(); 61 | let inputs = array::from_fn(|_| thread_rng().gen::()); 62 | arr.assign(&mut pw, &inputs.map(F::from_canonical_u8)); 63 | KeccakCircuit::::assign( 64 | &mut pw, 65 | &k, 66 | &InputData::Assigned(&Vector::from_vec(&inputs).unwrap()), 67 | ); 68 | 69 | let circuit_data = cb.build::(); 70 | let proof = circuit_data.prove(pw).unwrap(); 71 | let proof = serialize_proof(&proof).unwrap(); 72 | 73 | (circuit_data, proof) 74 | } 75 | -------------------------------------------------------------------------------- /groth16-framework/src/evm/executor.rs: -------------------------------------------------------------------------------- 1 | //! Test contract deployment and call 2 | //! Copied and modified from [snark-verifier](https://github.com/privacy-scaling-explorations/snark-verifier). 3 | 4 | use anyhow::{bail, Result}; 5 | use revm::{ 6 | primitives::{CreateScheme, ExecutionResult, Output, TransactTo, TxEnv}, 7 | InMemoryDB, EVM, 8 | }; 9 | 10 | /// Deploy contract and then call with calldata. 11 | /// Return the gas_used and the output bytes of call to deployed contract if 12 | /// both transactions are successful. 13 | pub fn deploy_and_call(deployment_code: Vec, calldata: Vec) -> Result<(u64, Vec)> { 14 | let mut evm = EVM { 15 | env: Default::default(), 16 | db: Some(InMemoryDB::default()), 17 | }; 18 | 19 | evm.env.tx = TxEnv { 20 | gas_limit: u64::MAX, 21 | transact_to: TransactTo::Create(CreateScheme::Create), 22 | data: deployment_code.into(), 23 | ..Default::default() 24 | }; 25 | 26 | let result = evm.transact_commit().unwrap(); 27 | let contract = match result { 28 | ExecutionResult::Success { 29 | output: Output::Create(_, Some(contract)), 30 | .. 31 | } => contract, 32 | ExecutionResult::Revert { gas_used, output } => 33 | bail!( 34 | "Contract deployment transaction reverts with gas_used {gas_used} and output {:#x}", 35 | output 36 | ), 37 | ExecutionResult::Halt { reason, gas_used } => bail!( 38 | "Contract deployment transaction halts unexpectedly with gas_used {gas_used} and reason {:?}", 39 | reason 40 | ), 41 | _ => unreachable!(), 42 | }; 43 | 44 | evm.env.tx = TxEnv { 45 | gas_limit: u64::MAX, 46 | transact_to: TransactTo::Call(contract), 47 | data: calldata.into(), 48 | ..Default::default() 49 | }; 50 | 51 | let result = evm.transact_commit().unwrap(); 52 | log::info!("EVM result: {result:?}"); 53 | match result { 54 | ExecutionResult::Success { 55 | gas_used, 56 | output: Output::Call(bytes), 57 | .. 
58 |         } => Ok((gas_used, bytes.to_vec())),
59 |         ExecutionResult::Revert { gas_used, output } => bail!(
60 |             "Contract call transaction reverts with gas_used {gas_used} and output {:#x}",
61 |             output
62 |         ),
63 |         ExecutionResult::Halt { reason, gas_used } => bail!(
64 |             "Contract call transaction halts unexpectedly with gas_used {gas_used} and reason {:?}",
65 |             reason
66 |         ),
67 |         _ => unreachable!(),
68 |     }
69 | }
--------------------------------------------------------------------------------
/verifiable-db/src/query/universal_circuit/mod.rs:
--------------------------------------------------------------------------------
1 | use crate::F;
2 | use plonky2::hash::hash_types::{HashOut, HashOutTarget};
3 | 
4 | /// Component implementing the basic operation supported in the universal query circuit, described here
5 | /// https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#46985c2eb90f4af8aa0805a9203e9efa
6 | mod basic_operation;
7 | mod cells;
8 | /// Component binding column values for the given row to the cells tree hash, described here
9 | /// https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#9e7230af7b844b4699a078291591b3eb
10 | mod column_extraction;
11 | /// Output component for queries without aggregation operations (i.e., `SUM` or `MIN`) specified in the `SELECT` statement,
12 | /// described here: https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#8799521e9e9547aeb61dc306d399d654
13 | pub(crate) mod output_no_aggregation;
14 | /// Output component for queries with aggregation operations (i.e., `SUM` or `MIN`) specified in the `SELECT` statement,
15 | /// described here: https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#3e0a95407a4a474ca8f0fe45b913ea70
16 | pub(crate) mod output_with_aggregation;
17 | /// Universal query circuit, employing several instances of the atomic components found in other modules to process
18 | /// a single row of a table according to a given query. The overall layout of the circuit is described here
19 | /// https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#5c0d5af8c40f4bf0ae7dd13b20a54dcc
20 | /// while the detailed specs can be found here https://www.notion.so/lagrangelabs/Queries-Circuits-2695199166a54954bbc44ad9dc398825?pvs=4#22fbb552e11e411e95d426264c94aa46
21 | pub mod universal_query_circuit;
22 | /// Gadget to process a single row in the DB according to a specific query
23 | pub(crate) mod universal_query_gadget;
24 | 
25 | /// Set of data structures to be provided as input to initialize a universal query circuit to prove
26 | /// the query computation for a single row. They basically allow representing, in a structured format,
27 | /// the operations to be performed to compute the results of the query for each row
28 | pub mod universal_circuit_inputs;
29 | 
30 | // type alias introduced to specify the semantics of a hash, given that we have many in the query circuits
31 | pub type ComputationalHash = HashOut<F>;
32 | pub type PlaceholderHash = HashOut<F>;
33 | pub type MembershipHash = HashOut<F>;
34 | pub type ComputationalHashTarget = HashOutTarget;
35 | pub type PlaceholderHashTarget = HashOutTarget;
36 | pub type MembershipHashTarget = HashOutTarget;
37 | 
38 | pub(crate) use cells::build_cells_tree;
--------------------------------------------------------------------------------
/groth16-framework/src/utils.rs:
--------------------------------------------------------------------------------
1 | //! Utility functions
2 | 
3 | use crate::C;
4 | use alloy::primitives::U256;
5 | use anyhow::{anyhow, Result};
6 | use mp2_common::{
7 |     serialization::{FromBytes, ToBytes},
8 |     D, F,
9 | };
10 | use plonky2::plonk::circuit_data::CircuitData;
11 | use std::{
12 |     fs::{create_dir_all, File},
13 |     io::{Read, Write},
14 |     path::Path,
15 | };
16 | 
17 | /// The filename of the cached circuit data. This is the circuit data of the final wrapped proof.
18 | /// The actual mapreduce-plonky2 proof being verified has already been hardcoded in the wrapped proof.
19 | pub const CIRCUIT_DATA_FILENAME: &str = "circuit.bin";
20 | 
21 | /// The filename of the exported Solidity verifier contract.
22 | pub const SOLIDITY_VERIFIER_FILENAME: &str = "Verifier.sol";
23 | 
24 | /// Convert a string with a `0x` prefix to a U256.
25 | pub fn hex_to_u256(s: &str) -> Result<U256> {
26 |     let s = s
27 |         .strip_prefix("0x")
28 |         .ok_or(anyhow!("The hex string must have `0x` prefix: {s}"))?;
29 |     let u = U256::from_str_radix(s, 16)?;
30 | 
31 |     Ok(u)
32 | }
33 | 
34 | /// Read the data from a file.
35 | pub fn read_file<P: AsRef<Path>>(file_path: P) -> Result<Vec<u8>> {
36 |     let mut data = vec![];
37 |     let mut fd = File::open(file_path)?;
38 |     fd.read_to_end(&mut data)?;
39 | 
40 |     Ok(data)
41 | }
42 | 
43 | /// Write the data to a file.
44 | pub fn write_file<P: AsRef<Path>>(file_path: P, data: &[u8]) -> Result<()> {
45 |     // Try to create the parent dir if it does not exist.
46 |     if let Some(parent_dir) = file_path.as_ref().parent() {
47 |         create_dir_all(parent_dir)?;
48 |     }
49 | 
50 |     // Write the file.
51 |     let mut fd = File::create(file_path)?;
52 |     fd.write_all(data)?;
53 | 
54 |     Ok(())
55 | }
56 | 
57 | /// Serialize the circuit data to bytes.
58 | pub fn serialize_circuit_data(circuit_data: &CircuitData<F, C, D>) -> Result<Vec<u8>> {
59 |     // Assume that the circuit data can always be serialized by the custom
60 |     // gate and generator serializers of the recursion-framework.
61 |     Ok(ToBytes::to_bytes(circuit_data))
62 | }
63 | 
64 | /// Deserialize bytes to the circuit data.
65 | pub fn deserialize_circuit_data(bytes: &[u8]) -> Result<CircuitData<F, C, D>> {
66 |     // Assume that the circuit data can always be deserialized by the custom
67 |     // gate and generator serializers of the recursion-framework.
68 |     <CircuitData<F, C, D> as FromBytes>::from_bytes(bytes)
69 |         .map_err(|err| anyhow::Error::msg(err.to_string()))
70 | }
71 | 
72 | /// Serialize a reference to the circuit data, then deserialize it, to implement clone.
73 | pub fn clone_circuit_data(circuit_data: &CircuitData<F, C, D>) -> Result<CircuitData<F, C, D>> {
74 |     deserialize_circuit_data(&serialize_circuit_data(circuit_data)?)
75 | }
76 | 
--------------------------------------------------------------------------------
/ryhope/src/error.rs:
--------------------------------------------------------------------------------
1 | use thiserror::Error;
2 | use tokio_postgres::error::Error as PgError;
3 | 
4 | use crate::IncrementalEpoch;
5 | 
6 | #[derive(Error, Debug)]
7 | pub enum RyhopeError {
8 |     /// An error that occurred while interacting with the DB.
9 |     #[error("DB error while {msg}: {err}")]
10 |     DbError { msg: String, err: PgError },
11 | 
12 |     /// An error that occurred while interacting with the DB pool.
13 |     #[error("DB error while {msg}: {err}")]
14 |     DbPoolError {
15 |         msg: String,
16 |         err: bb8::RunError<PgError>,
17 |     },
18 | 
19 |     /// The internal state is incoherent; this is a bug.
20 |     #[error("internal error: {0}. Please file a bug report here https://github.com/Lagrange-Labs/mapreduce-plonky2")]
21 |     Internal(String),
22 | 
23 |     /// Unable to extract data from the DB
24 |     #[error("unable to deserialize data while {msg}: {err} -- this should never happen; has a non-retrocompatible update of the storage format happened?")]
25 |     InvalidFormat { msg: String, err: PgError },
26 | 
27 |     #[error("already in a transaction")]
28 |     AlreadyInTransaction,
29 | 
30 |     #[error("not in a transaction")]
31 |     NotInATransaction,
32 | 
33 |     /// A non-recoverable error
34 |     #[error("fatal error: {0}")]
35 |     Fatal(String),
36 | 
37 |     #[error("key not found in tree")]
38 |     KeyNotFound,
39 | 
40 |     #[error("Current epoch is undefined: internal epoch is {0}, but no corresponding user epoch was found")]
41 |     CurrenEpochUndefined(IncrementalEpoch),
42 | 
43 |     #[error("Error in epoch mapper operation: {0}")]
44 |     EpochMapperError(String),
45 | }
46 | impl RyhopeError {
47 |     pub fn from_db<S: AsRef<str>>(msg: S, err: PgError) -> Self {
48 |         RyhopeError::DbError {
49 |             msg: msg.as_ref().to_string(),
50 |             err,
51 |         }
52 |     }
53 | 
54 |     pub fn from_bb8<S: AsRef<str>>(msg: S, err: bb8::RunError<PgError>) -> Self {
55 |         RyhopeError::DbPoolError {
56 |             msg: msg.as_ref().to_string(),
57 |             err,
58 |         }
59 |     }
60 | 
61 |     pub fn invalid_format<S: AsRef<str>>(msg: S, err: PgError) -> Self {
62 |         RyhopeError::InvalidFormat {
63 |             msg: msg.as_ref().to_string(),
64 |             err,
65 |         }
66 |     }
67 | 
68 |     pub fn internal<S: AsRef<str>>(msg: S) -> Self {
69 |         RyhopeError::Internal(msg.as_ref().to_string())
70 |     }
71 | 
72 |     pub fn fatal<S: AsRef<str>>(msg: S) -> Self {
73 |         RyhopeError::Fatal(msg.as_ref().to_string())
74 |     }
75 | 
76 |     pub fn epoch_error<S: AsRef<str>>(msg: S) -> Self {
77 |         RyhopeError::EpochMapperError(msg.as_ref().to_string())
78 |     }
79 | }
80 | 
81 | pub fn ensure<S: AsRef<str>>(cond: bool, msg: S) -> Result<(), RyhopeError> {
82 |     if cond {
83 |         Ok(())
84 |     } else {
85 |         Err(RyhopeError::fatal(msg.as_ref()))
86 |     }
87 | }
--------------------------------------------------------------------------------
/parsil/src/dsl.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::U256;
2 | use anyhow::*;
3 | use sqlparser::ast::Expr;
4 | use verifiable_db::query::computational_hash_ids::{Operation, PlaceholderIdentifier};
5 | 
6 | use crate::{
7 |     symbols::{ColumnKind, ContextProvider, ScopeTable, Symbol, ZkColumn},
8 |     visitor::AstPass,
9 |     ParsilSettings,
10 | };
11 | 
12 | struct ExprAstBuilder<'a, C: ContextProvider> {
13 |     settings: &'a ParsilSettings<C>,
14 |     scopes: ScopeTable<(), ()>,
15 | }
16 | impl<'a, C: ContextProvider> ExprAstBuilder<'a, C> {
17 |     fn new(settings: &'a ParsilSettings<C>) -> Self {
18 |         Self {
19 |             settings,
20 |             scopes: ScopeTable::new(),
21 |         }
22 |     }
23 | 
24 |     fn exit_scope(&mut self) -> Result<()> {
25 |         let exited_scope = self.scopes.exit_scope()?;
26 | 
27 |         // Prepare the data that will be used to generate the circuit PIs
28 |         let mut output_items = Vec::new();
29 |         let mut aggregations = Vec::new();
30 |         for r in self.scopes.currently_reachable()?.into_iter() {
31 |             match r {
32 |                 Symbol::Column { payload: id, .. }
33 |                 | Symbol::NamedExpression { payload: id, .. }
34 |                 | Symbol::Expression(id) => {
35 |                     let (_, output_item) = self.to_output_expression(id, false)?;
36 |                     output_items.push(output_item);
37 |                 }
38 |                 Symbol::Alias { .. } => {}
39 |                 Symbol::Wildcard => unreachable!(),
40 |             };
41 |         }
42 | 
43 |         let exited_scope_metadata = self.scopes.scope_at_mut(exited_scope).metadata_mut();
44 |         exited_scope_metadata.outputs = output_items;
45 |         exited_scope_metadata.aggregation = aggregations;
46 | 
47 |         Ok(())
48 |     }
49 | }
50 | 
51 | enum Node {
52 |     UnaryOperation(Operation, Box<Node>),
53 |     BinaryOperation(Operation, Box<Node>, Box<Node>),
54 |     Nested(Box<Node>),
55 |     Constant(U256),
56 |     Column {
57 |         column: ZkColumn,
58 |         alias: Option<String>,
59 |     },
60 |     Placeholder(PlaceholderIdentifier),
61 | }
62 | impl Node {
63 |     fn from_expr(e: &Expr) {}
64 | }
65 | 
66 | struct Program {}
67 | 
68 | struct If(Select, Action);
69 | impl AstPass for If {
70 |     fn post_expr(&mut self, expr: &mut Expr) -> anyhow::Result<()> {
71 |         Ok(())
72 |     }
73 | }
74 | 
75 | enum Select {
76 |     ContainsName(String),
77 |     ContainsKind(ColumnKind),
78 | }
79 | impl Select {
80 |     fn apply(&self, expr: &Expr) -> bool {
81 |         match self {
82 |             Select::ContainsName(_) => todo!(),
83 |             Select::ContainsKind(_) => todo!(),
84 |         }
85 |     }
86 | }
87 | 
88 | enum Action {
89 |     NeutralizeInParent,
90 | }
91 | 
92 | struct Interpreter<C: ContextProvider> {
93 |     settings: ParsilSettings<C>,
94 | }
95 | impl<C: ContextProvider> AstPass for Interpreter<C> {}
--------------------------------------------------------------------------------
/parsil/src/queries.rs:
--------------------------------------------------------------------------------
1 | //! This module exposes some common queries, used in mandatory steps of the
2 | //! pre-processing and query validation.
3 | 
4 | use crate::{keys_in_index_boundaries, symbols::ContextProvider, ParsilSettings};
5 | use anyhow::*;
6 | use ryhope::{
7 |     mapper_table_name, tree::sbbst::NodeIdx, UserEpoch, EPOCH, INCREMENTAL_EPOCH, KEY, USER_EPOCH,
8 | };
9 | use verifiable_db::query::{
10 |     universal_circuit::universal_circuit_inputs::Placeholders, utils::QueryBounds,
11 | };
12 | 
13 | /// Return a query ready to be injected in the wide lineage computation for the
14 | /// index tree.
15 | ///
16 | /// * execution_epoch: the epoch (block number) at which the query is executed;
17 | /// * query_epoch_bounds: the min. and max. block numbers onto which the query
18 | ///   is executed;
19 | /// * table_name: the name of the index tree table over which the query is executed;
20 | pub fn core_keys_for_index_tree(
21 |     execution_epoch: UserEpoch,
22 |     query_epoch_bounds: (NodeIdx, NodeIdx),
23 |     table_name: &str,
24 | ) -> Result<String> {
25 |     let (query_min_block, query_max_block) = query_epoch_bounds;
26 | 
27 |     let mapper_table_name = mapper_table_name(table_name);
28 | 
29 |     let (lower_epoch, higher_epoch) = (
30 |         query_min_block,
31 |         query_max_block.min(
32 |             execution_epoch
33 |                 .try_into()
34 |                 .with_context(|| format!("unable to convert {execution_epoch} to i64"))?,
35 |         ),
36 |     );
37 | 
38 |     // Integers default to i32 in PgSQL, so they must be cast to i64, a.k.a. BIGINT.
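    // Worked example: with execution_epoch = 100 and query_epoch_bounds = (42, 1_000),
    // higher_epoch = min(1_000, 100) = 100, so the query below only selects the
    // user epochs in 42..=100.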
39 |     Ok(format!(
40 |         "
41 |         SELECT {execution_epoch}::BIGINT as {EPOCH},
42 |                {USER_EPOCH} as {KEY}
43 |         FROM {mapper_table_name}
44 |         WHERE {USER_EPOCH} >= {lower_epoch}::BIGINT AND {USER_EPOCH} <= {higher_epoch}::BIGINT
45 |         AND NOT {INCREMENTAL_EPOCH} = 0
46 |         ORDER BY {USER_EPOCH}
47 |         "
48 |     ))
49 | }
50 | 
51 | /// Return a query ready to be injected in the wide lineage computation for the
52 | /// row tree.
53 | ///
54 | /// * query: the zkQuery, as registered by the end user;
55 | /// * settings: the Parsil settings used to parse & execute the query;
56 | /// * bounds: the bounds on the prim. and sec. index for this execution of
57 | ///   the query;
58 | /// * placeholders: the placeholders value for this execution of the query.
59 | pub fn core_keys_for_row_tree<C: ContextProvider>(
60 |     query: &str,
61 |     settings: &ParsilSettings<C>,
62 |     bounds: &QueryBounds,
63 |     placeholders: &Placeholders,
64 | ) -> Result<String> {
65 |     Ok(keys_in_index_boundaries(query, settings, bounds)
66 |         .context("while computing core keys query from zkQuery")?
67 |         .interpolate(settings, placeholders)
68 |         .context("while injecting placeholder values in the core keys query")?
69 |         .to_pgsql_string_no_placeholders())
70 | }
--------------------------------------------------------------------------------
/mp2-v1/src/indexing/block.rs:
--------------------------------------------------------------------------------
1 | //! Module to handle the block number as a primary index
2 | use anyhow::anyhow;
3 | use ryhope::{
4 |     storage::{pgsql::PgsqlStorage, RoEpochKvStorage},
5 |     tree::{sbbst, TreeTopology},
6 |     MerkleTreeKvDb,
7 | };
8 | 
9 | use crate::query::planner::TreeFetcher;
10 | 
11 | use super::index::IndexNode;
12 | 
13 | /// The index tree when the primary index is an epoch in a time-series DB, like the block number for a blockchain.
14 | /// It is a sbbst since that is a highly optimized tree for monotonically increasing indexes.
15 | /// It produces very few tree-manipulating operations on update, and therefore requires the least amount
16 | /// of reproving when adding a new index.
17 | /// NOTE: it is still required that monotonically increasing indexes are inserted in the tree,
18 | /// i.e. a general index such as what can happen on a result table wouldn't work with this tree.
19 | pub type BlockTree = sbbst::EpochTree;
20 | /// The key used to refer to a table where the block number is the primary index.
21 | pub type BlockTreeKey = <BlockTree as TreeTopology>::Key;
22 | /// Just an alias that gives more meaning depending on the context
23 | pub type BlockPrimaryIndex = BlockTreeKey;
24 | 
25 | pub type IndexStorage = PgsqlStorage<BlockTree, IndexNode<BlockPrimaryIndex>, false>;
26 | pub type MerkleIndexTree = MerkleTreeKvDb<BlockTree, IndexNode<BlockPrimaryIndex>, IndexStorage>;
27 | 
28 | /// Get the previous epoch of `epoch` in `tree`
29 | pub async fn get_previous_epoch(
30 |     tree: &MerkleIndexTree,
31 |     epoch: BlockPrimaryIndex,
32 | ) -> anyhow::Result<Option<BlockPrimaryIndex>> {
33 |     let current_epoch = tree.current_epoch().await?;
34 |     let epoch_ctx = tree
35 |         .node_context(&epoch)
36 |         .await?
37 |         .ok_or(anyhow!("epoch {epoch} not found in the tree"))?;
38 | 
39 |     Ok(tree
40 |         .get_predecessor(&epoch_ctx, current_epoch)
41 |         .await
42 |         .map(|(ctx, _)| ctx.node_id))
43 | }
44 | 
45 | /// Get the next epoch of `epoch` in `tree`
46 | pub async fn get_next_epoch(
47 |     tree: &MerkleIndexTree,
48 |     epoch: BlockPrimaryIndex,
49 | ) -> anyhow::Result<Option<BlockPrimaryIndex>> {
50 |     let current_epoch = tree.current_epoch().await?;
51 |     let epoch_ctx = tree
52 |         .node_context(&epoch)
53 |         .await?
54 |         .ok_or(anyhow!("epoch {epoch} not found in the tree"))?;
55 | 
56 |     Ok(tree
57 |         .get_successor(&epoch_ctx, current_epoch)
58 |         .await
59 |         .map(|(ctx, _)| ctx.node_id))
60 | }
61 | 
62 | #[cfg(test)]
63 | mod tests {
64 |     use super::MerkleIndexTree;
65 | 
66 |     fn check_if_send<T: Send>() {}
67 |     fn check_if_sync<T: Sync>() {}
68 | 
69 |     #[test]
70 |     fn test_is_send() {
71 |         check_if_send::<MerkleIndexTree>();
72 |         check_if_send::<&MerkleIndexTree>();
73 |         check_if_sync::<MerkleIndexTree>();
74 |         check_if_sync::<&MerkleIndexTree>();
75 |     }
76 | }
--------------------------------------------------------------------------------
/verifiable-db/src/lib.rs:
--------------------------------------------------------------------------------
1 | //! Provides the API to generate proofs for the table creation and query execution
2 | //! steps of the Lagrange Zk-SQL coprocessor.
3 | 
4 | // Add this to allow generic const expressions, e.g. `PAD_LEN(NODE_LEN)`.
5 | #![allow(incomplete_features)]
6 | #![feature(generic_const_exprs)]
7 | // Add this to allow generic const items, e.g. `const IO_LEN`
8 | #![feature(generic_const_items)]
9 | #![feature(variant_count)]
10 | 
11 | use git_version::git_version;
12 | use plonky2::plonk::{
13 |     circuit_builder::CircuitBuilder,
14 |     config::{GenericConfig, Hasher},
15 | };
16 | 
17 | pub mod api;
18 | pub mod block_tree;
19 | pub mod cells_tree;
20 | pub mod extraction;
21 | pub mod ivc;
22 | /// Module for circuits for simple queries
23 | pub mod query;
24 | #[cfg(feature = "results_tree")]
25 | pub mod results_tree;
26 | /// Module for the query revelation circuits
27 | pub mod revelation;
28 | pub mod row_tree;
29 | pub mod test_utils;
30 | 
31 | pub(crate) const D: usize = 2;
32 | #[cfg(feature = "original_poseidon")]
33 | pub(crate) type C = plonky2::plonk::config::PoseidonGoldilocksConfig;
34 | #[cfg(not(feature = "original_poseidon"))]
35 | pub(crate) type C = poseidon2_plonky2::poseidon2_goldilock::Poseidon2GoldilocksConfig;
36 | pub(crate) type F = <C as GenericConfig<D>>::F;
37 | pub(crate) type CHasher = <C as GenericConfig<D>>::Hasher;
38 | pub(crate) type H = <C as GenericConfig<D>>::Hasher;
39 | pub(crate) type CBuilder = CircuitBuilder<F, D>;
40 | pub(crate) type HashPermutation = <H as Hasher<F>>::Permutation;
41 | 
42 | /// Return the current version of the library.
43 | pub fn version() -> &'static str {
44 |     env!("CARGO_PKG_VERSION")
45 | }
46 | 
47 | /// The full git version of mapreduce-plonky2
48 | ///
49 | /// `git` is required when compiling. This is the string returned from the command
50 | /// `git describe --abbrev=7 --always`, e.g. `v1.1.1-8-g77fa458`.
51 | pub const GIT_VERSION: &str = git_version!(args = ["--abbrev=7", "--always"]);
52 | 
53 | /// Get the short git version of mapreduce-plonky2.
54 | ///
55 | /// Return `77fa458` if the full git version is `v1.1.1-8-g77fa458`.
56 | pub fn short_git_version() -> String {
57 |     let commit_version = GIT_VERSION.split('-').next_back().unwrap();
58 | 
59 |     // Check whether the bare commit hash was used as a fallback.
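    // E.g. `GIT_VERSION = "v1.1.1-8-g77fa458"` yields the segment `g77fa458`
    // (len 8), so the leading `g` is stripped and the 7 hex digits `77fa458`
    // are kept. A bare fallback hash like `77fa458` (len 7) is returned
    // unchanged.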
60 | if commit_version.len() < 8 { 61 | commit_version.to_string() 62 | } else { 63 | commit_version[1..8].to_string() 64 | } 65 | } 66 | 67 | #[cfg(test)] 68 | pub(crate) mod tests { 69 | use mp2_common::{array::Array, keccak::PACKED_HASH_LEN}; 70 | use plonky2_crypto::u32::arithmetic_u32::U32Target; 71 | 72 | use super::*; 73 | 74 | pub(crate) type OutputHash = Array; 75 | 76 | #[test] 77 | fn test_short_git_version() { 78 | let v = short_git_version(); 79 | 80 | assert_eq!(v.len(), 7); 81 | assert!(v.chars().all(|c| c.is_ascii_hexdigit())); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /parsil/tests/context.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "user_facing_name": "table1", 4 | "zktable_name": "contract_user_table1", 5 | "columns": [ 6 | { 7 | "name": "block", 8 | "id": 1, 9 | "kind": "PrimaryIndex" 10 | }, 11 | { 12 | "name": "pipo", 13 | "id": 45678, 14 | "kind": "Standard" 15 | } 16 | ] 17 | }, 18 | { 19 | "user_facing_name": "table2", 20 | "zktable_name": "contract_user_table2", 21 | "id": 123456, 22 | "columns": [ 23 | { 24 | "name": "block", 25 | "id": 1, 26 | "kind": "PrimaryIndex" 27 | }, 28 | { 29 | "name": "foo", 30 | "id": 45678, 31 | "kind": "SecondaryIndex" 32 | }, 33 | { 34 | "name": "bar", 35 | "id": 45678, 36 | "kind": "Standard" 37 | } 38 | ] 39 | }, 40 | { 41 | "user_facing_name": "T", 42 | "zktable_name": "contract_user_T", 43 | "columns": [ 44 | { 45 | "name": "block", 46 | "id": 1, 47 | "kind": "PrimaryIndex" 48 | }, 49 | { 50 | "name": "C1", 51 | "id": 45678, 52 | "kind": "Standard" 53 | }, 54 | { 55 | "name": "C2", 56 | "id": 45678, 57 | "kind": "Standard" 58 | }, 59 | { 60 | "name": "C3", 61 | "id": 45678, 62 | "kind": "Standard" 63 | }, 64 | { 65 | "name": "C4", 66 | "id": 45678, 67 | "kind": "Standard" 68 | }, 69 | { 70 | "name": "C5", 71 | "id": 45678, 72 | "kind": "Standard" 73 | }, 74 | { 75 | "name": "C6", 76 | "id": 45678, 77 | "kind": "Standard" 78 | } 79 | ] 80 | }, 81 | { 82 | "user_facing_name": "row_mapping_table", 83 | "zktable_name": "row_mapping_table_rows", 84 | "columns": [ 85 | { 86 | "name": "block_number", 87 | "kind": "PrimaryIndex", 88 | "id": 17422912802427138938 89 | }, 90 | { 91 | "name": "map_value", 92 | "kind": "SecondaryIndex", 93 | "id": 12191544657365810443 94 | }, 95 | { 96 | "name": "map_key", 97 | "kind": "Standard", 98 | "id": 10362498354857054631 99 | } 100 | ] 101 | } 102 | ] 103 | -------------------------------------------------------------------------------- /mp2-v1/tests/common/ivc.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | context::TestContext, 3 | proof_storage::{IndexProofIdentifier, ProofKey, ProofStorage}, 4 | table::TableID, 5 | }; 6 | use mp2_common::{proof::ProofWithVK, types::HashOutput, F}; 7 | use mp2_v1::{ 8 | api, 9 | indexing::block::{get_previous_epoch, BlockPrimaryIndex, MerkleIndexTree}, 10 | }; 11 | use plonky2::{hash::hash_types::HashOut, plonk::config::GenericHashOut}; 12 | use verifiable_db::ivc::PublicInputs; 13 | 14 | impl TestContext { 15 | pub async fn prove_ivc( 16 | &mut self, 17 | table_id: &TableID, 18 | bn: BlockPrimaryIndex, 19 | index_tree: &MerkleIndexTree, 20 | provable_data_commitment: bool, 21 | expected_metadata_hash: &HashOutput, 22 | expected_root_of_trust: HashOutput, 23 | ) -> anyhow::Result<()> { 24 | // load the block proof of the current block 25 | let root_key = index_tree.root().await?.unwrap(); 26 
| let index_root_key = ProofKey::Index(IndexProofIdentifier { 27 | table: table_id.clone(), 28 | tree_key: root_key, 29 | }); 30 | let root_proof = self 31 | .storage 32 | .get_proof_exact(&index_root_key) 33 | .expect("index tree proof is not stored"); 34 | // load the previous IVC proof if there is one 35 | // we simply can try to load from the storage at block -1 36 | // TODO: generalize that to a better more generic method for any index tree 37 | let previous_block = get_previous_epoch(index_tree, bn).await?; 38 | let input = if let Some(prev_bn) = previous_block { 39 | let previous_ivc_key = ProofKey::IVC(prev_bn); 40 | let previous_proof = self.storage.get_proof_exact(&previous_ivc_key)?; 41 | verifiable_db::ivc::CircuitInput::new_subsequent_input( 42 | provable_data_commitment, 43 | root_proof, 44 | previous_proof, 45 | ) 46 | } else { 47 | verifiable_db::ivc::CircuitInput::new_first_input(provable_data_commitment, root_proof) 48 | } 49 | .expect("unable to create ivc circuit inputs"); 50 | let ivc_proof = self 51 | .b 52 | .bench("indexing::ivc", || { 53 | api::generate_proof(self.params(), api::CircuitInput::IVC(input)) 54 | }) 55 | .expect("unable to create ivc proof"); 56 | let proof = ProofWithVK::deserialize(&ivc_proof)?; 57 | let ivc_pi = PublicInputs::from_slice(&proof.proof().public_inputs); 58 | // check metadata hash 59 | assert_eq!( 60 | ivc_pi.metadata_hash(), 61 | &HashOut::::from_bytes(expected_metadata_hash.into()).to_vec(), 62 | ); 63 | // check root of trust 64 | assert_eq!(ivc_pi.block_hash_output(), expected_root_of_trust,); 65 | self.storage 66 | .store_proof(ProofKey::IVC(bn), ivc_proof) 67 | .expect("unable to store new ivc proof"); 68 | Ok(()) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /mp2-common/src/group_hashing/utils.rs: -------------------------------------------------------------------------------- 1 | //! 
Map to curve utility functions 2 | 3 | use super::EXTENSION_DEGREE as N; 4 | use plonky2::{ 5 | field::extension::{quintic::QuinticExtension, Extendable, FieldExtension}, 6 | hash::hash_types::RichField, 7 | }; 8 | 9 | pub fn two_thirds>() -> QuinticExtension { 10 | QuinticExtension::::from_basefield_array([ 11 | F::from_canonical_u64(6148914689804861441), 12 | F::ZERO, 13 | F::ZERO, 14 | F::ZERO, 15 | F::ZERO, 16 | ]) 17 | } 18 | 19 | // Coefficients for double-odd form y^2 = x(x^2 + Ax + B) 20 | // A = QuinticExtension[2, 0, 0, 0, 0] 21 | // B = QuinticExtension[0, 263, 0, 0, 0] 22 | // Coefficients for Short Weierstrass form y^2 = x^3 + A_sw*x + B_sw 23 | // A_sw = (3B - A^2)/3 24 | pub fn a_sw>() -> QuinticExtension { 25 | QuinticExtension::::from_basefield_array([ 26 | F::from_canonical_u64(6148914689804861439), 27 | F::from_canonical_u64(263), 28 | F::ZERO, 29 | F::ZERO, 30 | F::ZERO, 31 | ]) 32 | } 33 | 34 | // Coefficients for double-odd form y^2 = x(x^2 + Ax + B) 35 | // A = QuinticExtension[2, 0, 0, 0, 0] 36 | // B = QuinticExtension[0, 263, 0, 0, 0] 37 | // Coefficients for Short Weierstrass form y^2 = x^3 + A_sw*x + B_sw 38 | // B_sw = A(2A^2 -9B)/27 39 | pub fn b_sw>() -> QuinticExtension { 40 | QuinticExtension::::from_basefield_array([ 41 | F::from_canonical_u64(15713893096167979237), 42 | F::from_canonical_u64(6148914689804861265), 43 | F::ZERO, 44 | F::ZERO, 45 | F::ZERO, 46 | ]) 47 | } 48 | 49 | // Z computed using SageMath 50 | // Z_sw = -4 - z = 18446744069414584317 + 18446744069414584320*z 51 | // GoldilocksField::ORDER = 0xFFFFFFFF00000001 52 | // GoldilocksField::NEG_ONE = ORDER - 1 53 | // 54 | pub fn z_sw>() -> QuinticExtension { 55 | QuinticExtension::::from_basefield_array([ 56 | F::from_canonical_u64(F::ORDER - 4), 57 | F::NEG_ONE, 58 | F::ZERO, 59 | F::ZERO, 60 | F::ZERO, 61 | ]) 62 | } 63 | 64 | pub fn neg_z_inv_sw>() -> QuinticExtension { 65 | QuinticExtension::::from_basefield_array([ 66 | F::from_canonical_u64(4795794222525505369), 67 | F::from_canonical_u64(3412737461722269738), 68 | F::from_canonical_u64(8370187669276724726), 69 | F::from_canonical_u64(7130825117388110979), 70 | F::from_canonical_u64(12052351772713910496), 71 | ]) 72 | } 73 | 74 | pub fn neg_b_div_a_sw>() -> QuinticExtension { 75 | QuinticExtension::::from_basefield_array([ 76 | F::from_canonical_u64(6585749426319121644), 77 | F::from_canonical_u64(16990361517133133838), 78 | F::from_canonical_u64(3264760655763595284), 79 | F::from_canonical_u64(16784740989273302855), 80 | F::from_canonical_u64(13434657726302040770), 81 | ]) 82 | } 83 | -------------------------------------------------------------------------------- /mp2-common/src/mpt_sequential/leaf_or_extension.rs: -------------------------------------------------------------------------------- 1 | //! 
1 | //! MPT leaf or extension node gadget
2 | 
3 | use super::{Circuit as MPTCircuit, MPTKeyWire, PAD_LEN};
4 | use crate::{
5 |     array::{Array, Vector, VectorWire},
6 |     keccak::{InputData, KeccakCircuit, KeccakWires},
7 |     rlp::decode_fixed_list,
8 |     types::GFp,
9 | };
10 | use plonky2::{
11 |     field::extension::Extendable,
12 |     hash::hash_types::RichField,
13 |     iop::{target::Target, witness::PartialWitness},
14 |     plonk::circuit_builder::CircuitBuilder,
15 | };
16 | use serde::{Deserialize, Serialize};
17 | 
18 | /// Wrapped wires for an MPT leaf or extension node
19 | #[derive(Clone, Debug, Serialize, Deserialize)]
20 | pub struct MPTLeafOrExtensionWires<const NODE_LEN: usize, const VALUE_LEN: usize>
21 | where
22 |     [(); PAD_LEN(NODE_LEN)]:,
23 | {
24 |     /// MPT node
25 |     pub node: VectorWire<Target, { PAD_LEN(NODE_LEN) }>,
26 |     /// MPT root
27 |     pub root: KeccakWires<{ PAD_LEN(NODE_LEN) }>,
28 |     /// New MPT key after advancing the current key
29 |     pub key: MPTKeyWire,
30 |     /// New MPT value
31 |     pub value: Array<Target, VALUE_LEN>,
32 | }
33 | 
34 | impl<const NODE_LEN: usize, const VALUE_LEN: usize> MPTLeafOrExtensionWires<NODE_LEN, VALUE_LEN>
35 | where
36 |     [(); PAD_LEN(NODE_LEN)]:,
37 | {
38 |     pub fn assign(&self, pw: &mut PartialWitness<GFp>, node: &Vector<u8, { PAD_LEN(NODE_LEN) }>) {
39 |         self.node.assign(pw, node);
40 |         KeccakCircuit::assign(pw, &self.root, &InputData::Assigned(node));
41 |     }
42 | }
43 | 
44 | /// MPT leaf or extension node gadget
45 | pub struct MPTLeafOrExtensionNode;
46 | 
47 | impl MPTLeafOrExtensionNode {
48 |     /// Build the MPT node and advance the current key.
49 |     pub fn build_and_advance_key<
50 |         F: RichField + Extendable<D>,
51 |         const D: usize,
52 |         const NODE_LEN: usize,
53 |         const VALUE_LEN: usize,
54 |     >(
55 |         b: &mut CircuitBuilder<F, D>,
56 |         current_key: &MPTKeyWire,
57 |     ) -> MPTLeafOrExtensionWires<NODE_LEN, VALUE_LEN>
58 |     where
59 |         [(); PAD_LEN(NODE_LEN)]:,
60 |     {
61 |         let zero = b.zero();
62 |         let tru = b._true();
63 | 
64 |         // Build the node and ensure it only includes bytes.
65 |         let node = VectorWire::<Target, { PAD_LEN(NODE_LEN) }>::new(b);
66 |         node.assert_bytes(b);
67 | 
68 |         // Expose the keccak root of this subtree starting at this node.
69 |         let root = KeccakCircuit::<{ PAD_LEN(NODE_LEN) }>::hash_vector(b, &node);
70 | 
71 |         // Advance the key and extract the value (only decode two headers in the case of a leaf).
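        // Note: both leaf and extension nodes are RLP lists of exactly two
        // items (the encoded partial key, then either the stored value or the
        // child hash), hence a fixed list of two RLP headers is decoded here.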
72 |         let rlp_headers = decode_fixed_list::<_, D, 2>(b, &node.arr.arr, zero);
73 |         let (key, value, valid) = MPTCircuit::<1, NODE_LEN>::advance_key_leaf_or_extension::<
74 |             F,
75 |             D,
76 |             2,
77 |             VALUE_LEN,
78 |         >(b, &node.arr, current_key, &rlp_headers);
79 |         b.connect(tru.target, valid.target);
80 | 
81 |         MPTLeafOrExtensionWires {
82 |             node,
83 |             root,
84 |             key,
85 |             value,
86 |         }
87 |     }
88 | }
89 | 
--------------------------------------------------------------------------------
/ryhope/src/tests/example.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | 
3 | use crate::storage::Operation;
4 | use crate::storage::{memory::InMemory, RoEpochKvStorage, TreeTransactionalStorage};
5 | use crate::tree::PrintableTree;
6 | use crate::tree::{scapegoat, scapegoat::Alpha};
7 | use crate::{InitSettings, MerkleTreeKvDb};
8 | 
9 | const MAX_TREE_DEPTH: usize = 10;
10 | 
11 | #[tokio::test]
12 | async fn run() -> Result<()> {
13 |     println!("Example to create a RowTree backed by memory storage");
14 | 
15 |     type V = usize;
16 |     type RowTree = scapegoat::Tree<V>;
17 | 
18 |     type Storage = InMemory<RowTree, V, false>;
19 |     let mut tree = MerkleTreeKvDb::<RowTree, V, Storage>::new(
20 |         InitSettings::Reset(scapegoat::Tree::empty(Alpha::new(0.5), MAX_TREE_DEPTH)),
21 |         (),
22 |     )
23 |     .await?;
24 | 
25 |     println!("Insertion of some (key,value) pairs");
26 |     println!(
27 |         "Current version of the tree before insertion: {}",
28 |         tree.current_epoch().await.unwrap()
29 |     );
30 | 
31 |     let res = tree
32 |         .transaction_from_batch(vec![
33 |             Operation::Insert(1, 1),
34 |             Operation::Insert(2, 2),
35 |             Operation::Insert(3, 3),
36 |         ])
37 |         .await
38 |         .expect("this should work");
39 | 
40 |     let first_stamp = tree.current_epoch().await?;
41 |     println!("Current version of the tree after insertion: {first_stamp}");
42 | 
43 |     println!("Tree of keys to update:");
44 |     res.print();
45 |     let fetch_key = 1;
46 |     let v = tree
47 |         .try_fetch(&fetch_key)
48 |         .await
49 |         .unwrap()
50 |         .expect("that should exist");
51 |     assert_eq!(fetch_key, v);
52 |     println!("Fetching value from key {fetch_key} = {v}");
53 | 
54 |     // Now add more keys and delete the one just fetched
55 |     let _ = tree
56 |         .transaction_from_batch(vec![
57 |             Operation::Delete(fetch_key),
58 |             Operation::Insert(4, 4),
59 |             Operation::Insert(5, 5),
60 |         ])
61 |         .await
62 |         .expect("this should work");
63 | 
64 |     match tree.try_fetch(&fetch_key).await.unwrap() {
65 |         Some(_) => panic!("that should not happen"),
66 |         None => println!("Fetching deleted key {fetch_key} fails"),
67 |     }
68 | 
69 |     // Now try to fetch from the previous version
70 |     match tree.try_fetch_at(&fetch_key, first_stamp).await.unwrap() {
71 |         Some(v) => println!("Fetching {fetch_key} at previous stamp {first_stamp} works: {v}"),
72 |         None => panic!("We should have fetched something for {fetch_key:?}"),
73 |     }
74 | 
75 |     // Print the tree at its current and previous versions
76 |     println!("tree at {} is now:", tree.current_epoch().await?);
77 |     tree.tree().print(&tree.storage).await;
78 | 
79 |     println!("tree at epoch {first_stamp} was:");
80 |     let previous_state = tree.view_at(first_stamp);
81 |     tree.tree().print(&previous_state).await;
82 | 
83 |     println!(
84 |         "The update tree from {first_stamp} to {} was:",
85 |         first_stamp + 1
86 |     );
87 |     tree.diff_at(first_stamp + 1).await?.unwrap().print();
88 | 
89 |     println!("The update tree from 0 to 1 was:");
90 |     tree.diff_at(1).await?.unwrap().print();
91 | 
92 |     Ok(())
93 | }
94 | 
--------------------------------------------------------------------------------
/recursion-framework/src/lib.rs:
--------------------------------------------------------------------------------
1 | // #![warn(missing_docs)]
2 | #![allow(incomplete_features)]
3 | #![feature(generic_const_exprs)]
4 | 
5 | //! This crate provides a framework to build circuits that need to recursively verify proofs
6 | //! employing a universal verifier, i.e., a verifier capable of verifying proofs of distinct circuits with the
7 | //! same circuit implementation. The only constraint is that the circuits whose proofs need to be verified must
8 | //! share the same structure of their public inputs. This capability makes it possible to recursively combine proofs of
9 | //! different circuits in an arbitrary way, employing the same verifier at the end independently of how the proofs
10 | //! were recursively combined. However, it is still necessary to ensure to the verifier that the proofs being
11 | //! recursively combined with the universal verifier were generated for a circuit belonging to a given set
12 | //! of circuits, since otherwise the prover could generate public inputs in an arbitrary fashion, which is usually
13 | //! undesired. Therefore, the universal verifier provided by this framework also checks that each proof being
14 | //! verified was generated for a circuit belonging to a given set of circuits, represented by a digest which is
15 | //! exposed as an additional public input by every proof generated by a circuit employing the universal verifier.
16 | //! The data structures provided by this framework automatically handle most of the details necessary to make
17 | //! the circuits verifiable by the universal verifier, so that the user mostly only has to specify the additional
18 | //! logic to be enforced in the circuit besides verifying proofs.
19 | //! For an example of how to use the framework, take a look at the integration tests located in `tests/integration.rs`.
20 | 
21 | use plonky2::plonk::config::GenericConfig;
22 | 
23 | /// This module contains data structures useful to instantiate circuits that either employ the universal verifier
24 | /// or whose proofs need to be verified by a circuit employing the universal verifier
25 | pub mod circuit_builder;
26 | /// This module contains data structures that allow generating proofs for a set of circuits that
27 | /// either employ the universal verifier or whose proofs need to be verified by a circuit employing
28 | /// the universal verifier
29 | pub mod framework;
30 | /// This module contains a variant of the framework that simplifies testing and benchmarking the circuits based on
31 | /// the universal verifier and written employing the data structures and interfaces provided in the `framework`
32 | /// module
33 | pub mod framework_testing;
34 | /// Internal module that contains the core data structures and gadgets for the universal verifier
35 | pub(crate) mod universal_verifier_gadget;
36 | 
37 | pub const D: usize = 2;
38 | #[cfg(feature = "original_poseidon")]
39 | pub type C = plonky2::plonk::config::PoseidonGoldilocksConfig;
40 | #[cfg(not(feature = "original_poseidon"))]
41 | pub type C = poseidon2_plonky2::poseidon2_goldilock::Poseidon2GoldilocksConfig;
42 | pub type F = <C as GenericConfig<D>>::F;
43 | pub type CHasher = <C as GenericConfig<D>>::Hasher;
44 | pub type H = <C as GenericConfig<D>>::Hasher;
45 | 
46 | /*pub const D: usize = mp2_common::D;
47 | pub type C = mp2_common::C;
48 | pub type F = mp2_common::F;
49 | pub type CHasher = mp2_common::CHasher;
50 | pub type H = mp2_common::poseidon::H;*/
51 | 
--------------------------------------------------------------------------------
/groth16-framework/tests/common/utils.rs:
--------------------------------------------------------------------------------
1 | //! Utility functions used for testing
2 | 
3 | use super::{TestQueryInput, TestQueryOutput};
4 | use alloy::primitives::U256;
5 | use groth16_framework::utils::{read_file, write_file};
6 | use sha2::{Digest, Sha256};
7 | use std::path::Path;
8 | 
9 | /// The byte offset of the last Groth16 input in the full proof
10 | const LAST_GROTH16_INPUT_OFFSET: usize = 10 * 32;
11 | /// The start byte offset of the public inputs in the full proof
12 | const PI_OFFSET: usize = 11 * 32;
13 | 
14 | const FULL_PROOF_FILENAME: &str = "full_proof.bin";
15 | const PLONKY2_PROOF_FILENAME: &str = "plonky2_proof.bin";
16 | const QUERY_INPUT_FILENAME: &str = "query_input.json";
17 | const QUERY_OUTPUT_FILENAME: &str = "query_output.json";
18 | 
19 | /// Check if the sha256 of the encoded public inputs equals the last Groth16 input.
20 | /// This is the Rust version of the Solidity function `verifyPlonky2Inputs`.
21 | pub(crate) fn verify_pi_sha256(full_proof: &[u8]) {
22 |     // Convert the last Groth16 input to a Uint256.
23 |     let last_groth16_input: [_; 32] = full_proof
24 |         [LAST_GROTH16_INPUT_OFFSET..LAST_GROTH16_INPUT_OFFSET + 32]
25 |         .try_into()
26 |         .unwrap();
27 |     let last_groth16_input = U256::from_be_bytes(last_groth16_input);
28 | 
29 |     // Calculate the sha256 of the public inputs.
30 |     let pi = &full_proof[PI_OFFSET..];
31 |     let mut hasher = Sha256::new();
32 |     hasher.update(pi);
33 |     let pi_sha256 = hasher.finalize();
34 |     let pi_sha256 = U256::from_be_bytes(pi_sha256.into());
35 |     // Calculate the mask that clears the top `3` bits of a Uint256.
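    // A BN254 scalar field element is slightly smaller than 2^254, so the
    // top three bits of the 256-bit digest are cleared to guarantee the
    // value fits in a single Groth16 public input.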
36 |     let top_three_bit_mask: U256 = U256::from(7) << 253;
37 |     let top_three_bit_mask = !top_three_bit_mask;
38 |     let encoded_pi = pi_sha256 & top_three_bit_mask;
39 | 
40 |     assert_eq!(encoded_pi, last_groth16_input);
41 | }
42 | 
43 | /// Read the combined full proof (Groth16 proof + plonky2 proof) from file.
44 | pub(crate) fn read_full_proof(asset_dir: &str) -> Option<Vec<u8>> {
45 |     let path = Path::new(asset_dir).join(FULL_PROOF_FILENAME);
46 |     read_file(path).ok()
47 | }
48 | 
49 | /// Save the plonky2 proof to file.
50 | pub(crate) fn write_plonky2_proof(asset_dir: &str, proof: &[u8]) {
51 |     let path = Path::new(asset_dir).join(PLONKY2_PROOF_FILENAME);
52 |     write_file(path, proof).unwrap();
53 | }
54 | 
55 | /// Read the testing query input from file.
56 | pub(crate) fn read_query_input(asset_dir: &str) -> Option<TestQueryInput> {
57 |     let path = Path::new(asset_dir).join(QUERY_INPUT_FILENAME);
58 |     read_file(path)
59 |         .ok()
60 |         .and_then(|data| serde_json::from_slice(&data).ok())
61 | }
62 | 
63 | /// Save the testing query input to file.
64 | pub(crate) fn write_query_input(asset_dir: &str, input: &TestQueryInput) {
65 |     let data = serde_json::to_vec(input).unwrap();
66 |     let path = Path::new(asset_dir).join(QUERY_INPUT_FILENAME);
67 |     write_file(path, &data).unwrap();
68 | }
69 | 
70 | /// Read the testing query output from file.
71 | pub(crate) fn read_query_output(asset_dir: &str) -> Option<TestQueryOutput> {
72 |     let path = Path::new(asset_dir).join(QUERY_OUTPUT_FILENAME);
73 |     read_file(path)
74 |         .ok()
75 |         .and_then(|data| serde_json::from_slice(&data).ok())
76 | }
77 | 
78 | /// Save the testing query output to file.
79 | pub(crate) fn write_query_output(asset_dir: &str, output: &TestQueryOutput) {
80 |     let data = serde_json::to_vec(output).unwrap();
81 |     let path = Path::new(asset_dir).join(QUERY_OUTPUT_FILENAME);
82 |     write_file(path, &data).unwrap();
83 | }
84 | 
--------------------------------------------------------------------------------
/verifiable-db/src/results_tree/mod.rs:
--------------------------------------------------------------------------------
1 | pub(crate) mod binding;
2 | pub(crate) mod construction;
3 | /// Old query public inputs, moved here because the circuits in this module still expect
4 | /// these public inputs for now
5 | pub(crate) mod old_public_inputs;
6 | 
7 | #[cfg(test)]
8 | pub(crate) mod tests {
9 |     use std::array;
10 | 
11 |     use crate::F;
12 |     use mp2_common::{array::ToField, types::CURVE_TARGET_LEN, utils::ToFields};
13 |     use plonky2::{
14 |         field::types::{Field, Sample},
15 |         hash::hash_types::NUM_HASH_OUT_ELTS,
16 |     };
17 |     use plonky2_ecgfp5::curve::curve::Point;
18 |     use rand::{thread_rng, Rng};
19 | 
20 |     use crate::query::computational_hash_ids::{AggregationOperation, Identifiers};
21 | 
22 |     use super::old_public_inputs::{PublicInputs, QueryPublicInputs};
23 | 
24 |     /// Generate `N` random slices of proof public inputs for testing, set with
25 |     /// the specified aggregation operations `ops`. Each returned slice can be
26 |     /// parsed by the `PublicInputs::from_slice` function.
27 |     pub fn random_aggregation_public_inputs<const N: usize, const S: usize>(
28 |         ops: &[F; S],
29 |     ) -> [Vec<F>; N] {
30 |         let [ops_range, overflow_range, index_ids_range, c_hash_range, p_hash_range] = [
31 |             QueryPublicInputs::OpIds,
32 |             QueryPublicInputs::Overflow,
33 |             QueryPublicInputs::IndexIds,
34 |             QueryPublicInputs::ComputationalHash,
35 |             QueryPublicInputs::PlaceholderHash,
36 |         ]
37 |         .map(PublicInputs::<F>::to_range);
38 | 
39 |         let first_value_start =
40 |             PublicInputs::<F>::to_range(QueryPublicInputs::OutputValues).start;
41 |         let is_first_op_id =
42 |             ops[0] == Identifiers::AggregationOperations(AggregationOperation::IdOp).to_field();
43 | 
44 |         // Generate the index ids, computational hash and placeholder hash;
45 |         // they should be the same for a series of public inputs.
46 |         let mut rng = thread_rng();
47 |         let index_ids = (0..2).map(|_| rng.gen()).collect::<Vec<u64>>().to_fields();
48 |         let [computational_hash, placeholder_hash]: [Vec<_>; 2] = array::from_fn(|_| {
49 |             (0..NUM_HASH_OUT_ELTS)
50 |                 .map(|_| rng.gen())
51 |                 .collect::<Vec<u64>>()
52 |                 .to_fields()
53 |         });
54 | 
55 |         array::from_fn(|_| {
56 |             let mut pi = (0..PublicInputs::<F>::total_len())
57 |                 .map(|_| rng.gen())
58 |                 .collect::<Vec<u64>>()
59 |                 .to_fields();
60 | 
61 |             // Copy the specified operations to the proofs.
62 |             pi[ops_range.clone()].copy_from_slice(ops);
63 | 
64 |             // Set the overflow flag to a random boolean.
65 |             let overflow = F::from_bool(rng.gen());
66 |             pi[overflow_range.clone()].copy_from_slice(&[overflow]);
67 | 
68 |             // Set the index ids, computational hash and placeholder hash.
69 |             pi[index_ids_range.clone()].copy_from_slice(&index_ids);
70 |             pi[c_hash_range.clone()].copy_from_slice(&computational_hash);
71 |             pi[p_hash_range.clone()].copy_from_slice(&placeholder_hash);
72 | 
73 |             // If the first operation is ID, set the value to a random point.
74 |             if is_first_op_id {
75 |                 let first_value = Point::sample(&mut rng).to_weierstrass().to_fields();
76 |                 pi[first_value_start..first_value_start + CURVE_TARGET_LEN]
77 |                     .copy_from_slice(&first_value);
78 |             }
79 | 
80 |             pi
81 |         })
82 |     }
83 | }
84 | 
--------------------------------------------------------------------------------
/verifiable-db/src/cells_tree/empty_node.rs:
--------------------------------------------------------------------------------
1 | //! Module handling the empty node inside a cells tree
2 | 
3 | use super::public_inputs::PublicInputs;
4 | use crate::{CBuilder, D, F};
5 | use anyhow::Result;
6 | use mp2_common::{
7 |     poseidon::empty_poseidon_hash, public_inputs::PublicInputCommon, utils::ToTargets,
8 | };
9 | use plonky2::{iop::witness::PartialWitness, plonk::proof::ProofWithPublicInputsTarget};
10 | use plonky2_ecgfp5::gadgets::curve::CircuitBuilderEcGFp5;
11 | use recursion_framework::circuit_builder::CircuitLogicWires;
12 | use serde::{Deserialize, Serialize};
13 | 
14 | #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
15 | pub struct EmptyNodeWires;
16 | 
17 | #[derive(Clone, Debug, Serialize, Deserialize)]
18 | pub struct EmptyNodeCircuit;
19 | 
20 | impl EmptyNodeCircuit {
21 |     pub fn build(b: &mut CBuilder) -> EmptyNodeWires {
22 |         // h = Poseidon("")
23 |         let empty_hash = empty_poseidon_hash();
24 |         let h = b.constant_hash(*empty_hash).elements;
25 | 
26 |         // ZERO
27 |         let zero = b.zero();
28 | 
29 |         // CURVE_ZERO
30 |         let curve_zero = b.curve_zero().to_targets();
31 | 
32 |         // Register the public inputs.
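        // The argument order mirrors the layout checked by the test below:
        // node hash, individual values digest, multiplier values digest,
        // individual counter, multiplier counter.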
33 | PublicInputs::new(&h, &curve_zero, &curve_zero, &zero, &zero).register(b); 34 | 35 | EmptyNodeWires 36 | } 37 | } 38 | 39 | /// Num of children = 0 40 | impl CircuitLogicWires for EmptyNodeWires { 41 | type CircuitBuilderParams = (); 42 | 43 | type Inputs = EmptyNodeCircuit; 44 | 45 | const NUM_PUBLIC_INPUTS: usize = PublicInputs::::total_len(); 46 | 47 | fn circuit_logic( 48 | builder: &mut CBuilder, 49 | _verified_proofs: [&ProofWithPublicInputsTarget; 0], 50 | _: Self::CircuitBuilderParams, 51 | ) -> Self { 52 | EmptyNodeCircuit::build(builder) 53 | } 54 | 55 | fn assign_input(&self, _inputs: Self::Inputs, _pw: &mut PartialWitness) -> Result<()> { 56 | Ok(()) 57 | } 58 | } 59 | 60 | #[cfg(test)] 61 | mod tests { 62 | use super::*; 63 | use crate::C; 64 | use mp2_test::circuit::{run_circuit, UserCircuit}; 65 | use plonky2::field::types::Field; 66 | use plonky2_ecgfp5::curve::curve::WeierstrassPoint; 67 | 68 | impl UserCircuit for EmptyNodeCircuit { 69 | type Wires = EmptyNodeWires; 70 | 71 | fn build(b: &mut CBuilder) -> Self::Wires { 72 | EmptyNodeCircuit::build(b) 73 | } 74 | 75 | fn prove(&self, _pw: &mut PartialWitness, _wires: &Self::Wires) {} 76 | } 77 | 78 | #[test] 79 | fn test_cells_tree_empty_node_circuit() { 80 | let test_circuit = EmptyNodeCircuit; 81 | let proof = run_circuit::(test_circuit); 82 | let pi = PublicInputs::from_slice(&proof.public_inputs); 83 | // Check the node Poseidon hash 84 | { 85 | let empty_hash = empty_poseidon_hash(); 86 | assert_eq!(pi.h, empty_hash.elements); 87 | } 88 | // Check individual values digest 89 | assert_eq!( 90 | pi.individual_values_digest_point(), 91 | WeierstrassPoint::NEUTRAL 92 | ); 93 | // Check multiplier values digest 94 | assert_eq!( 95 | pi.multiplier_values_digest_point(), 96 | WeierstrassPoint::NEUTRAL 97 | ); 98 | // Check individual counter 99 | assert_eq!(pi.individual_counter(), F::ZERO); 100 | // Check multiplier counter 101 | assert_eq!(pi.multiplier_counter(), F::ZERO); 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /groth16-framework/tests/common/context.rs: -------------------------------------------------------------------------------- 1 | //! Testing context used in the cases 2 | 3 | use super::{NUM_PREPROCESSING_IO, NUM_QUERY_IO}; 4 | use groth16_framework::{compile_and_generate_assets, utils::clone_circuit_data}; 5 | use mp2_common::{C, D, F}; 6 | use mp2_test::circuit::TestDummyCircuit; 7 | use recursion_framework::framework_testing::TestingRecursiveCircuits; 8 | use verifiable_db::{ 9 | api::WrapCircuitParams, 10 | query::pi_len, 11 | revelation::api::Parameters as RevelationParameters, 12 | test_utils::{ 13 | INDEX_TREE_MAX_DEPTH, MAX_NUM_COLUMNS, MAX_NUM_ITEMS_PER_OUTPUT, MAX_NUM_OUTPUTS, 14 | MAX_NUM_PLACEHOLDERS, MAX_NUM_PREDICATE_OPS, MAX_NUM_RESULT_OPS, ROW_TREE_MAX_DEPTH, 15 | }, 16 | }; 17 | 18 | /// Test context 19 | pub(crate) struct TestContext { 20 | pub(crate) preprocessing_circuits: TestingRecursiveCircuits, 21 | pub(crate) query_circuits: TestingRecursiveCircuits, 22 | pub(crate) revelation_params: RevelationParameters< 23 | ROW_TREE_MAX_DEPTH, 24 | INDEX_TREE_MAX_DEPTH, 25 | MAX_NUM_COLUMNS, 26 | MAX_NUM_PREDICATE_OPS, 27 | MAX_NUM_RESULT_OPS, 28 | MAX_NUM_OUTPUTS, 29 | MAX_NUM_ITEMS_PER_OUTPUT, 30 | MAX_NUM_PLACEHOLDERS, 31 | >, 32 | pub(crate) wrap_circuit: 33 | WrapCircuitParams, 34 | } 35 | 36 | impl TestContext { 37 | /// Create the test context. 38 | pub(crate) fn new() -> Self { 39 | // Generate a fake preprocessing circuit set. 
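        // (`TestingRecursiveCircuits` provides a dummy recursive circuit set,
        // so the revelation parameters can be built without running the real
        // preprocessing circuits; see the `framework_testing` module of the
        // recursion framework.)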
40 |         let preprocessing_circuits =
41 |             TestingRecursiveCircuits::<F, C, D, NUM_PREPROCESSING_IO>::default();
42 | 
43 |         // Generate a fake query circuit set.
44 |         let query_circuits = TestingRecursiveCircuits::<F, C, D, NUM_QUERY_IO>::default();
45 |         let dummy_universal_circuit =
46 |             TestDummyCircuit::<{ pi_len::<MAX_NUM_ITEMS_PER_OUTPUT>() }>::build();
47 | 
48 |         // Create the revelation parameters.
49 |         let revelation_params = RevelationParameters::<
50 |             ROW_TREE_MAX_DEPTH,
51 |             INDEX_TREE_MAX_DEPTH,
52 |             MAX_NUM_COLUMNS,
53 |             MAX_NUM_PREDICATE_OPS,
54 |             MAX_NUM_RESULT_OPS,
55 |             MAX_NUM_OUTPUTS,
56 |             MAX_NUM_ITEMS_PER_OUTPUT,
57 |             MAX_NUM_PLACEHOLDERS,
58 |         >::build(
59 |             query_circuits.get_recursive_circuit_set(), // unused, so we provide a dummy one
60 |             dummy_universal_circuit.circuit_data().verifier_data(),
61 |             preprocessing_circuits.get_recursive_circuit_set(),
62 |             preprocessing_circuits
63 |                 .verifier_data_for_input_proofs::<1>()
64 |                 .last()
65 |                 .unwrap(),
66 |         );
67 | 
68 |         // Create the wrap circuit.
69 |         let wrap_circuit = WrapCircuitParams::<
70 |             MAX_NUM_OUTPUTS,
71 |             MAX_NUM_ITEMS_PER_OUTPUT,
72 |             MAX_NUM_PLACEHOLDERS,
73 |         >::build(revelation_params.get_circuit_set());
74 | 
75 |         Self {
76 |             preprocessing_circuits,
77 |             query_circuits,
78 |             revelation_params,
79 |             wrap_circuit,
80 |         }
81 |     }
82 | 
83 |     /// Generate the Groth16 asset files.
84 |     pub(crate) fn generate_assets(&self, asset_dir: &str) {
85 |         let circuit_data = clone_circuit_data(self.wrap_circuit.circuit_data()).unwrap();
86 | 
87 |         compile_and_generate_assets(circuit_data, asset_dir)
88 |             .expect("Failed to generate the Groth16 asset files");
89 |     }
90 | }
91 | 
--------------------------------------------------------------------------------
/inspect/src/main.rs:
--------------------------------------------------------------------------------
1 | use anyhow::*;
2 | use clap::{Parser, Subcommand};
3 | use index::{IndexDb, IndexPayloadFormatter};
4 | use repl::Repl;
5 | use rows::{RowDb, RowPayloadFormatter};
6 | use ryhope::{
7 |     storage::pgsql::{SqlServerConnection, SqlStorageSettings, ToFromBytea},
8 |     InitSettings, UserEpoch,
9 | };
10 | use serde::Serialize;
11 | 
12 | mod index;
13 | mod repl;
14 | mod rows;
15 | 
16 | #[derive(Parser)]
17 | #[command()]
18 | struct Args {
19 |     #[arg(long, default_value = "postgresql://localhost/storage")]
20 |     /// How to connect to the database
21 |     db_uri: String,
22 | 
23 |     #[arg(short = 'T', long)]
24 |     /// The table storing the tree
25 |     db_table: String,
26 | 
27 |     #[arg(short = 'E', long = "at")]
28 |     /// If set, try to view the tree at this epoch
29 |     epoch: Option<UserEpoch>,
30 | 
31 |     #[command(subcommand)]
32 |     /// The type of tree to load from the database
33 |     tree_type: TreeReader,
34 | }
35 | 
36 | #[derive(Subcommand)]
37 | enum TreeReader {
38 |     RowTree {
39 |         #[arg(short = 'C', long)]
40 |         /// A comma-separated list of `id=name` pairs mapping column IDs to user-facing names
41 |         column_names: Option<String>,
42 | 
43 |         #[arg(short = 'D', long, conflicts_with = "column_names")]
44 |         /// A comma-separated list of `id=name` pairs mapping column IDs to user-facing names
45 |         column_db: Option<String>,
46 |     },
47 |     IndexTree,
48 | }
49 | 
50 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Hash, Debug)]
51 | // #[serde(from = "Vec<u8>")]
52 | struct Key {
53 |     k: Vec<u8>,
54 | }
55 | 
56 | impl ToFromBytea for Key {
57 |     fn to_bytea(&self) -> Vec<u8> {
58 |         self.k.clone()
59 |     }
60 | 
61 |     fn from_bytea(k: Vec<u8>) -> Self {
62 |         Self { k }
63 |     }
64 | }
65 | 
66 | #[tokio::main(flavor = "current_thread")]
67 | async fn main() -> Result<()> {
68 |     let args = Args::parse();
69 | 
70 | 
match args.tree_type { 71 | TreeReader::RowTree { 72 | column_names, 73 | column_db, 74 | } => { 75 | let tree_db = RowDb::new( 76 | InitSettings::MustExist, 77 | SqlStorageSettings::new( 78 | &args.db_table, 79 | SqlServerConnection::NewConnection(args.db_uri.clone()), 80 | )?, 81 | ) 82 | .await?; 83 | 84 | let payload_fmt = if let Some(column_name) = column_names.as_ref() { 85 | RowPayloadFormatter::from_string(column_name)? 86 | } else if let Some(_column_db) = column_db.as_ref() { 87 | todo!() 88 | } else { 89 | RowPayloadFormatter::new() 90 | }; 91 | 92 | let mut repl = Repl::new(tree_db, payload_fmt).await?; 93 | if let Some(epoch) = args.epoch { 94 | repl.set_epoch(epoch).await?; 95 | } 96 | repl.run().await 97 | } 98 | TreeReader::IndexTree => { 99 | let tree_db = IndexDb::new( 100 | InitSettings::MustExist, 101 | SqlStorageSettings::new( 102 | &args.db_table, 103 | SqlServerConnection::NewConnection(args.db_uri.clone()), 104 | )?, 105 | ) 106 | .await?; 107 | 108 | let payload_fmt = IndexPayloadFormatter::default(); 109 | 110 | let mut repl = Repl::new(tree_db, payload_fmt).await?; 111 | if let Some(epoch) = args.epoch { 112 | repl.set_epoch(epoch).await?; 113 | } 114 | repl.run().await 115 | } 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "gnark-utils", 4 | "groth16-framework", 5 | "inspect", 6 | "mp2-common", 7 | "mp2-test", 8 | "mp2-v1", 9 | "parsil", 10 | "ryhope", 11 | "recursion-framework", 12 | "verifiable-db", 13 | ] 14 | resolver = "2" 15 | 16 | [workspace.dependencies] 17 | alloy = { version = "1.0", default-features = false, features = [ 18 | "full", 19 | "consensus", 20 | "contract", 21 | "getrandom", 22 | "network", 23 | "node-bindings", 24 | "providers", 25 | "reqwest", 26 | "rlp", 27 | "rpc", 28 | "rpc-types", 29 | "signer-local", 30 | "sol-types", 31 | "transport-http", 32 | "transports", 33 | "postgres", 34 | ] } 35 | anyhow = "1.0" 36 | base64 = "0.22" 37 | bb8 = "0.9" 38 | bb8-postgres = "0.9" 39 | bincode = "1.3" 40 | camelpaste = "0.1" 41 | clap = { version = "4.5", features = ["derive"] } 42 | colored = "3" 43 | csv = "1.3" 44 | delegate = "0.13" 45 | derive_more = { version = "2", features = ["constructor", "deref", "from", "into"] } 46 | dialoguer = { version = "0.11", features = ["fuzzy-select"] } 47 | env_logger = "0.11" 48 | envconfig = "0.11" 49 | ethereum-types = "0.14" 50 | eth_trie = { git = "https://github.com/nikkolasg/eth-trie.rs" } 51 | futures = "0.3" 52 | git-version = "0.3" 53 | glob = "0.3" 54 | gobuild = "0.1.0-alpha.1" 55 | hashbrown = "0.15" 56 | hex = "0.4" 57 | itertools = "0.14" 58 | jammdb = "0.11.0" 59 | lazy_static = "1.5.0" 60 | log = "0.4" 61 | num = "0.4" 62 | paste = "1.0" 63 | plonky2 = "0.2.2" 64 | # supporting latest plonky2 65 | plonky2_crypto = { git = "https://github.com/Lagrange-Labs/plonky2-crypto" } 66 | plonky2_ecdsa = { git = "https://github.com/Lagrange-Labs/plonky2-ecdsa" } 67 | plonky2_ecgfp5 = { git = "https://github.com/Lagrange-Labs/plonky2-ecgfp5" } 68 | plonky2_monolith = "0.1.0" 69 | plonky2x = { git = "https://github.com/Lagrange-Labs/succinctx", branch = "fix-build" } 70 | poseidon2_plonky2 = { git = "https://github.com/Lagrange-Labs/plonky2", branch = "upstream" } 71 | postgres-types = { version = "0.2.6", features = ["with-serde_json-1"] } 72 | rand = "0.8" 73 | rand_chacha = "0.3.1" 74 | revm = { version = "3.5", 
default-features = false }
75 | rlp = "0.6"
76 | rstest = "0.25"
77 | serde = { version = "1.0", features = ["derive"] }
78 | serde_json = "1.0"
79 | serial_test = "3.0"
80 | sha2 = "0.10"
81 | sha256 = { version = "1.5.0", default-features = false }
82 | sha3 = "0.10"
83 | simple_logger = { version = "5.0.0", default-features = false, features = [
84 |     "colors",
85 | ] }
86 | sqlparser = "0.49"
87 | stderrlog = { version = "0.6.0", default-features = false }
88 | tabled = { version = "0.19", features = ["ansi"] }
89 | test-log = "0.2.16"
90 | testfile = "0.1.5"
91 | thiserror = "2.0"
92 | tokio = { version = "1.34", features = [
93 |     "sync",
94 |     "macros",
95 |     "rt-multi-thread",
96 |     "fs",
97 | ] }
98 | tokio-postgres = { version = "0.7", features = ["with-chrono-0_4"] }
99 | tracing = "0.1.40"
100 | 
101 | [profile.dev]
102 | # Reference: https://doc.rust-lang.org/cargo/reference/profiles.html#dev
103 | # Proving is too slow without optimizations
104 | opt-level = 3
105 | 
106 | [profile.release]
107 | # Reference: https://doc.rust-lang.org/cargo/reference/profiles.html#release
108 | # Proving is a bottleneck, enable aggressive optimizations.
109 | # Reference: https://nnethercote.github.io/perf-book/build-configuration.html#codegen-units
110 | codegen-units = 1
111 | lto = "fat"
112 | 
113 | [patch.crates-io]
114 | plonky2 = { git = "https://github.com/Lagrange-Labs/plonky2", branch = "upstream" }
115 | plonky2_monolith = { git = "https://github.com/Lagrange-Labs/monolith" }
116 | plonky2_field = { git = "https://github.com/Lagrange-Labs/plonky2", branch = "upstream" }
117 | 
118 | [workspace.metadata.release]
119 | publish = false
120 | pre-release-hook = ["sh", "../.github/changelog.sh"]
121 | 
--------------------------------------------------------------------------------
/groth16-framework/src/test_utils.rs:
--------------------------------------------------------------------------------
1 | //! Utility functions used for testing
2 | //! TODO: may move this code and the simple unit test of `lib.rs` to `tests/common`.
3 | 
4 | use crate::{
5 |     prover::groth16::combine_proofs,
6 |     utils::{hex_to_u256, read_file, write_file},
7 |     EVMVerifier, Groth16Proof, Groth16Prover, Groth16Verifier, C,
8 | };
9 | use alloy::{contract::Interface, dyn_abi::DynSolValue, json_abi::JsonAbi};
10 | use mp2_common::{proof::deserialize_proof, D, F};
11 | use plonky2::plonk::proof::ProofWithPublicInputs;
12 | use std::path::Path;
13 | 
14 | /// Test Groth16 proving, verification and Solidity verification.
15 | pub fn test_groth16_proving_and_verification(asset_dir: &str, plonky2_proof: &[u8]) {
16 |     // Generate the Groth16 proof.
17 |     let plonky2_proof = deserialize_proof(plonky2_proof).unwrap();
18 |     let groth16_proof = groth16_prove(asset_dir, &plonky2_proof);
19 | 
20 |     // Save the combined full proof.
21 |     let full_proof_path = Path::new(asset_dir).join("full_proof.bin");
22 |     let full_proof = combine_proofs(groth16_proof.clone(), plonky2_proof).unwrap();
23 |     write_file(full_proof_path, &full_proof).unwrap();
24 | 
25 |     // Verify the proof off-chain.
26 |     groth16_verify(asset_dir, &groth16_proof);
27 | 
28 |     // Verify the proof on-chain.
29 |     evm_verify(asset_dir, &groth16_proof);
30 | }
31 | 
32 | /// Test to generate the proof.
33 | fn groth16_prove(asset_dir: &str, plonky2_proof: &ProofWithPublicInputs<F, C, D>) -> Groth16Proof {
34 |     // Read the r1cs, pk and circuit bytes from the asset dir.
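    // These binary assets are generated ahead of time by
    // `compile_and_generate_assets` (see `TestContext::generate_assets`).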
35 | let r1cs = read_file(Path::new(asset_dir).join("r1cs.bin")).unwrap(); 36 | let pk = read_file(Path::new(asset_dir).join("pk.bin")).unwrap(); 37 | let circuit = read_file(Path::new(asset_dir).join("circuit.bin")).unwrap(); 38 | 39 | // Initialize the Groth16 prover. 40 | let prover = 41 | Groth16Prover::from_bytes(r1cs, pk, circuit).expect("Failed to initialize the prover"); 42 | 43 | // Construct the file paths to save the Groth16 and full proofs. 44 | let groth16_proof_path = Path::new(asset_dir).join("groth16_proof.json"); 45 | 46 | // Generate the Groth16 proof. 47 | let groth16_proof = prover 48 | .generate_groth16_proof(plonky2_proof) 49 | .expect("Failed to generate the proof"); 50 | write_file( 51 | groth16_proof_path, 52 | serde_json::to_string(&groth16_proof).unwrap().as_bytes(), 53 | ) 54 | .unwrap(); 55 | 56 | groth16_proof 57 | } 58 | 59 | /// Test to verify the proof. 60 | fn groth16_verify(asset_dir: &str, proof: &Groth16Proof) { 61 | let verifier = Groth16Verifier::new(asset_dir).expect("Failed to initialize the verifier"); 62 | 63 | verifier.verify(proof).expect("Failed to verify the proof") 64 | } 65 | 66 | /// Test the Solidity verification. 67 | fn evm_verify(asset_dir: &str, proof: &Groth16Proof) { 68 | let solidity_file_path = Path::new(asset_dir) 69 | .join("Verifier.sol") 70 | .to_string_lossy() 71 | .to_string(); 72 | 73 | // Build the contract interface for encoding the arguments of verification function. 74 | let abi = JsonAbi::parse([ 75 | "function verifyProof(uint256[8] calldata proof, uint256[3] calldata input)", 76 | ]) 77 | .unwrap(); 78 | let contract = Interface::new(abi); 79 | 80 | let input = [&proof.proofs, &proof.inputs].map(|s| { 81 | DynSolValue::FixedArray( 82 | s.iter() 83 | .map(|s| DynSolValue::Uint(hex_to_u256(s).unwrap(), 256)) 84 | .collect(), 85 | ) 86 | }); 87 | let calldata = contract 88 | .encode_input("verifyProof", &input) 89 | .expect("Failed to encode the inputs of Solidity contract function verifyProof"); 90 | 91 | let verifier = 92 | EVMVerifier::new(&solidity_file_path).expect("Failed to initialize the EVM verifier"); 93 | 94 | let verified = verifier.verify(calldata); 95 | assert!(verified.is_ok()); 96 | } 97 | -------------------------------------------------------------------------------- /devenv.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "devenv": { 4 | "locked": { 5 | "dir": "src/modules", 6 | "lastModified": 1747717470, 7 | "owner": "cachix", 8 | "repo": "devenv", 9 | "rev": "c7f2256ee4a4a4ee9cbf1e82a6e49b253c374995", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "dir": "src/modules", 14 | "owner": "cachix", 15 | "repo": "devenv", 16 | "type": "github" 17 | } 18 | }, 19 | "fenix": { 20 | "inputs": { 21 | "nixpkgs": [ 22 | "nixpkgs" 23 | ], 24 | "rust-analyzer-src": "rust-analyzer-src" 25 | }, 26 | "locked": { 27 | "lastModified": 1747392669, 28 | "owner": "nix-community", 29 | "repo": "fenix", 30 | "rev": "c3c27e603b0d9b5aac8a16236586696338856fbb", 31 | "type": "github" 32 | }, 33 | "original": { 34 | "owner": "nix-community", 35 | "repo": "fenix", 36 | "type": "github" 37 | } 38 | }, 39 | "flake-compat": { 40 | "flake": false, 41 | "locked": { 42 | "lastModified": 1747046372, 43 | "owner": "edolstra", 44 | "repo": "flake-compat", 45 | "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885", 46 | "type": "github" 47 | }, 48 | "original": { 49 | "owner": "edolstra", 50 | "repo": "flake-compat", 51 | "type": "github" 52 | } 53 | }, 54 | "git-hooks": { 55 
| "inputs": { 56 | "flake-compat": "flake-compat", 57 | "gitignore": "gitignore", 58 | "nixpkgs": [ 59 | "nixpkgs" 60 | ] 61 | }, 62 | "locked": { 63 | "lastModified": 1747372754, 64 | "owner": "cachix", 65 | "repo": "git-hooks.nix", 66 | "rev": "80479b6ec16fefd9c1db3ea13aeb038c60530f46", 67 | "type": "github" 68 | }, 69 | "original": { 70 | "owner": "cachix", 71 | "repo": "git-hooks.nix", 72 | "type": "github" 73 | } 74 | }, 75 | "gitignore": { 76 | "inputs": { 77 | "nixpkgs": [ 78 | "git-hooks", 79 | "nixpkgs" 80 | ] 81 | }, 82 | "locked": { 83 | "lastModified": 1709087332, 84 | "owner": "hercules-ci", 85 | "repo": "gitignore.nix", 86 | "rev": "637db329424fd7e46cf4185293b9cc8c88c95394", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "hercules-ci", 91 | "repo": "gitignore.nix", 92 | "type": "github" 93 | } 94 | }, 95 | "nixpkgs": { 96 | "locked": { 97 | "lastModified": 1746807397, 98 | "owner": "cachix", 99 | "repo": "devenv-nixpkgs", 100 | "rev": "c5208b594838ea8e6cca5997fbf784b7cca1ca90", 101 | "type": "github" 102 | }, 103 | "original": { 104 | "owner": "cachix", 105 | "ref": "rolling", 106 | "repo": "devenv-nixpkgs", 107 | "type": "github" 108 | } 109 | }, 110 | "root": { 111 | "inputs": { 112 | "devenv": "devenv", 113 | "fenix": "fenix", 114 | "git-hooks": "git-hooks", 115 | "nixpkgs": "nixpkgs", 116 | "pre-commit-hooks": [ 117 | "git-hooks" 118 | ] 119 | } 120 | }, 121 | "rust-analyzer-src": { 122 | "flake": false, 123 | "locked": { 124 | "lastModified": 1747900548, 125 | "owner": "rust-lang", 126 | "repo": "rust-analyzer", 127 | "rev": "76ee900d7c1e19d0ffa9a7533f271954fdd4a4f1", 128 | "type": "github" 129 | }, 130 | "original": { 131 | "owner": "rust-lang", 132 | "ref": "nightly", 133 | "repo": "rust-analyzer", 134 | "type": "github" 135 | } 136 | } 137 | }, 138 | "root": "root", 139 | "version": 7 140 | } 141 | -------------------------------------------------------------------------------- /mp2-v1/src/indexing/index.rs: -------------------------------------------------------------------------------- 1 | use plonky2::field::types::Field; 2 | use std::iter::once; 3 | 4 | use crate::{F, H}; 5 | use alloy::primitives::U256; 6 | use derive_more::From; 7 | use mp2_common::{ 8 | poseidon::empty_poseidon_hash, 9 | serialization::{deserialize, serialize, FromBytes, ToBytes}, 10 | types::HashOutput, 11 | utils::ToFields, 12 | }; 13 | use plonky2::{ 14 | hash::hash_types::HashOut, 15 | plonk::config::{GenericHashOut, Hasher}, 16 | }; 17 | use ryhope::NodePayload; 18 | use serde::{Deserialize, Serialize}; 19 | 20 | use super::{row::RowTreeKey, ColumnID}; 21 | 22 | /// Hardcoded to use blocks but the spirit for any primary index is the same 23 | #[derive(Debug, Default, Clone, Serialize, Deserialize)] 24 | pub struct IndexNode { 25 | // identifier and value are needed to compute the hash 26 | pub identifier: ColumnID, 27 | #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] 28 | pub value: VectorU256, 29 | pub row_tree_root_key: RowTreeKey, 30 | pub row_tree_hash: HashOutput, 31 | pub row_tree_root_primary: PrimaryIndex, 32 | // information filled during aggregation inside ryhope 33 | pub node_hash: HashOutput, 34 | pub min: U256, 35 | pub max: U256, 36 | } 37 | 38 | impl IndexNode { 39 | pub fn new( 40 | identifier: ColumnID, 41 | value: U256, 42 | row_key: RowTreeKey, 43 | row_hash: HashOutput, 44 | row_primary: PrimaryIndex, 45 | ) -> Self { 46 | Self { 47 | identifier, 48 | value: value.into(), 49 | row_tree_root_key: row_key, 50 | row_tree_hash: 
row_hash,
51 |             row_tree_root_primary: row_primary,
52 |             ..Default::default()
53 |         }
54 |     }
55 | }
56 | 
57 | impl<
58 |         PrimaryIndex: std::fmt::Debug + Default + Clone + Sized + Serialize + for<'a> Deserialize<'a>,
59 |     > NodePayload for IndexNode<PrimaryIndex>
60 | {
61 |     fn aggregate<I: IntoIterator<Item = Option<Self>>>(&mut self, children: I) {
62 |         // Currently ryhope always returns the expected number of children,
63 |         // which is two.
64 |         let children = children.into_iter().collect::<Vec<_>>();
65 |         assert_eq!(children.len(), 2);
66 |         let null_hash = empty_poseidon_hash();
67 | 
68 |         let (left, right) = match [&children[0], &children[1]] {
69 |             // no children
70 |             [None, None] => {
71 |                 self.min = self.value.0;
72 |                 self.max = self.value.0;
73 |                 (*null_hash, *null_hash)
74 |             }
75 |             [Some(left), None] => {
76 |                 self.min = left.min;
77 |                 self.max = self.value.0;
78 |                 (HashOut::from_bytes(&left.node_hash.0), *null_hash)
79 |             }
80 |             [Some(left), Some(right)] => {
81 |                 self.min = left.min;
82 |                 self.max = right.max;
83 |                 (
84 |                     HashOut::from_bytes(&left.node_hash.0),
85 |                     HashOut::from_bytes(&right.node_hash.0),
86 |                 )
87 |             }
88 |             [None, Some(_)] => panic!("ryhope sbbst is wrong"),
89 |         };
90 |         let inputs = left
91 |             .to_fields()
92 |             .into_iter()
93 |             .chain(right.to_fields())
94 |             .chain(self.min.to_fields())
95 |             .chain(self.max.to_fields())
96 |             .chain(once(F::from_canonical_u64(self.identifier)))
97 |             .chain(self.value.0.to_fields())
98 |             .chain(HashOut::from_bytes(&self.row_tree_hash.0).to_fields())
99 |             .collect::<Vec<_>>();
100 |         self.node_hash = HashOutput(H::hash_no_pad(&inputs).to_bytes().try_into().unwrap());
101 |     }
102 | }
103 | 
104 | #[derive(Clone, Hash, Debug, PartialOrd, PartialEq, Ord, Eq, Default, From)]
105 | pub struct VectorU256(pub U256);
106 | 
107 | impl ToBytes for VectorU256 {
108 |     fn to_bytes(&self) -> Vec<u8> {
109 |         self.0.to_be_bytes_trimmed_vec()
110 |     }
111 | }
112 | 
113 | impl FromBytes for VectorU256 {
114 |     fn from_bytes(
115 |         bytes: &[u8],
116 |     ) -> std::result::Result {
117 |         std::result::Result::Ok(VectorU256(U256::from_be_slice(bytes)))
118 |     }
119 | }
120 | 
--------------------------------------------------------------------------------
/mp2-v1/src/indexing/mod.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | 
3 | use crate::indexing::{index::IndexNode, row::RowPayload};
4 | use alloy::primitives::U256;
5 | use block::MerkleIndexTree;
6 | use mp2_common::{poseidon::empty_poseidon_hash, types::HashOutput};
7 | use row::MerkleRowTree;
8 | use ryhope::{
9 |     storage::pgsql::{SqlServerConnection, SqlStorageSettings},
10 |     tree::scapegoat,
11 |     InitSettings, UserEpoch,
12 | };
13 | 
14 | pub mod block;
15 | pub mod cell;
16 | pub mod index;
17 | pub mod row;
18 | 
19 | pub type ColumnID = u64;
20 | 
21 | /// Build `MerkleIndexTree` and `MerkleRowTree` trees from tables
22 | /// `index_table_name` and `row_table_name` in the DB with URL `db_url`.
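/// A minimal usage sketch (the connection string and table names below are
/// illustrative only):
/// ```ignore
/// let (index_tree, row_tree) =
///     load_trees("postgresql://localhost/storage", "idx".into(), "rows".into()).await?;
/// ```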
23 | pub async fn load_trees( 24 | db_url: &str, 25 | index_table_name: String, 26 | row_table_name: String, 27 | ) -> Result<(MerkleIndexTree, MerkleRowTree)> { 28 | let index_tree = MerkleIndexTree::new( 29 | InitSettings::MustExist, 30 | SqlStorageSettings::new( 31 | &index_table_name, 32 | SqlServerConnection::NewConnection(db_url.to_string()), 33 | )?, 34 | ) 35 | .await?; 36 | let row_tree = MerkleRowTree::new( 37 | InitSettings::MustExist, 38 | SqlStorageSettings::new_with_mapper( 39 | &row_table_name, 40 | SqlServerConnection::NewConnection(db_url.to_string()), 41 | index_table_name, 42 | )?, 43 | ) 44 | .await?; 45 | 46 | Ok((index_tree, row_tree)) 47 | } 48 | 49 | /// Build `MerkleIndexTree` and `MerkleRowTree` trees starting from 50 | /// `genesis_block`. The tables employed in the DB with URL `db_url` 51 | /// to store the trees are `index_table_name` and `row_table_name`, 52 | /// respectively. The following additional parameters are required: 53 | /// - `alpha`: Parameter of the Scapegoat tree employed for the `MerkleRowTree` 54 | /// - `reset_if_exist`: if true, an existing tree would be deleted 55 | pub async fn build_trees( 56 | db_url: &str, 57 | index_table_name: String, 58 | row_table_name: String, 59 | genesis_block: UserEpoch, 60 | alpha: scapegoat::Alpha, 61 | max_depth: usize, 62 | reset_if_exist: bool, 63 | ) -> Result<(MerkleIndexTree, MerkleRowTree)> { 64 | let db_settings_index = SqlStorageSettings::new( 65 | &index_table_name, 66 | SqlServerConnection::NewConnection(db_url.to_string()), 67 | )?; 68 | let db_settings_row = SqlStorageSettings::new_with_mapper( 69 | &row_table_name, 70 | SqlServerConnection::NewConnection(db_url.to_string()), 71 | index_table_name, 72 | )?; 73 | 74 | let index_tree = ryhope::new_index_tree( 75 | genesis_block as UserEpoch, 76 | db_settings_index, 77 | reset_if_exist, 78 | ) 79 | .await?; 80 | let row_tree = ryhope::new_row_tree( 81 | genesis_block as UserEpoch, 82 | alpha, 83 | max_depth, 84 | db_settings_row, 85 | reset_if_exist, 86 | ) 87 | .await?; 88 | 89 | Ok((index_tree, row_tree)) 90 | } 91 | 92 | // NOTE this might be good to have on public API ? 
93 | // cc/ @andrus 94 | pub trait LagrangeNode { 95 | fn value(&self) -> U256; 96 | fn hash(&self) -> HashOutput; 97 | fn min(&self) -> U256; 98 | fn max(&self) -> U256; 99 | fn embedded_hash(&self) -> HashOutput; 100 | } 101 | 102 | impl LagrangeNode for RowPayload { 103 | fn value(&self) -> U256 { 104 | self.secondary_index_value() 105 | } 106 | 107 | fn hash(&self) -> HashOutput { 108 | self.hash 109 | } 110 | 111 | fn min(&self) -> U256 { 112 | self.min 113 | } 114 | 115 | fn max(&self) -> U256 { 116 | self.max 117 | } 118 | 119 | fn embedded_hash(&self) -> HashOutput { 120 | self.cell_root_hash 121 | .unwrap_or(HashOutput::from(*empty_poseidon_hash())) 122 | } 123 | } 124 | 125 | impl LagrangeNode for IndexNode { 126 | fn value(&self) -> U256 { 127 | self.value.0 128 | } 129 | 130 | fn hash(&self) -> HashOutput { 131 | self.node_hash 132 | } 133 | 134 | fn min(&self) -> U256 { 135 | self.min 136 | } 137 | 138 | fn max(&self) -> U256 { 139 | self.max 140 | } 141 | 142 | fn embedded_hash(&self) -> HashOutput { 143 | self.row_tree_hash 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /gnark-utils/lib/deserialize.go: -------------------------------------------------------------------------------- 1 | // Deserialization functions 2 | 3 | package main 4 | 5 | import "C" 6 | import ( 7 | "encoding/json" 8 | 9 | "github.com/pkg/errors" 10 | "github.com/succinctlabs/gnark-plonky2-verifier/plonk/gates" 11 | "github.com/succinctlabs/gnark-plonky2-verifier/types" 12 | "github.com/succinctlabs/gnark-plonky2-verifier/variables" 13 | ) 14 | 15 | // Deserialize the proof with public inputs. 16 | func DeserializeProofWithPublicInputs(str string) (*variables.ProofWithPublicInputs, error) { 17 | var raw types.ProofWithPublicInputsRaw 18 | err := json.Unmarshal([]byte(str), &raw) 19 | if err != nil { 20 | return nil, err 21 | } 22 | 23 | proofWithPublicInputs := variables.DeserializeProofWithPublicInputs(raw) 24 | return &proofWithPublicInputs, nil 25 | } 26 | 27 | // Deserialize the verifier data. 28 | func DeserializeVerifierOnlyCircuitData(str string) (*variables.VerifierOnlyCircuitData, error) { 29 | var raw types.VerifierOnlyCircuitDataRaw 30 | err := json.Unmarshal([]byte(str), &raw) 31 | if err != nil { 32 | return nil, err 33 | } 34 | 35 | verifierOnlyCircuitData := variables.DeserializeVerifierOnlyCircuitData(raw) 36 | return &verifierOnlyCircuitData, nil 37 | } 38 | 39 | // Deserialize the circuit data. Reference gnark-plonky2-verifier 40 | // [ReadCommonCircuitData](https://github.com/succinctlabs/gnark-plonky2-verifier/blob/7025b2efd67b5ed30bd85f93c694774106d21b3d/types/common_data.go#L61) 41 | // function, it reads the circuit data from a file, but we want to deserialize 42 | // from a string here. 
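// The field-by-field copy below mirrors the JSON layout of plonky2's
// serialized common circuit data; doing the copy manually is what lets us
// read from a string instead of a file.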
43 | func DeserializeCommonCircuitData(str string) (*types.CommonCircuitData, error) { 44 | var raw types.CommonCircuitDataRaw 45 | err := json.Unmarshal([]byte(str), &raw) 46 | if err != nil { 47 | return nil, err 48 | } 49 | 50 | var commonCircuitData types.CommonCircuitData 51 | commonCircuitData.Config.NumWires = raw.Config.NumWires 52 | commonCircuitData.Config.NumRoutedWires = raw.Config.NumRoutedWires 53 | commonCircuitData.Config.NumConstants = raw.Config.NumConstants 54 | commonCircuitData.Config.UseBaseArithmeticGate = raw.Config.UseBaseArithmeticGate 55 | commonCircuitData.Config.SecurityBits = raw.Config.SecurityBits 56 | commonCircuitData.Config.NumChallenges = raw.Config.NumChallenges 57 | commonCircuitData.Config.ZeroKnowledge = raw.Config.ZeroKnowledge 58 | commonCircuitData.Config.MaxQuotientDegreeFactor = raw.Config.MaxQuotientDegreeFactor 59 | 60 | commonCircuitData.Config.FriConfig.RateBits = raw.Config.FriConfig.RateBits 61 | commonCircuitData.Config.FriConfig.CapHeight = raw.Config.FriConfig.CapHeight 62 | commonCircuitData.Config.FriConfig.ProofOfWorkBits = raw.Config.FriConfig.ProofOfWorkBits 63 | commonCircuitData.Config.FriConfig.NumQueryRounds = raw.Config.FriConfig.NumQueryRounds 64 | 65 | commonCircuitData.FriParams.DegreeBits = raw.FriParams.DegreeBits 66 | commonCircuitData.DegreeBits = raw.FriParams.DegreeBits 67 | commonCircuitData.FriParams.Config.RateBits = raw.FriParams.Config.RateBits 68 | commonCircuitData.FriParams.Config.CapHeight = raw.FriParams.Config.CapHeight 69 | commonCircuitData.FriParams.Config.ProofOfWorkBits = raw.FriParams.Config.ProofOfWorkBits 70 | commonCircuitData.FriParams.Config.NumQueryRounds = raw.FriParams.Config.NumQueryRounds 71 | commonCircuitData.FriParams.ReductionArityBits = raw.FriParams.ReductionArityBits 72 | 73 | commonCircuitData.GateIds = raw.Gates 74 | 75 | selectorGroupStart := []uint64{} 76 | selectorGroupEnd := []uint64{} 77 | for _, group := range raw.SelectorsInfo.Groups { 78 | selectorGroupStart = append(selectorGroupStart, group.Start) 79 | selectorGroupEnd = append(selectorGroupEnd, group.End) 80 | } 81 | 82 | commonCircuitData.SelectorsInfo = *gates.NewSelectorsInfo( 83 | raw.SelectorsInfo.SelectorIndices, 84 | selectorGroupStart, 85 | selectorGroupEnd, 86 | ) 87 | 88 | commonCircuitData.QuotientDegreeFactor = raw.QuotientDegreeFactor 89 | commonCircuitData.NumGateConstraints = raw.NumGateConstraints 90 | commonCircuitData.NumConstants = raw.NumConstants 91 | commonCircuitData.NumPublicInputs = raw.NumPublicInputs 92 | commonCircuitData.KIs = raw.KIs 93 | commonCircuitData.NumPartialProducts = raw.NumPartialProducts 94 | 95 | // Don't support circuits that have hiding enabled 96 | if raw.FriParams.Hiding { 97 | return nil, errors.New("Circuit has hiding enabled, which is not supported") 98 | } 99 | 100 | return &commonCircuitData, nil 101 | } 102 | -------------------------------------------------------------------------------- /inspect/src/index.rs: -------------------------------------------------------------------------------- 1 | use anyhow::*; 2 | use colored::Colorize; 3 | use dialoguer::MultiSelect; 4 | use mp2_v1::indexing::{ 5 | block::{BlockPrimaryIndex, BlockTree}, 6 | index::IndexNode, 7 | LagrangeNode, 8 | }; 9 | use ryhope::{storage::pgsql::PgsqlStorage, MerkleTreeKvDb}; 10 | use tabled::{builder::Builder, settings::Style}; 11 | 12 | use crate::repl::PayloadFormatter; 13 | 14 | pub(crate) type IndexDb = MerkleTreeKvDb< 15 | BlockTree, 16 | IndexNode, 17 | PgsqlStorage, false>, 18 | >; 19 | 20 | 
struct IndexPayloadFormatterDisplay { 21 | value: bool, 22 | row_tree_root_key: bool, 23 | row_tree_root_hash: bool, 24 | hash: bool, 25 | min: bool, 26 | max: bool, 27 | } 28 | impl std::default::Default for IndexPayloadFormatterDisplay { 29 | fn default() -> Self { 30 | Self { 31 | value: false, 32 | hash: true, 33 | min: true, 34 | max: true, 35 | row_tree_root_key: false, 36 | row_tree_root_hash: false, 37 | } 38 | } 39 | } 40 | impl IndexPayloadFormatterDisplay { 41 | fn header(&self) -> Vec { 42 | let mut r = vec![]; 43 | if self.value { 44 | r.push("value".white().bold().to_string()); 45 | } 46 | if self.hash { 47 | r.push("hash".white().bold().to_string()); 48 | } 49 | if self.min { 50 | r.push("min".white().bold().to_string()); 51 | } 52 | if self.max { 53 | r.push("max".white().bold().to_string()); 54 | } 55 | if self.row_tree_root_key { 56 | r.push("R. tree root key".white().bold().to_string()); 57 | } 58 | if self.row_tree_root_hash { 59 | r.push("R. tree root hash".white().bold().to_string()); 60 | } 61 | r 62 | } 63 | } 64 | 65 | #[derive(Default)] 66 | pub(crate) struct IndexPayloadFormatter { 67 | display: IndexPayloadFormatterDisplay, 68 | } 69 | impl PayloadFormatter> for IndexPayloadFormatter { 70 | fn pretty_payload(&self, payload: &IndexNode) -> String { 71 | let mut builder = Builder::new(); 72 | builder.push_record(self.display.header()); 73 | 74 | let mut r = vec![]; 75 | if self.display.value { 76 | r.push(format!("0x{:x}", payload.value.0)); 77 | } 78 | if self.display.hash { 79 | r.push(hex::encode(&payload.node_hash)); 80 | } 81 | if self.display.min { 82 | r.push(format!("{}", payload.min())); 83 | } 84 | if self.display.max { 85 | r.push(format!("{}", payload.max())); 86 | } 87 | if self.display.row_tree_root_key { 88 | r.push(format!("{:?}", payload.row_tree_root_key)); 89 | } 90 | if self.display.row_tree_root_hash { 91 | r.push(hex::encode(&payload.row_tree_hash)); 92 | } 93 | builder.push_record(r); 94 | 95 | let mut table = builder.build(); 96 | table.with(Style::blank()); 97 | table.to_string() 98 | } 99 | 100 | fn settings(&mut self, _tty: &mut dialoguer::console::Term) -> Result<()> { 101 | if let Some(selection) = MultiSelect::new() 102 | .with_prompt(format!( 103 | "{} select - {} validate", 104 | "[space]".yellow().bold(), 105 | "[enter]".yellow().bold() 106 | )) 107 | .items_checked(&[ 108 | ("value", self.display.value), 109 | ("hash", self.display.hash), 110 | ("min", self.display.min), 111 | ("max", self.display.max), 112 | ("R. tree root key", self.display.row_tree_root_key), 113 | ("R. tree root hash", self.display.row_tree_root_hash), 114 | ]) 115 | .interact_opt() 116 | .unwrap() 117 | { 118 | self.display.value = selection.contains(&0); 119 | self.display.hash = selection.contains(&1); 120 | self.display.min = selection.contains(&2); 121 | self.display.max = selection.contains(&3); 122 | self.display.row_tree_root_key = selection.contains(&4); 123 | self.display.row_tree_root_hash = selection.contains(&5); 124 | } 125 | 126 | Ok(()) 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /mp2-common/src/group_hashing/curve_add.rs: -------------------------------------------------------------------------------- 1 | //! 
Curve point addition arithmetic and circuit functions 2 | 3 | use super::EXTENSION_DEGREE as N; 4 | use plonky2::{ 5 | field::extension::Extendable, hash::hash_types::RichField, 6 | plonk::circuit_builder::CircuitBuilder, 7 | }; 8 | use plonky2_ecgfp5::{ 9 | curve::curve::{Point, WeierstrassPoint}, 10 | gadgets::{ 11 | base_field::CircuitBuilderGFp5, 12 | curve::{CircuitBuilderEcGFp5, CurveTarget}, 13 | }, 14 | }; 15 | 16 | /// Calculate the curve point addition. 17 | pub fn add_curve_point(inputs: &[Point]) -> Point { 18 | assert!(!inputs.is_empty()); 19 | 20 | inputs.iter().cloned().reduce(|acc, p| acc + p).unwrap() 21 | } 22 | 23 | /// Calculate the weierstrass point addition. 24 | pub fn add_weierstrass_point(inputs: &[WeierstrassPoint]) -> WeierstrassPoint { 25 | assert!(!inputs.is_empty()); 26 | 27 | inputs 28 | .iter() 29 | .fold(Point::NEUTRAL, |acc, p| { 30 | acc + Point::decode(p.encode()).unwrap() 31 | }) 32 | .to_weierstrass() 33 | } 34 | 35 | /// Calculate the curve target addition. 36 | pub(crate) fn add_curve_target( 37 | b: &mut CircuitBuilder, 38 | inputs: &[CurveTarget], 39 | ) -> CurveTarget 40 | where 41 | F: RichField + Extendable + Extendable, 42 | CircuitBuilder: CircuitBuilderGFp5 + CircuitBuilderEcGFp5, 43 | { 44 | assert!(!inputs.is_empty()); 45 | 46 | inputs 47 | .iter() 48 | .cloned() 49 | .reduce(|acc, point| b.curve_add(acc, point)) 50 | .unwrap() 51 | } 52 | 53 | #[cfg(test)] 54 | mod tests { 55 | use super::*; 56 | use crate::{group_hashing::CircuitBuilderGroupHashing, C, D, F}; 57 | use anyhow::Result; 58 | use plonky2::{ 59 | field::{ 60 | extension::quintic::QuinticExtension, 61 | types::{Field, Sample}, 62 | }, 63 | iop::witness::PartialWitness, 64 | plonk::{circuit_builder::CircuitBuilder, circuit_data::CircuitConfig}, 65 | }; 66 | use plonky2_ecgfp5::{ 67 | curve::curve::{Point, WeierstrassPoint}, 68 | gadgets::curve::PartialWitnessCurve, 69 | }; 70 | use rand::thread_rng; 71 | 72 | const ARITY: usize = 4; 73 | 74 | /// Test curve point addition. 75 | #[test] 76 | fn test_curve_point_addition_gadget() -> Result<()> { 77 | let config = CircuitConfig::standard_recursion_config(); 78 | let mut b = CircuitBuilder::::new(config); 79 | 80 | // Build the input and output targets. 81 | let input_targets = [0; ARITY].map(|_| b.add_virtual_curve_target()); 82 | let output_target = b.add_curve_point(&input_targets); 83 | 84 | // Register the public inputs, register the output first since it's easy 85 | // to index in proof for testing. 86 | b.register_curve_public_input(output_target); 87 | input_targets 88 | .into_iter() 89 | .for_each(|it| b.register_curve_public_input(it)); 90 | 91 | // Generate random curve points as inputs. 92 | let mut rng = thread_rng(); 93 | let input_values = [0; ARITY].map(|_| Point::sample(&mut rng)); 94 | 95 | // Set the value to target for witness. 96 | let mut pw = PartialWitness::new(); 97 | input_targets 98 | .into_iter() 99 | .zip(input_values) 100 | .for_each(|(it, iv)| pw.set_curve_target(it, iv.to_weierstrass())); 101 | 102 | println!( 103 | "[+] This test curve point addition gadget has {} gates", 104 | b.num_gates() 105 | ); 106 | 107 | // Generate the proof. 108 | let data = b.build::(); 109 | let proof = data.prove(pw)?; 110 | 111 | // Calculate the output point and check with proof. 
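        // The output point was registered first, so it occupies the leading
        // public inputs: x as 5 extension limbs, y as 5 limbs, then the
        // is_inf flag.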
112 | let expected_point = add_curve_point(&input_values).to_weierstrass(); 113 | let real_point = WeierstrassPoint { 114 | x: QuinticExtension(proof.public_inputs[..N].try_into().unwrap()), 115 | y: QuinticExtension(proof.public_inputs[N..N + N].try_into().unwrap()), 116 | is_inf: proof.public_inputs[N + N].is_nonzero(), 117 | }; 118 | assert_eq!( 119 | real_point, expected_point, 120 | "Expected output point must be same with proof" 121 | ); 122 | 123 | // Verify the proof. 124 | data.verify(proof) 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /parsil/src/placeholders.rs: -------------------------------------------------------------------------------- 1 | //! This module handles the validation of adequate use of placeholders within a 2 | //! [`Query`]. 3 | use std::collections::HashSet; 4 | 5 | use anyhow::*; 6 | use sqlparser::ast::{Expr, Query, Value}; 7 | use verifiable_db::query::computational_hash_ids::PlaceholderIdentifier; 8 | 9 | use crate::{ 10 | errors::ValidationError, 11 | symbols::ContextProvider, 12 | utils::ParsilSettings, 13 | visitor::{AstVisitor, Visit}, 14 | }; 15 | 16 | pub struct PlaceholderValidator<'a, C: ContextProvider> { 17 | /// Parsil settings provided by the user 18 | settings: &'a ParsilSettings, 19 | /// Stores at i whether free-standing placeholder $(i+1) has been met 20 | visited: Vec, 21 | /// The largest free-standing placeholder met; 0 if none 22 | current_max_freestanding: usize, 23 | } 24 | impl<'a, C: ContextProvider> PlaceholderValidator<'a, C> { 25 | /// Instantiate a new [`PlaceholderValidator`] from the given settings. 26 | fn new(settings: &'a ParsilSettings) -> Self { 27 | Self { 28 | settings, 29 | visited: vec![false; settings.placeholders.max_free_placeholders], 30 | current_max_freestanding: 0, 31 | } 32 | } 33 | 34 | /// Ensure that the given placeholder is valid, and update the validator 35 | /// internal state accordingly. 36 | fn resolve(&mut self, name: &str) -> Result<()> { 37 | if let PlaceholderIdentifier::Generic(i) = 38 | self.settings.placeholders.resolve_placeholder(name)? 39 | { 40 | self.visited[i - 1] = true; 41 | self.current_max_freestanding = self.current_max_freestanding.max(i); 42 | } 43 | Ok(()) 44 | } 45 | 46 | /// Ensure that all the placeholders have been used and that the number of 47 | /// parameters matches the number of placeholders, and return the largest 48 | /// one found. 49 | fn ensured_used(&self) -> Result { 50 | for i in 0..self.current_max_freestanding { 51 | ensure!( 52 | self.visited[i], 53 | ValidationError::MissingPlaceholder(format!("${}", i + 1)) 54 | ); 55 | } 56 | if let Some(parameters_count) = self.settings.placeholders.parameters_count.get() { 57 | ensure!( 58 | *parameters_count == self.current_max_freestanding, 59 | ValidationError::TooManyParameters { 60 | expected: self.current_max_freestanding, 61 | got: *parameters_count, 62 | } 63 | ); 64 | } 65 | Ok(self.current_max_freestanding) 66 | } 67 | 68 | /// Return a [`HashSet`] containing all the numeric placeholders found in 69 | /// the query, without any guarantee of contiguity. 
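    /// For instance, a query mentioning `$1` and `$3` but not `$2` yields
    /// `{1, 3}`.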
70 | fn gather_placeholders(&self) -> Result<HashSet<usize>> { 71 | Ok(self 72 | .visited 73 | .iter() 74 | .enumerate() 75 | // self.visited starts at 0, placeholders start at 1 76 | .filter_map(|(i, used)| if *used { Some(i + 1) } else { None }) 77 | .collect()) 78 | } 79 | } 80 | 81 | impl<C: ContextProvider> AstVisitor for PlaceholderValidator<'_, C> { 82 | type Error = anyhow::Error; 83 | 84 | fn pre_expr(&mut self, expr: &Expr) -> anyhow::Result<()> { 85 | if let Expr::Value(Value::Placeholder(name)) = expr { 86 | self.resolve(name)?; 87 | } 88 | Ok(()) 89 | } 90 | } 91 | 92 | /// Instantiate a [`PlaceholderValidator`], then run it on the given query. 93 | /// Return the number of used free-standing placeholders if successful, or 94 | /// an error if the placeholder use is inappropriate. 95 | pub fn validate<C: ContextProvider>(settings: &ParsilSettings<C>, query: &Query) -> Result<usize> { 96 | let mut validator = PlaceholderValidator::new(settings); 97 | query.visit(&mut validator)?; 98 | validator.ensured_used() 99 | } 100 | 101 | /// Instantiate a [`PlaceholderValidator`], then run it on the given query. 102 | /// Return the set of free-standing placeholders found in the query if 103 | /// successful, or an error if the placeholder use is inappropriate. 104 | pub fn gather_placeholders<C: ContextProvider>( 105 | settings: &ParsilSettings<C>, 106 | query: &Query, 107 | ) -> Result<HashSet<usize>> { 108 | let mut validator = PlaceholderValidator::new(settings); 109 | query.visit(&mut validator)?; 110 | validator.gather_placeholders() 111 | } 112 | -------------------------------------------------------------------------------- /mp2-v1/src/block_extraction/public_inputs.rs: -------------------------------------------------------------------------------- 1 | use crate::{CBuilder, OutputHash}; 2 | use mp2_common::{ 3 | keccak::PACKED_HASH_LEN, 4 | public_inputs::{PublicInputCommon, PublicInputRange}, 5 | u256::{self, UInt256Target}, 6 | utils::FromTargets, 7 | }; 8 | use plonky2::iop::target::Target; 9 | 10 | // Block extraction public inputs: 11 | // - `BH : [8]F` packed Keccak hash of the block 12 | // - `PREV_BH : [8]F` packed Keccak hash of the previous block 13 | // - `BN : [8]F` Proven block number, as packed U256 limbs 14 | // - `SH : [8]F` Packed state root hash 15 | const BH_RANGE: PublicInputRange = 0..PACKED_HASH_LEN; 16 | const PREV_BH_RANGE: PublicInputRange = BH_RANGE.end..BH_RANGE.end + PACKED_HASH_LEN; 17 | const BN_RANGE: PublicInputRange = PREV_BH_RANGE.end..PREV_BH_RANGE.end + u256::NUM_LIMBS; 18 | const SH_RANGE: PublicInputRange = BN_RANGE.end..BN_RANGE.end + PACKED_HASH_LEN; 19 | 20 | /// Public inputs for the block extraction circuit. 21 | #[derive(Clone, Debug)] 22 | pub struct PublicInputs<'a, T> { 23 | /// Block hash 24 | pub(crate) bh: &'a [T], 25 | /// Previous block hash 26 | pub(crate) prev_bh: &'a [T], 27 | /// Block number 28 | pub(crate) bn: &'a [T], 29 | /// Packed state root 30 | pub(crate) sh: &'a [T], 31 | } 32 | 33 | impl PublicInputCommon for PublicInputs<'_, Target> { 34 | const RANGES: &'static [PublicInputRange] = &[BH_RANGE, PREV_BH_RANGE, BN_RANGE, SH_RANGE]; 35 | 36 | fn register_args(&self, cb: &mut CBuilder) { 37 | cb.register_public_inputs(self.bh); 38 | cb.register_public_inputs(self.prev_bh); 39 | cb.register_public_inputs(self.bn); 40 | cb.register_public_inputs(self.sh); 41 | } 42 | } 43 | 44 | impl<'a> PublicInputs<'a, Target> { 45 | /// Creates a new instance of the public inputs from its logical components.
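///
/// Panics if any of the given slices does not have the expected length.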
46 | pub const fn new( 47 | bh: &'a [Target], 48 | prev_bh: &'a [Target], 49 | bn: &'a [Target], 50 | sh: &'a [Target], 51 | ) -> Self { 52 | assert!(bh.len() == PACKED_HASH_LEN); 53 | assert!(prev_bh.len() == PACKED_HASH_LEN); 54 | assert!(sh.len() == PACKED_HASH_LEN); 55 | assert!(bn.len() == u256::NUM_LIMBS); 56 | Self { 57 | bh, 58 | prev_bh, 59 | bn, 60 | sh, 61 | } 62 | } 63 | 64 | pub fn block_number(&self) -> UInt256Target { 65 | UInt256Target::from_targets(self.bn) 66 | } 67 | 68 | pub fn block_hash(&self) -> OutputHash { 69 | OutputHash::from_targets(self.bh) 70 | } 71 | 72 | pub fn state_root(&self) -> OutputHash { 73 | OutputHash::from_targets(self.sh) 74 | } 75 | } 76 | 77 | impl<T: Clone> PublicInputs<'_, T> { 78 | /// Creates a vector from the parts of the public inputs 79 | pub fn to_vec(&self) -> Vec<T> { 80 | self.bh 81 | .iter() 82 | .chain(self.prev_bh.iter()) 83 | .chain(self.bn.iter()) 84 | .chain(self.sh.iter()) 85 | .cloned() 86 | .collect() 87 | } 88 | } 89 | 90 | impl<'a, T> PublicInputs<'a, T> { 91 | /// Total length of the public inputs. 92 | pub const TOTAL_LEN: usize = SH_RANGE.end; 93 | 94 | /// Creates a new instance from its internal parts. 95 | pub fn from_parts(bh: &'a [T], prev_bh: &'a [T], bn: &'a [T], sh: &'a [T]) -> Self { 96 | assert_eq!(bh.len(), BH_RANGE.len()); 97 | assert_eq!(prev_bh.len(), PREV_BH_RANGE.len()); 98 | assert_eq!(sh.len(), SH_RANGE.len()); 99 | 100 | Self { 101 | bh, 102 | prev_bh, 103 | bn, 104 | sh, 105 | } 106 | } 107 | 108 | /// Creates a new instance of the public inputs from a contiguous slice. 109 | pub fn from_slice(pi: &'a [T]) -> Self { 110 | Self { 111 | bh: &pi[BH_RANGE], 112 | prev_bh: &pi[PREV_BH_RANGE], 113 | bn: &pi[BN_RANGE], 114 | sh: &pi[SH_RANGE], 115 | } 116 | } 117 | 118 | /// Returns the block hash. 119 | pub const fn block_hash_raw(&self) -> &[T] { 120 | self.bh 121 | } 122 | 123 | /// Returns the previous block hash. 124 | pub const fn prev_block_hash_raw(&self) -> &[T] { 125 | self.prev_bh 126 | } 127 | 128 | /// Returns the block number. 129 | pub const fn block_number_raw(&self) -> &[T] { 130 | self.bn 131 | } 132 | 133 | /// Returns the packed state root hash. 134 | pub const fn state_root_raw(&self) -> &[T] { 135 | self.sh 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /mp2-common/src/group_hashing/sswu_value.rs: -------------------------------------------------------------------------------- 1 | //! Map to curve field arithmetic functions 2 | 3 | use super::{ 4 | field_to_curve::ToCurvePoint, 5 | utils::{a_sw, b_sw, neg_b_div_a_sw, neg_z_inv_sw, two_thirds, z_sw}, 6 | }; 7 | use plonky2::field::{ 8 | extension::quintic::QuinticExtension, goldilocks_field::GoldilocksField, ops::Square, 9 | types::Field, 10 | }; 11 | use plonky2_ecgfp5::curve::{ 12 | base_field::{InverseOrZero, Sgn0, SquareRoot}, 13 | curve::Point, 14 | }; 15 | 16 | /// Define Goldilocks and extension field types. 17 | type GFp = GoldilocksField; 18 | type GFp5 = QuinticExtension<GFp>; 19 | 20 | /// Implement curve point conversion for Goldilocks extension field. 21 | impl ToCurvePoint for GFp5 { 22 | /// Convert extension field to a curve point. 23 | fn map_to_curve_point(self) -> Point { 24 | // Invokes simplified SWU method. 25 | simple_swu(self) 26 | } 27 | } 28 | 29 | /// Simplified SWU mapping function for conversion from an extension field to a 30 | /// curve point. 31 | pub(crate) fn simple_swu(u: GFp5) -> Point { 32 | // Initialize constants.
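// `a_sw` and `b_sw` are the short Weierstrass coefficients of the curve, `z_sw` is the SWU `Z` constant, and `neg_z_inv_sw` / `neg_b_div_a_sw` are the precomputed values -1/Z and -B/A used below.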
33 | let [two_thirds, a_sw, b_sw, z_sw, neg_z_inv_sw, neg_b_div_a_sw] = [ 34 | two_thirds(), 35 | a_sw(), 36 | b_sw(), 37 | z_sw(), 38 | neg_z_inv_sw(), 39 | neg_b_div_a_sw(), 40 | ]; 41 | 42 | // Calculate tv1. 43 | let denom_part = z_sw * u.square(); 44 | let denom = denom_part.square() + denom_part; 45 | let tv1 = denom.inverse_or_zero(); 46 | 47 | // Calculate x1. 48 | let x1 = if tv1.is_zero() { 49 | neg_z_inv_sw 50 | } else { 51 | tv1 + GFp5::ONE 52 | } * neg_b_div_a_sw; 53 | 54 | // Calculate x2. 55 | let x2 = denom_part * x1; 56 | 57 | // g(x) = X^3 + A_sw*X + B_sw 58 | let gx1 = x1 * x1.square() + a_sw * x1 + b_sw; 59 | let gx2 = x2 * x2.square() + a_sw * x2 + b_sw; 60 | 61 | let (x_sw, y_pos) = if let Some(gx1_root) = gx1.sqrt() { 62 | (x1, gx1_root) 63 | } else { 64 | (x2, gx2.sqrt().unwrap()) 65 | }; 66 | 67 | // Calculate X_cand and Y_cand. 68 | let x_cand = x_sw - two_thirds; 69 | let y_cand = if u.sgn0() == y_pos.sgn0() { 70 | y_pos 71 | } else { 72 | -y_pos 73 | }; 74 | 75 | // Decode to a curve point. 76 | Point::decode(y_cand / x_cand).unwrap() 77 | } 78 | 79 | #[cfg(test)] 80 | mod tests { 81 | use super::{super::EXTENSION_DEGREE as N, *}; 82 | use plonky2::field::extension::FieldExtension; 83 | use std::array; 84 | 85 | /// The array of input and output pairs used for testing. The input is 86 | /// the raw limbs of an extension field element, and the output is the encoded 87 | /// extension field of a curve point (less data than a Weierstrass point). 88 | const TEST_INPUTS_OUTPUTS: [[[u64; N]; 2]; 3] = [ 89 | [ 90 | [1, 2, 3, 4, 5], 91 | [ 92 | 14787531356491256379, 93 | 11461637202037498289, 94 | 4291527673026618528, 95 | 4746471857872952759, 96 | 13337224262829952359, 97 | ], 98 | ], 99 | [ 100 | [100, 100, 100, 100, 100], 101 | [ 102 | 5101977855671705567, 103 | 18259369900233540211, 104 | 4964766086423821262, 105 | 6349865835816149910, 106 | 13164635315267603389, 107 | ], 108 | ], 109 | [ 110 | [0, u64::MAX, 0, u64::MAX, u64::MAX], 111 | [ 112 | 15406267945121757331, 113 | 8614084671648873762, 114 | 2366015382156010603, 115 | 14529344599099006840, 116 | 15466818755358183082, 117 | ], 118 | ], 119 | ]; 120 | 121 | /// Test simplified SWU method for mapping to curve point.
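/// Each case feeds the raw limbs through `simple_swu` and compares the encoded result against the expected limbs.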
122 | #[test] 123 | fn test_simple_swu_for_curve_point() { 124 | TEST_INPUTS_OUTPUTS.iter().for_each(|input_output| { 125 | let [input, expected_output] = input_output.map(ext_field_from_array); 126 | let real_output = simple_swu(input).encode(); 127 | 128 | assert_eq!( 129 | real_output, expected_output, 130 | "The encoded extension fields must be equal" 131 | ); 132 | }); 133 | } 134 | 135 | fn ext_field_from_array(values: [u64; N]) -> GFp5 { 136 | GFp5::from_basefield_array(array::from_fn::<_, N, _>(|i| GoldilocksField(values[i]))) 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /recursion-framework/src/universal_verifier_gadget/mod.rs: -------------------------------------------------------------------------------- 1 | use plonky2::{ 2 | field::extension::Extendable, 3 | gates::noop::NoopGate, 4 | hash::hash_types::RichField, 5 | plonk::{ 6 | circuit_builder::CircuitBuilder, 7 | circuit_data::{CircuitConfig, CircuitData, CommonCircuitData}, 8 | config::{AlgebraicHasher, GenericConfig}, 9 | }, 10 | }; 11 | 12 | use mp2_common::serialization::circuit_data_serialization::SerializableRichField; 13 | 14 | use self::wrap_circuit::WrapCircuit; 15 | 16 | mod circuit_set; 17 | pub(crate) mod verifier_gadget; 18 | pub(crate) mod wrap_circuit; 19 | 20 | pub use circuit_set::CircuitSetDigest; 21 | pub(crate) use circuit_set::{CircuitSet, CircuitSetTarget}; 22 | 23 | /// cap height for the Merkle-tree employed to represent the set of circuits that can be aggregated with 24 | /// `MergeCircuit`; it is now set to 0 for simplicity, which is equivalent to a traditional 25 | /// Merkle-tree with a single root. 26 | // TODO: evaluate whether to change this value depending on the number of circuits in the set 27 | const CIRCUIT_SET_CAP_HEIGHT: usize = 0; 28 | 29 | /// Minimum `degree_bits` of a circuit recursively verifying a Plonky2 proof.
This corresponds to the 30 | /// expected `degree_bits` of every circuit whose proofs can be recursively verified by a universal 31 | /// verifier, given that for every Plonky2 circuit it should always be possible to obtain a circuit 32 | /// with `RECURSION_THRESHOLD` `degree_bits` proving the same statement (referred to as `WrapCircuit` 33 | /// in this framework) 34 | pub(crate) const RECURSION_THRESHOLD: usize = 12; 35 | 36 | /// `degree_bits` for a base circuit guaranteeing that 2 wrap steps are necessary to obtain an equivalent 37 | /// version of the base circuit with `RECURSION_THRESHOLD` `degree_bits`; this limit is only employed to 38 | /// generate the base circuit employed to compute the `CommonCircuitData` shared among all wrap circuits 39 | /// whose proofs can be verified by a universal verifier for a given circuit set 40 | const SHRINK_LIMIT: usize = 15; 41 | 42 | /// This function builds the base circuit employed to compute the `CommonCircuitData` shared among all 43 | /// wrap circuits whose proofs can be verified by a universal verifier for a set of circuits with 44 | /// `num_public_inputs` 45 | fn dummy_circuit<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>( 46 | config: CircuitConfig, 47 | num_gates: usize, 48 | num_public_inputs: usize, 49 | ) -> CircuitData<F, C, D> { 50 | let mut builder = CircuitBuilder::new(config); 51 | for _ in 0..num_public_inputs { 52 | let target = builder.add_virtual_target(); 53 | builder.register_public_input(target); 54 | } 55 | // pad the number of gates of the circuit up to `num_gates` with noop operations 56 | let num_padding_gates = num_gates - builder.num_gates(); 57 | for _ in 0..num_padding_gates { 58 | builder.add_gate(NoopGate, vec![]); 59 | } 60 | 61 | builder.build::<C>() 62 | } 63 | 64 | /// It returns the `CommonCircuitData` which is shared across all circuits whose proofs can be 65 | /// verified by a universal verifier for a set of circuits with `num_public_inputs`. 66 | /// It is only called for testing purposes or during build time. 67 | pub(crate) fn build_data_for_universal_verifier< 68 | F: SerializableRichField<D>, 69 | C: GenericConfig<D, F = F> + 'static, 70 | const D: usize, 71 | >( 72 | config: CircuitConfig, 73 | num_public_inputs: usize, 74 | ) -> CommonCircuitData<F, D> 75 | where 76 | C::Hasher: AlgebraicHasher<F>, 77 | { 78 | let num_public_inputs = 79 | num_public_inputs + CircuitSetTarget::num_targets::<F, D>(config.clone()); 80 | let circuit_data = 81 | dummy_circuit::<F, C, D>(config.clone(), 1 << SHRINK_LIMIT, num_public_inputs); 82 | 83 | let wrap_circuit = WrapCircuit::<F, C, D>::build_wrap_circuit( 84 | &circuit_data.verifier_only, 85 | &circuit_data.common, 86 | &config, 87 | ); 88 | 89 | wrap_circuit.final_proof_circuit_data().common.clone() 90 | } 91 | 92 | #[cfg(test)] 93 | mod tests { 94 | use crate::{C, D, F}; 95 | use plonky2::plonk::circuit_data::CircuitConfig; 96 | 97 | use super::{build_data_for_universal_verifier, RECURSION_THRESHOLD}; 98 | 99 | #[test] 100 | fn test_common_data_for_recursion() { 101 | let cd = build_data_for_universal_verifier::<F, C, D>( 102 | CircuitConfig::standard_recursion_config(), 103 | 3, 104 | ); 105 | 106 | assert_eq!(dbg!(cd).degree_bits(), RECURSION_THRESHOLD); 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /mp2-common/src/types.rs: -------------------------------------------------------------------------------- 1 | //!
Custom types 2 | 3 | use crate::{array::Array, D, F}; 4 | use anyhow::ensure; 5 | use derive_more::Deref; 6 | use plonky2::{ 7 | field::extension::quintic::QuinticExtension, 8 | hash::hash_types::HashOut, 9 | iop::target::Target, 10 | plonk::{circuit_builder::CircuitBuilder, config::GenericHashOut}, 11 | }; 12 | use plonky2_crypto::u32::arithmetic_u32::U32Target; 13 | use serde::{Deserialize, Serialize}; 14 | 15 | /// Default field 16 | pub type GFp = F; 17 | 18 | /// Quintic extension field 19 | pub type GFp5 = QuinticExtension<F>; 20 | 21 | /// Default circuit builder 22 | pub type CBuilder = CircuitBuilder<F, D>; 23 | 24 | /// Length of a U64 in bytes 25 | pub const U64_LEN: usize = 8; 26 | /// Length of a U64 in U32 limbs 27 | pub const PACKED_U64_LEN: usize = U64_LEN / 4; 28 | /// Length of a curve target (2x quintic + bool) 29 | pub const CURVE_TARGET_LEN: usize = 11; 30 | /// Byte representation of a U64 31 | pub type U64Target = Array<Target, U64_LEN>; 32 | /// U32 representation of a U64 33 | pub type PackedU64Target = Array<U32Target, PACKED_U64_LEN>; 34 | 35 | /// Length of an address (H160 = [u8; 20]) 36 | pub const ADDRESS_LEN: usize = 20; 37 | /// Length of an address in U32 38 | pub const PACKED_ADDRESS_LEN: usize = ADDRESS_LEN / 4; 39 | /// Byte representation of an address 40 | pub type AddressTarget = Array<Target, ADDRESS_LEN>; 41 | /// U32 representation of an address 42 | pub type PackedAddressTarget = Array<U32Target, PACKED_ADDRESS_LEN>; 43 | 44 | /// The length of a mapping key in bytes 45 | pub const MAPPING_KEY_LEN: usize = 32; 46 | /// Length of a mapping key when packed in u32 47 | pub const PACKED_MAPPING_KEY_LEN: usize = MAPPING_KEY_LEN / 4; 48 | /// A value is never more than 32 bytes in EVM 49 | pub const VALUE_LEN: usize = 32; 50 | /// A compact representation of a value in U32 51 | pub const PACKED_VALUE_LEN: usize = VALUE_LEN / 4; 52 | /// The target for a packed value in U32 53 | pub type PackedValueTarget = Array<U32Target, PACKED_VALUE_LEN>; 54 | /// The target for a mapping key, 32 bytes 55 | pub type MappingKeyTarget = Array<Target, MAPPING_KEY_LEN>; 56 | /// The target for representing a mapping key, in packed format in u32 57 | pub type PackedMappingKeyTarget = Array<U32Target, PACKED_MAPPING_KEY_LEN>; 58 | 59 | /// Regular hash output function - it can be generated from field elements using 60 | /// poseidon with the output serialized or via regular hash functions. 61 | #[derive(Clone, Hash, Default, Debug, Serialize, Deserialize, Deref, PartialEq, Eq, Copy)] 62 | pub struct HashOutput(pub [u8; 32]); 63 | impl AsRef<[u8]> for &HashOutput { 64 | fn as_ref(&self) -> &[u8] { 65 | &self.0 66 | } 67 | } 68 | 69 | /// Max observed is 622, but better be safe by default; it doesn't cost "more" for keccak 70 | /// since it still has to do 5 rounds whether the input is 622 or 670 bytes. 71 | pub const MAX_BLOCK_LEN: usize = 670; 72 | 73 | /// This constant represents the maximum size a value can be inside the storage trie. 74 | /// 75 | /// It is different from the `MAX_LEAF_VALUE_LEN` constant because it represents the 76 | /// value **not** RLP encoded, i.e. without the 1-byte RLP header.
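/// For example, a full 32-byte storage value is RLP-encoded as 33 bytes: a `0xa0` (`0x80 + 32`) length prefix followed by the value itself.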
77 | pub const MAPPING_LEAF_VALUE_LEN: usize = 32; 78 | 79 | /// The length of an EVM word 80 | pub const EVM_WORD_LEN: usize = 32; 81 | 82 | impl From<[u8; 32]> for HashOutput { 83 | fn from(value: [u8; 32]) -> Self { 84 | Self(value) 85 | } 86 | } 87 | 88 | impl TryFrom<Vec<u8>> for HashOutput { 89 | type Error = anyhow::Error; 90 | 91 | fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> { 92 | ensure!(value.len() == 32, "invalid length of the vector"); 93 | Ok(Self(value.try_into().unwrap())) 94 | } 95 | } 96 | 97 | impl<'a> From<&'a HashOutput> for &'a [u8] { 98 | fn from(value: &'a HashOutput) -> Self { 99 | value.0.as_slice() 100 | } 101 | } 102 | 103 | impl<'a> From<&'a HashOutput> for Vec<u8> { 104 | fn from(value: &'a HashOutput) -> Self { 105 | value.0.to_vec() 106 | } 107 | } 108 | 109 | impl From<HashOut<F>> for HashOutput { 110 | fn from(value: HashOut<F>) -> Self { 111 | value.to_bytes().try_into().unwrap() 112 | } 113 | } 114 | 115 | impl From<&HashOut<F>> for HashOutput { 116 | fn from(value: &HashOut<F>) -> Self { 117 | value.to_bytes().try_into().unwrap() 118 | } 119 | } 120 | 121 | impl From<HashOutput> for HashOut<F> { 122 | fn from(value: HashOutput) -> Self { 123 | Self::from_bytes(&value.0) 124 | } 125 | } 126 | 127 | impl From<&HashOutput> for HashOut<F> { 128 | fn from(value: &HashOutput) -> Self { 129 | Self::from_bytes(&value.0) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /mp2-test/src/circuit.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use mp2_common::{C, D, F}; 3 | use plonky2::plonk::circuit_data::CircuitConfig; 4 | use plonky2::plonk::proof::ProofWithPublicInputs; 5 | use plonky2::{ 6 | field::extension::Extendable, 7 | hash::hash_types::RichField, 8 | iop::witness::PartialWitness, 9 | plonk::{ 10 | circuit_builder::CircuitBuilder, 11 | circuit_data::{CircuitData, VerifierCircuitData}, 12 | config::GenericConfig, 13 | }, 14 | }; 15 | use recursion_framework::{ 16 | circuit_builder::CircuitLogicWires, framework_testing::DummyCircuitWires, 17 | }; 18 | use std::fmt::Debug; 19 | 20 | /// Circuit that does nothing but can be passed as a child proof to some circuit when testing the aggregation 21 | /// logic. 22 | pub struct TestDummyCircuit<const NUM_PUBLIC_INPUTS: usize> { 23 | data: CircuitData<F, C, D>, 24 | wires: DummyCircuitWires<NUM_PUBLIC_INPUTS>, 25 | } 26 | 27 | impl<const NUM_PUBLIC_INPUTS: usize> TestDummyCircuit<NUM_PUBLIC_INPUTS> { 28 | pub fn build() -> Self { 29 | let config = CircuitConfig::standard_recursion_config(); 30 | let mut cb = CircuitBuilder::<F, D>::new(config); 31 | let wires = DummyCircuitWires::circuit_logic(&mut cb, [], ()); 32 | let data = cb.build::<C>(); 33 | Self { data, wires } 34 | } 35 | 36 | pub fn generate_proof( 37 | &self, 38 | public_inputs: [F; NUM_PUBLIC_INPUTS], 39 | ) -> Result<ProofWithPublicInputs<F, C, D>> { 40 | let mut pw = PartialWitness::<F>::new(); 41 | <DummyCircuitWires<NUM_PUBLIC_INPUTS> as CircuitLogicWires<F, D, 0>>::assign_input( 42 | &self.wires, 43 | public_inputs, 44 | &mut pw, 45 | )?; 46 | self.data.prove(pw) 47 | } 48 | 49 | pub fn circuit_data(&self) -> &CircuitData<F, C, D> { 50 | &self.data 51 | } 52 | } 53 | 54 | /// Simple trait defining the main utility methods to define circuits almost 55 | /// as gadgets / library calls. 56 | pub trait UserCircuit<F, const D: usize>: Clone 57 | where 58 | F: RichField + Extendable<D>, 59 | { 60 | /// The wires related to this circuit that need assignment during 61 | /// the proving phase. 62 | type Wires; 63 | 64 | /// Method is called once to build the circuit shape. 65 | fn build(c: &mut CircuitBuilder<F, D>) -> Self::Wires; 66 | 67 | /// Create a proof, giving the wires already generated at the first step.
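/// Implementations are expected to assign their witness values to `wires` through `pw`; generating the proof itself is left to the caller (see `prove_circuit` below).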
68 | fn prove(&self, pw: &mut PartialWitness<F>, wires: &Self::Wires); 69 | } 70 | 71 | /// Setup the circuit to be proven via an instance. 72 | pub fn setup_circuit< 73 | F: RichField + Extendable<D>, 74 | const D: usize, 75 | C: GenericConfig<D, F = F>, 76 | U: UserCircuit<F, D> + Debug, 77 | >() -> (U::Wires, CircuitData<F, C, D>, VerifierCircuitData<F, C, D>) { 78 | let mut b = CircuitBuilder::new(CircuitConfig::standard_recursion_config()); 79 | let now = std::time::Instant::now(); 80 | let wires = U::build(&mut b); 81 | let circuit_data = b.build::<C>(); 82 | let vcd = VerifierCircuitData { 83 | verifier_only: circuit_data.verifier_only.clone(), 84 | common: circuit_data.common.clone(), 85 | }; 86 | 87 | println!("[+] Circuit data built in {:?}s", now.elapsed().as_secs()); 88 | 89 | (wires, circuit_data, vcd) 90 | } 91 | 92 | /// Prove and verify a circuit instance with a previously generated setup. 93 | pub fn prove_circuit< 94 | F: RichField + Extendable<D>, 95 | const D: usize, 96 | C: GenericConfig<D, F = F>, 97 | U: UserCircuit<F, D> + Debug, 98 | >( 99 | setup: &(U::Wires, CircuitData<F, C, D>, VerifierCircuitData<F, C, D>), 100 | u: &U, 101 | ) -> ProofWithPublicInputs<F, C, D> { 102 | let mut pw = PartialWitness::new(); 103 | 104 | println!("[+] Generating a proof ... "); 105 | let now = std::time::Instant::now(); 106 | u.prove(&mut pw, &setup.0); 107 | let proof = setup.1.prove(pw).expect("invalid proof"); 108 | println!("[+] Proof generated in {:?}ms", now.elapsed().as_millis()); 109 | setup 110 | .2 111 | .verify(proof.clone()) 112 | .expect("failed to verify proof"); 113 | 114 | proof 115 | } 116 | 117 | /// Proves and verifies the provided circuit instance. 118 | pub fn run_circuit< 119 | F: RichField + Extendable<D>, 120 | const D: usize, 121 | C: GenericConfig<D, F = F>, 122 | U: UserCircuit<F, D> + Debug, 123 | >( 124 | u: U, 125 | ) -> ProofWithPublicInputs<F, C, D> { 126 | let setup = setup_circuit::<F, D, C, U>(); 127 | println!( 128 | "setup.verifierdata hash {:?}", 129 | setup.2.verifier_only.circuit_digest 130 | ); 131 | 132 | prove_circuit(&setup, &u) 133 | } 134 | -------------------------------------------------------------------------------- /mp2-v1/src/block_extraction/mod.rs: -------------------------------------------------------------------------------- 1 | use plonky2::{ 2 | iop::witness::PartialWitness, 3 | plonk::{circuit_builder::CircuitBuilder, circuit_data::CircuitData}, 4 | }; 5 | 6 | mod circuit; 7 | pub(crate) mod public_inputs; 8 | 9 | use crate::{C, D, F}; 10 | use anyhow::Result; 11 | use mp2_common::{ 12 | default_config, 13 | proof::serialize_proof, 14 | serialization::{deserialize, serialize}, 15 | }; 16 | use serde::{Deserialize, Serialize}; 17 | 18 | pub use public_inputs::PublicInputs; 19 | 20 | #[derive(Serialize, Deserialize)] 21 | pub struct CircuitInput(Vec<u8>); 22 | 23 | impl CircuitInput { 24 | pub fn from_block_header(rlp_header: Vec<u8>) -> Self { 25 | Self(rlp_header) 26 | } 27 | } 28 | 29 | #[derive(Debug, Serialize, Deserialize)] 30 | pub struct PublicParameters { 31 | #[serde(serialize_with = "serialize", deserialize_with = "deserialize")] 32 | circuit_data: CircuitData<F, C, D>, 33 | wires: circuit::BlockWires, 34 | } 35 | 36 | /// Returns the parameters necessary to prove block extraction circuits 37 | pub fn build_circuits_params() -> PublicParameters { 38 | PublicParameters::build() 39 | } 40 | 41 | impl PublicParameters { 42 | pub fn build() -> Self { 43 | let config = default_config(); 44 | let mut cb = CircuitBuilder::new(config); 45 | let wires = circuit::BlockCircuit::build(&mut cb); 46 | let cd = cb.build(); 47 | Self { 48 | circuit_data: cd, 49 | wires, 50
| } 51 | } 52 | 53 | pub fn generate_proof(&self, block_header: CircuitInput) -> Result<Vec<u8>> { 54 | let input = circuit::BlockCircuit::new(block_header.0)?; 55 | let mut pw = PartialWitness::new(); 56 | input.assign(&mut pw, &self.wires); 57 | let proof = self.circuit_data.prove(pw)?; 58 | serialize_proof(&proof) 59 | } 60 | 61 | pub fn circuit_data(&self) -> &CircuitData<F, C, D> { 62 | &self.circuit_data 63 | } 64 | } 65 | 66 | #[cfg(test)] 67 | mod test { 68 | use crate::{C, D, F}; 69 | use alloy::{ 70 | eips::BlockNumberOrTag, 71 | primitives::U256, 72 | providers::{Provider, ProviderBuilder}, 73 | }; 74 | use anyhow::Result; 75 | use mp2_common::{ 76 | eth::BlockUtil, 77 | proof::deserialize_proof, 78 | utils::{Endianness, FromFields, Packer, ToFields}, 79 | }; 80 | use mp2_test::eth::get_sepolia_url; 81 | 82 | use crate::block_extraction::{public_inputs::PublicInputs, PublicParameters}; 83 | #[tokio::test] 84 | async fn test_api() -> Result<()> { 85 | let params = PublicParameters::build(); 86 | let url = get_sepolia_url(); 87 | let provider = ProviderBuilder::new().connect_http(url.parse().unwrap()); 88 | let block_number = BlockNumberOrTag::Latest; 89 | let block = provider 90 | .get_block_by_number(block_number) 91 | .full() 92 | .await 93 | .unwrap() 94 | .unwrap(); 95 | 96 | let rlp_headers = super::CircuitInput::from_block_header(block.rlp()); 97 | let proof = params.generate_proof(rlp_headers)?; 98 | // check public inputs 99 | let proof = deserialize_proof::<F, C, D>(&proof)?; 100 | let pi = PublicInputs::from_slice(&proof.public_inputs); 101 | assert_eq!( 102 | pi.block_hash_raw(), 103 | block.block_hash().pack(Endianness::Little).to_fields() 104 | ); 105 | // sanity check to know we generate the hash the same way as what is included in headers 106 | assert_eq!( 107 | pi.block_hash_raw(), 108 | block 109 | .header 110 | .hash 111 | // XXX unclear why that fails when one removes the ".0" since we access things 112 | // directly underneath when calling pack directly or using as_slice, both fail. 113 | // XXX unclear why it is needed here but not for previous hash... 114 | .0 115 | .pack(Endianness::Little) 116 | .to_fields(), 117 | ); 118 | assert_eq!( 119 | pi.prev_block_hash_raw(), 120 | block 121 | .header 122 | .parent_hash 123 | .pack(Endianness::Little) 124 | .to_fields(), 125 | ); 126 | assert_eq!( 127 | U256::from_fields(pi.block_number_raw()), 128 | U256::from(block.header.number), 129 | ); 130 | assert_eq!( 131 | pi.state_root_raw(), 132 | block.header.state_root.pack(Endianness::Little).to_fields(), 133 | ); 134 | Ok(()) 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /mp2-v1/src/final_extraction/simple_circuit.rs: -------------------------------------------------------------------------------- 1 | use crate::{D, F}; 2 | use derive_more::From; 3 | use mp2_common::{public_inputs::PublicInputCommon, utils::ToTargets}; 4 | use plonky2::{ 5 | iop::{target::Target, witness::PartialWitness}, 6 | plonk::circuit_builder::CircuitBuilder, 7 | }; 8 | use recursion_framework::circuit_builder::CircuitLogicWires; 9 | use serde::{Deserialize, Serialize}; 10 | 11 | use crate::values_extraction; 12 | 13 | use super::{ 14 | api::{FinalExtractionBuilderParams, NUM_IO}, 15 | base_circuit::{self, BaseCircuitProofInputs, BaseCircuitProofWires}, 16 | PublicInputs, 17 | }; 18 | 19 | /// This circuit contains the logic to prove the final extraction of a simple 20 | /// variable (like uint256) or a mapping without an associated length slot.
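/// It wires together one block proof, one contract proof and exactly one values proof, re-exposing the values digest together with the block data as public inputs.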
21 | #[derive(Clone, Debug, From)] 22 | pub struct SimpleCircuit; 23 | 24 | impl SimpleCircuit { 25 | fn build( 26 | b: &mut CircuitBuilder<F, D>, 27 | block_pi: &[Target], 28 | contract_pi: &[Target], 29 | value_pi: &[Target], 30 | ) { 31 | // only one value proof to verify for this circuit 32 | let base_wires = base_circuit::BaseCircuit::build(b, block_pi, contract_pi, vec![value_pi]); 33 | 34 | let value_pi = values_extraction::PublicInputs::<Target>::new(value_pi); 35 | let final_dv = value_pi.values_digest_target(); 36 | PublicInputs::new( 37 | &base_wires.bh, 38 | &base_wires.prev_bh, 39 | &final_dv.to_targets(), 40 | &base_wires.dm.to_targets(), 41 | &base_wires.bn.to_targets(), 42 | &[b._false().target], 43 | ) 44 | .register_args(b); 45 | } 46 | } 47 | 48 | #[derive(Serialize, Deserialize, Clone, Debug)] 49 | pub(crate) struct SimpleCircuitRecursiveWires { 50 | /// NOTE: assumed to contain a single value proof inside the vec. 51 | base: BaseCircuitProofWires, 52 | } 53 | 54 | pub struct SimpleCircuitInput { 55 | base: BaseCircuitProofInputs, 56 | } 57 | 58 | impl SimpleCircuitInput { 59 | pub(crate) fn new(base: BaseCircuitProofInputs) -> Self { 60 | Self { base } 61 | } 62 | } 63 | 64 | impl CircuitLogicWires<F, D, 0> for SimpleCircuitRecursiveWires { 65 | type CircuitBuilderParams = FinalExtractionBuilderParams; 66 | 67 | type Inputs = SimpleCircuitInput; 68 | 69 | const NUM_PUBLIC_INPUTS: usize = NUM_IO; 70 | 71 | fn circuit_logic( 72 | builder: &mut CircuitBuilder<F, D>, 73 | _verified_proofs: [&plonky2::plonk::proof::ProofWithPublicInputsTarget<D>; 0], 74 | builder_parameters: Self::CircuitBuilderParams, 75 | ) -> Self { 76 | // only one proof to verify for this simple circuit 77 | let base = BaseCircuitProofInputs::build(builder, &builder_parameters, 1); 78 | SimpleCircuit::build( 79 | builder, 80 | base.get_block_public_inputs(), 81 | base.get_contract_public_inputs(), 82 | base.get_value_public_inputs(), 83 | ); 84 | Self { base } 85 | } 86 | 87 | fn assign_input(&self, inputs: Self::Inputs, pw: &mut PartialWitness<F>) -> anyhow::Result<()> { 88 | inputs.base.assign_proof_targets(pw, &self.base)?; 89 | Ok(()) 90 | } 91 | } 92 | 93 | #[cfg(test)] 94 | mod test { 95 | use super::*; 96 | use crate::C; 97 | use base_circuit::test::{ProofsPi, ProofsPiTarget}; 98 | use mp2_test::circuit::{run_circuit, UserCircuit}; 99 | 100 | #[derive(Clone, Debug)] 101 | struct TestSimpleCircuit { 102 | pis: ProofsPi, 103 | } 104 | 105 | struct TestSimpleWires { 106 | pis: ProofsPiTarget, 107 | } 108 | 109 | impl UserCircuit<F, D> for TestSimpleCircuit { 110 | type Wires = TestSimpleWires; 111 | fn build(c: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>) -> Self::Wires { 112 | let pis = ProofsPiTarget::new(c); 113 | SimpleCircuit::build(c, &pis.blocks_pi, &pis.contract_pi, &pis.values_pi); 114 | TestSimpleWires { pis } 115 | } 116 | fn prove(&self, pw: &mut plonky2::iop::witness::PartialWitness<F>, wires: &Self::Wires) { 117 | wires.pis.assign(pw, &self.pis); 118 | } 119 | } 120 | 121 | #[test] 122 | fn test_final_simple_circuit() { 123 | let pis = ProofsPi::random(); 124 | let test_circuit = TestSimpleCircuit { pis: pis.clone() }; 125 | let proof = run_circuit::<F, D, C, _>(test_circuit); 126 | pis.check_proof_public_inputs(&proof, None); 127 | 128 | let test_circuit = TestSimpleCircuit { pis: pis.clone() }; 129 | let proof = run_circuit::<F, D, C, _>(test_circuit); 130 | pis.check_proof_public_inputs(&proof, None); 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /mp2-common/src/hash.rs:
-------------------------------------------------------------------------------- 1 | use plonky2::{ 2 | hash::hash_types::NUM_HASH_OUT_ELTS, 3 | iop::target::{BoolTarget, Target}, 4 | plonk::circuit_builder::CircuitBuilder, 5 | }; 6 | 7 | use crate::{ 8 | poseidon::{H, P}, 9 | D, F, 10 | }; 11 | 12 | use plonky2::hash::hashing::PlonkyPermutation; 13 | use plonky2::plonk::config::AlgebraicHasher; 14 | 15 | // Maybe swap the first two elements, then hash the rest of the input along with them. 16 | pub fn hash_maybe_first( 17 | c: &mut CircuitBuilder<F, D>, 18 | should_swap: BoolTarget, 19 | elem1: [Target; NUM_HASH_OUT_ELTS], 20 | elem2: [Target; NUM_HASH_OUT_ELTS], 21 | rest: &[Target], 22 | ) -> Vec<Target> { 23 | let zero = c.zero(); 24 | let mut state = P::new(core::iter::repeat(zero)); 25 | // absorb the first two inputs and do the swap 26 | state.set_from_slice(&[elem1, elem2].concat(), 0); 27 | state = H::permute_swapped(state, should_swap, c); 28 | // Absorb all the rest of the input chunks. 29 | let t = c._false(); 30 | for input_chunk in rest.chunks(P::RATE) { 31 | state.set_from_slice(input_chunk, 0); 32 | state = H::permute_swapped(state, t, c); 33 | } 34 | 35 | // Squeeze until we have the desired number of outputs. 36 | let mut outputs = Vec::new(); 37 | loop { 38 | for &item in state.squeeze() { 39 | outputs.push(item); 40 | if outputs.len() == NUM_HASH_OUT_ELTS { 41 | return outputs; 42 | } 43 | } 44 | state.permute(); 45 | } 46 | } 47 | 48 | #[cfg(test)] 49 | mod test { 50 | use crate::C; 51 | use plonky2::field::types::Sample; 52 | use plonky2::hash::hashing::hash_n_to_hash_no_pad; 53 | 54 | use plonky2::plonk::config::Hasher; 55 | use plonky2::{ 56 | hash::hash_types::NUM_HASH_OUT_ELTS, 57 | iop::{ 58 | target::{BoolTarget, Target}, 59 | witness::WitnessWrite, 60 | }, 61 | plonk::circuit_builder::CircuitBuilder, 62 | }; 63 | 64 | use crate::{CHasher, D, F}; 65 | use mp2_test::circuit::{run_circuit, UserCircuit}; 66 | 67 | const REST: usize = 7; 68 | #[derive(Debug, Clone)] 69 | struct TestPartialSwap { 70 | elem1: Vec<F>, 71 | elem2: Vec<F>, 72 | should_swap: bool, 73 | rest: Vec<F>, 74 | } 75 | 76 | impl UserCircuit<F, D> for TestPartialSwap { 77 | type Wires = (Vec<Target>, Vec<Target>, BoolTarget, Vec<Target>); 78 | 79 | fn build(c: &mut CircuitBuilder<F, D>) -> Self::Wires { 80 | let elem1 = c.add_virtual_targets(NUM_HASH_OUT_ELTS); 81 | let elem2 = c.add_virtual_targets(NUM_HASH_OUT_ELTS); 82 | let cond = c.add_virtual_bool_target_safe(); 83 | let rest = c.add_virtual_targets(REST); 84 | let hash = super::hash_maybe_first( 85 | c, 86 | cond, 87 | elem1.clone().try_into().unwrap(), 88 | elem2.clone().try_into().unwrap(), 89 | &rest, 90 | ); 91 | c.register_public_inputs(&hash); 92 | (elem1, elem2, cond, rest) 93 | } 94 | 95 | fn prove(&self, pw: &mut plonky2::iop::witness::PartialWitness<F>, wires: &Self::Wires) { 96 | pw.set_target_arr(&wires.0, &self.elem1); 97 | pw.set_target_arr(&wires.1, &self.elem2); 98 | pw.set_bool_target(wires.2, self.should_swap); 99 | pw.set_target_arr(&wires.3, &self.rest); 100 | } 101 | } 102 | 103 | #[test] 104 | fn test_partial_swap() { 105 | let elem1 = (0..NUM_HASH_OUT_ELTS) 106 | .map(|_| F::rand()) 107 | .collect::<Vec<_>>(); 108 | let elem2 = (0..NUM_HASH_OUT_ELTS) 109 | .map(|_| F::rand()) 110 | .collect::<Vec<_>>(); 111 | let rest = (0..REST).map(|_| F::rand()).collect::<Vec<_>>(); 112 | for should_swap in [true, false] { 113 | let circuit = TestPartialSwap { 114 | elem1: elem1.clone(), 115 | elem2: elem2.clone(), 116 | should_swap, 117 | rest: rest.clone(), 118 | }; 119 | let proof = run_circuit::<F, D, C, _>(circuit); 120 | let pi =
proof.public_inputs; 121 | // do it outside circuit 122 | let tuple = match should_swap { 123 | false => [elem1.clone(), elem2.clone()].concat(), 124 | true => [elem2.clone(), elem1.clone()].concat(), 125 | }; 126 | let inputs = tuple.iter().chain(rest.iter()).cloned().collect::<Vec<_>>(); 127 | let hash = hash_n_to_hash_no_pad::<F, <CHasher as Hasher<F>>::Permutation>(&inputs); 128 | assert_eq!(&hash.elements.as_slice(), &pi.as_slice()); 129 | } 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /mp2-common/src/mpt_sequential/utils.rs: -------------------------------------------------------------------------------- 1 | //! MPT utility functions 2 | 3 | use crate::{ 4 | array::Array, 5 | utils::{find_index_subvector, keccak256, less_than}, 6 | }; 7 | use eth_trie::Nibbles; 8 | use plonky2::{ 9 | field::extension::Extendable, hash::hash_types::RichField, iop::target::Target, 10 | plonk::circuit_builder::CircuitBuilder, 11 | }; 12 | 13 | pub fn bytes_to_nibbles(bytes: &[u8]) -> Vec<u8> { 14 | let mut nibbles = Vec::new(); 15 | for b in bytes { 16 | nibbles.push(b >> 4); 17 | nibbles.push(b & 0x0F); 18 | } 19 | nibbles 20 | } 21 | 22 | pub fn nibbles_to_bytes(nibbles: &[u8]) -> Vec<u8> { 23 | let mut padded = nibbles.to_vec(); 24 | if padded.len() % 2 == 1 { 25 | padded.insert(0, 0); 26 | } 27 | let mut bytes = Vec::new(); 28 | for i in 0..padded.len() / 2 { 29 | bytes.push((padded[i * 2] << 4) | (padded[i * 2 + 1] & 0x0F)); 30 | } 31 | bytes 32 | } 33 | 34 | /// Decodes the RLP header (assuming it's a value < 0x80 or less than 55 bytes). 35 | /// 36 | /// Left-pads the leaf value and 37 | /// returns it as big-endian. 38 | /// NOTE: WARNING: RLP_VALUE_LEN MUST include an additional byte for a potential 39 | /// RLP header. For example, in EVM, every value can be up to 32 bytes. One must 40 | /// input 33 in this function because the RLP header might take a full additional byte.
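/// As a worked example of the decoding below: a single byte strictly below `0x80` encodes itself, so `value_len = 1` and `offset = 0`; otherwise the first byte is the header `0x80 + len`, so the value starts at `offset = 1` with `value_len = prefix - 0x80`.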
41 | pub fn left_pad_leaf_value< 42 | F: RichField + Extendable<D>, 43 | const D: usize, 44 | const RLP_VALUE_LEN: usize, 45 | const PADDED_LEN: usize, 46 | >( 47 | b: &mut CircuitBuilder<F, D>, 48 | value: &Array<Target, RLP_VALUE_LEN>, 49 | ) -> Array<Target, PADDED_LEN> { 50 | // Read the length of the relevant data (RLP header - 0x80) 51 | let zero = b.zero(); 52 | let one = b.one(); 53 | 54 | let prefix = value[0]; 55 | let byte_80 = b.constant(F::from_canonical_usize(128)); 56 | let is_single_byte = less_than(b, prefix, byte_80, 8); 57 | let value_len_80 = b.sub(value[0], byte_80); 58 | let value_len = b.select(is_single_byte, one, value_len_80); 59 | let offset = b.select(is_single_byte, zero, one); 60 | value 61 | // WARNING: this is a hack to avoid another const generic but 62 | // what we should really do here is extract RLP_VALUE_LEN-1 because we 63 | // consider 1 extra byte for the RLP header always (which may or may not exist) 64 | .extract_array::<F, D, RLP_VALUE_LEN>(b, offset) 65 | .into_vec(value_len) 66 | .normalize_left::<_, _, PADDED_LEN>(b) 67 | } 68 | 69 | pub fn visit_proof(proof: &[Vec<u8>]) { 70 | let mut child_hash = vec![]; 71 | let mut partial_key = vec![]; 72 | for node in proof.iter() { 73 | visit_node(node, &child_hash, &mut partial_key); 74 | child_hash = keccak256(node); 75 | println!( 76 | "\t=> full partial key: hex {:?}", 77 | hex::encode(nibbles_to_bytes(&partial_key)) 78 | ); 79 | } 80 | } 81 | 82 | pub fn visit_node(node: &[u8], child_hash: &[u8], partial_key: &mut Vec<u8>) { 83 | println!("[+] Node ({} bytes) {}", node.len(), hex::encode(node)); 84 | let node_list: Vec<Vec<u8>> = rlp::decode_list(node); 85 | match node_list.len() { 86 | 2 => { 87 | // leaf or extension case: verify the hash is present and look up the key 88 | if !child_hash.is_empty() { 89 | let _ = find_index_subvector(node, child_hash) 90 | .expect("extension should contain hash of child"); 91 | } 92 | // we don't need to decode the RLP header on top of it, since it is 93 | // already done in the decode_list function. 94 | let key_nibbles_struct = Nibbles::from_compact(&node_list[0]); 95 | let key_nibbles = key_nibbles_struct.nibbles(); 96 | println!( 97 | "\t=> Leaf/Extension node: partial key extracted: {:?}", 98 | hex::encode(nibbles_to_bytes(key_nibbles)) 99 | ); 100 | partial_key.splice(0..0, key_nibbles.to_vec()); 101 | } 102 | 16 | 17 => { 103 | // branch case: search the nibble where the hash is present 104 | let branch_idx = node_list 105 | .iter() 106 | .enumerate() 107 | .find(|(_, h)| *h == child_hash) 108 | .map(|(i, _)| i) 109 | .expect("didn't find hash in parent") as u8; 110 | println!( 111 | "\t=> Branch node: (len branch = {}) partial key (nibble): {:?}", 112 | node_list.len(), 113 | hex::encode(vec![branch_idx]).pop().unwrap() 114 | ); 115 | partial_key.insert(0, branch_idx); 116 | } 117 | _ => { 118 | panic!("invalid node") 119 | } 120 | } 121 | } 122 | --------------------------------------------------------------------------------