├── AUTHORS ├── .gitignore ├── .github ├── dependabot.yml └── workflows │ └── ci.yml ├── LICENSE-MIT ├── src ├── error.rs ├── trivial_pc_as │ ├── data_structures.rs │ └── constraints │ │ ├── data_structures.rs │ │ └── mod.rs ├── ipa_pc_as │ ├── data_structures.rs │ └── constraints │ │ └── data_structures.rs ├── hp_as │ ├── data_structures.rs │ └── constraints │ │ ├── data_structures.rs │ │ └── mod.rs ├── data_structures.rs ├── r1cs_nark_as │ ├── r1cs_nark │ │ ├── data_structures.rs │ │ └── mod.rs │ ├── data_structures.rs │ └── constraints │ │ └── data_structures.rs ├── constraints.rs └── lib.rs ├── Cargo.toml ├── README.md ├── examples ├── scaling-pc.rs ├── scaling-nark.rs └── scaling-as.rs └── LICENSE-APACHE /AUTHORS: -------------------------------------------------------------------------------- 1 | Benedikt Bünz 2 | Alessandro Chiesa 3 | William Lin 4 | Pratyush Mishra 5 | Nicholas Spooner 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | .DS_Store 4 | .idea 5 | *.iml 6 | *.ipynb_checkpoints 7 | *.pyc 8 | *.sage.py 9 | params 10 | *.swp 11 | *.swo 12 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: cargo 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | ignore: 9 | - dependency-name: rand_chacha 10 | versions: 11 | - 0.3.0 12 | - dependency-name: rand 13 | versions: 14 | - 0.8.0 15 | - dependency-name: rand_core 16 | versions: 17 | - 0.6.0 18 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use ark_std::boxed::Box; 2 | use ark_std::error::Error; 3 | use ark_std::format; 4 | use ark_std::string::String; 5 | 6 | /// Common errors for the `AccumulationScheme` trait. 7 | #[derive(Debug)] 8 | pub enum ASError { 9 | /// The accumulator was corrupted or malformed. 
10 | MalformedAccumulator(String), 11 | 12 | /// The input was corrupted or malformed. 13 | MalformedInput(String), 14 | 15 | /// There are no inputs nor accumulators and nothing else can be done. 16 | MissingAccumulatorsAndInputs(String), 17 | 18 | /// An RngCore was expected, but was not passed in. 19 | MissingRng(String), 20 | } 21 | 22 | impl core::fmt::Display for ASError { 23 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 24 | let error_text = match self { 25 | ASError::MalformedAccumulator(err) => format!("MalformedAccumulator: {}", err), 26 | ASError::MalformedInput(err) => format!("MalformedInput: {}", err), 27 | ASError::MissingAccumulatorsAndInputs(err) => { 28 | format!("MissingAccumulatorsAndInputs: {}", err) 29 | } 30 | ASError::MissingRng(err) => format!("MissingRng: {}", err), 31 | }; 32 | 33 | write!(f, "{}", error_text) 34 | } 35 | } 36 | 37 | impl Error for ASError {} 38 | 39 | /// Wrapper struct holding any ark_std error 40 | #[derive(Debug)] 41 | pub struct BoxedError(pub Box); 42 | 43 | impl BoxedError { 44 | /// Converts from a static error into the boxed form 45 | pub fn new(err: impl Error + 'static) -> Self { 46 | Self(Box::new(err)) 47 | } 48 | } 49 | 50 | impl core::fmt::Display for BoxedError { 51 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { 52 | core::fmt::Display::fmt(self.0.as_ref(), f) 53 | } 54 | } 55 | 56 | impl Error for BoxedError {} 57 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | env: 8 | RUST_BACKTRACE: 1 9 | 10 | jobs: 11 | style: 12 | name: Check Style 13 | runs-on: ubuntu-latest 14 | steps: 15 | 16 | - name: Checkout 17 | uses: actions/checkout@v1 18 | - name: Install Rust 19 | uses: actions-rs/toolchain@v1 20 | with: 21 | profile: minimal 22 | toolchain: stable 23 | override: true 24 | components: rustfmt 25 | 26 | - name: cargo fmt --check 27 | uses: actions-rs/cargo@v1 28 | with: 29 | command: fmt 30 | args: --all -- --check 31 | 32 | test: 33 | name: Test 34 | runs-on: ubuntu-latest 35 | env: 36 | RUSTFLAGS: -Dwarnings 37 | strategy: 38 | matrix: 39 | rust: 40 | - stable 41 | - nightly 42 | steps: 43 | - name: Checkout 44 | uses: actions/checkout@v2 45 | 46 | - name: Install Rust (${{ matrix.rust }}) 47 | uses: actions-rs/toolchain@v1 48 | with: 49 | profile: minimal 50 | toolchain: ${{ matrix.rust }} 51 | override: true 52 | 53 | - name: Check examples 54 | uses: actions-rs/cargo@v1 55 | with: 56 | command: check 57 | args: --examples --all 58 | 59 | - name: Check examples with all features on stable 60 | uses: actions-rs/cargo@v1 61 | with: 62 | command: check 63 | args: --examples --all-features --all 64 | if: matrix.rust == 'stable' 65 | 66 | - name: Check benchmarks on nightly 67 | uses: actions-rs/cargo@v1 68 | with: 69 | command: check 70 | args: --all-features --examples --all --benches 71 | if: matrix.rust == 'nightly' 72 | 73 | - name: Test 74 | uses: actions-rs/cargo@v1 75 | with: 76 | command: test 77 | args: --all-features --release 78 | 79 | check_no_std: 80 | name: Check no_std 81 | runs-on: ubuntu-latest 82 | steps: 83 | - name: Checkout 84 | uses: actions/checkout@v2 85 | 86 | - name: Install Rust (${{ matrix.rust }}) 87 | uses: actions-rs/toolchain@v1 88 | with: 89 | toolchain: stable 90 | target: thumbv6m-none-eabi 91 | override: true 92 | 93 | - 
name: Build 94 | uses: actions-rs/cargo@v1 95 | with: 96 | use-cross: true 97 | command: build 98 | args: --no-default-features --target thumbv6m-none-eabi 99 | 100 | - name: Check 101 | uses: actions-rs/cargo@v1 102 | with: 103 | use-cross: true 104 | command: check 105 | args: --examples --no-default-features --target thumbv6m-none-eabi 106 | -------------------------------------------------------------------------------- /src/trivial_pc_as/data_structures.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::AffineCurve; 2 | use ark_ff::{to_bytes, PrimeField, Zero}; 3 | use ark_poly_commit::{trivial_pc, LabeledCommitment, PolynomialLabel}; 4 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 5 | use ark_sponge::{collect_sponge_bytes, collect_sponge_field_elements, Absorbable}; 6 | use ark_std::io::{Read, Write}; 7 | use ark_std::vec::Vec; 8 | 9 | /// The [`InputInstance`][input_instance] of the [`ASForTrivialPC`][as_for_trivial_pc]. 10 | /// 11 | /// [input_instance]: crate::AccumulationScheme::InputInstance 12 | /// [as_for_trivial_pc]: crate::trivial_pc_as::ASForTrivialPC 13 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 14 | pub struct InputInstance { 15 | /// Pedersen commitment to a polynomial. 16 | pub commitment: LabeledCommitment>, 17 | 18 | /// Point where the proof was opened at. 19 | pub point: G::ScalarField, 20 | 21 | /// Evaluation of the committed polynomial at the point. 22 | pub eval: G::ScalarField, 23 | } 24 | 25 | impl InputInstance { 26 | pub(crate) fn zero() -> Self { 27 | Self { 28 | commitment: LabeledCommitment::new( 29 | PolynomialLabel::new(), 30 | trivial_pc::Commitment::default(), 31 | None, 32 | ), 33 | point: G::ScalarField::zero(), 34 | eval: G::ScalarField::zero(), 35 | } 36 | } 37 | } 38 | 39 | impl, CF: PrimeField> Absorbable for InputInstance { 40 | fn to_sponge_bytes(&self) -> Vec { 41 | collect_sponge_bytes!( 42 | CF, 43 | self.commitment.commitment().elem, 44 | to_bytes!(self.point).unwrap(), 45 | to_bytes!(self.eval).unwrap() 46 | ) 47 | } 48 | 49 | fn to_sponge_field_elements(&self) -> Vec { 50 | collect_sponge_field_elements!( 51 | self.commitment.commitment().elem, 52 | to_bytes!(self.point).unwrap(), 53 | to_bytes!(self.eval).unwrap() 54 | ) 55 | } 56 | } 57 | 58 | /// A proof attesting that a single [`Input`][input] of [`ASForTrivialPC`][as_for_trivial_pc] was 59 | /// properly accumulated. 60 | /// 61 | /// [input]: crate::Input 62 | /// [as_for_trivial_pc]: crate::trivial_pc_as::ASForTrivialPC 63 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 64 | pub struct SingleProof { 65 | /// Pedersen commitment to the witness polynomial. 66 | pub(crate) witness_commitment: LabeledCommitment>, 67 | 68 | /// Evaluation of the witness polynomial at the challenge point. 69 | pub(crate) witness_eval: G::ScalarField, 70 | 71 | /// Evaluation of the input polynomial at the challenge point. 72 | pub(crate) eval: G::ScalarField, 73 | } 74 | 75 | /// The list of [`SingleProof`]s for each accumulated input. 76 | /// The [`Proof`][proof] of the [`ASForTrivialPC`][as_for_trivial_pc]. 
77 | /// 78 | /// [proof]: crate::AccumulationScheme::Proof 79 | /// [as_for_trivial_pc]: crate::trivial_pc_as::ASForTrivialPC 80 | pub type Proof = Vec>; 81 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ark-accumulation" 3 | version = "0.1.0" 4 | authors = [ 5 | "Benedikt Bünz ", 6 | "Alessandro Chiesa ", 7 | "William Lin ", 8 | "Pratyush Mishra ", 9 | "Nicholas Spooner ", 10 | ] 11 | edition = "2018" 12 | 13 | [dependencies] 14 | # Dependencies for core library 15 | ark-ec = { version = "^0.2.0", default-features = false } 16 | ark-ff = { version = "^0.2.0", default-features = false } 17 | ark-serialize = { version = "^0.2.0", default-features = false, features = [ "derive" ] } 18 | ark-sponge = { git = "https://github.com/arkworks-rs/sponge/", branch = "accumulation-experimental", default-features = false } 19 | ark-std = { version = "^0.2.0", default-features = false } 20 | derivative = { version = "2.1.1", default-features = false, features = [ "use_core" ] } 21 | 22 | # Dependencies for r1cs 23 | ark-crypto-primitives = { version = "^0.2.0", default-features = false, optional = true } 24 | ark-nonnative-field = { git = "https://github.com/arkworks-rs/nonnative", default-features = false, optional = true, rev = "d57dd4f83f7363e6310526a0a2533e4fc310564b" } 25 | ark-relations = { version = "^0.2.0", default-features = false, optional = true } 26 | ark-r1cs-std = { version = "^0.2.0", default-features = false, optional = true } 27 | tracing = { version = "0.1", default-features = false, features = [ "attributes" ], optional = true } 28 | 29 | # Dependencies for parallel 30 | rayon = { version = "1", optional = true } 31 | 32 | # Dependencies for implementations 33 | ark-poly = { version = "^0.2.0", default-features = false, optional = true } 34 | ark-poly-commit = { git = "https://github.com/arkworks-rs/poly-commit", branch = "accumulation-experimental", default-features = false, optional = true } 35 | blake2 = { version = "0.9.1", default-features = false, optional = true } 36 | digest = { version = "0.9.0", default-features = false, optional = true } 37 | 38 | [dev-dependencies] 39 | ark-pallas = { version = "^0.2.0", features = [ "r1cs", "curve" ] } 40 | tracing = { version = "0.1", default-features = false } 41 | tracing-subscriber = { version = "0.2", default-features = false, features = [ "registry" ] } 42 | 43 | [features] 44 | default = [ "r1cs", "std" ] 45 | 46 | parallel = [ "std", "ark-ec/parallel", "ark-ff/parallel", "ark-poly/parallel", 47 | "ark-poly-commit/parallel", "ark-std/parallel", "rayon" ] 48 | 49 | print-trace = [ "ark-poly-commit/print-trace", "ark-std/print-trace" ] 50 | 51 | r1cs = [ "ark-crypto-primitives/r1cs", "ark-nonnative-field", "ark-poly-commit/r1cs", 52 | "ark-relations", "ark-r1cs-std", "ark-sponge/r1cs", "tracing" ] 53 | 54 | std = [ "ark-crypto-primitives/std", "ark-ec/std", "ark-ff/std", "ark-nonnative-field/std", 55 | "ark-poly/std", "ark-poly-commit/std", "ark-relations/std", "ark-r1cs-std/std", 56 | "ark-serialize/std", "ark-sponge/std", "ark-std/std" ] 57 | 58 | # Accumulation scheme implementations 59 | impl = [] 60 | hp-as = [ "impl", "ark-poly" ] 61 | ipa-pc-as = [ "impl", "ark-poly", "ark-poly-commit", "blake2" ] 62 | r1cs-nark-as = [ "impl", "blake2", "digest", "hp-as", "r1cs" ] 63 | trivial-pc-as = [ "impl", "ark-poly", "ark-poly-commit", "blake2" ] 64 | 65 | [package.metadata.docs.rs] 
66 | all-features = true 67 | rustdoc-args = ["--cfg", "docsrs"] 68 | 69 | [[example]] 70 | name = "scaling-as" 71 | required-features = [ "hp-as", "ipa-pc-as", "r1cs-nark-as", "trivial-pc-as" ] 72 | 73 | [[example]] 74 | name = "scaling-nark" 75 | required-features = [ "hp-as", "r1cs-nark-as" ] 76 | 77 | [[example]] 78 | name = "scaling-pc" 79 | required-features = [ "ipa-pc-as" ] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

Accumulation Schemes



10 | 11 | `ark-accumulation` is a Rust library that provides infrastructure for implementing 12 | *accumulation schemes*. This library was developed as part of the 13 | [Proof-Carrying Data without Succinct Arguments][bclms20] paper, and is released under the MIT License 14 | and the Apache v2 License (see [License](#license)). 15 | 16 | **WARNING:** This is an academic prototype, and in particular has not received careful code review. 17 | This implementation is NOT ready for production use. 18 | 19 | ## Overview 20 | 21 | An accumulation scheme for a predicate is a cryptographic primitive that allows an accumulation 22 | prover to receive a stream of inputs and accumulate them into an object called an *accumulator*. 23 | Given the inputs and outputs of the prover, an accumulation verifier can verify that the set of 24 | inputs was properly accumulated. At any time, an accumulation decider can use a single accumulator 25 | to determine whether all the previously accumulated inputs satisfy the predicate. 26 | 27 | This library provides the following features that enable specific implementations of accumulation 28 | schemes: 29 | 30 | - [`hp-as`](src/hp_as): An accumulation scheme for Hadamard Products 31 | 32 | - [`ipa-pc-as`](src/ipa_pc_as): An accumulation scheme for the polynomial commitment scheme based on 33 | Inner Product Arguments (as [implemented]( 34 | https://github.com/arkworks-rs/poly-commit/tree/accumulation-experimental/src/ipa_pc) in the 35 | [`poly-commit`](https://github.com/arkworks-rs/poly-commit) library) 36 | 37 | - [`r1cs-nark-as`](src/r1cs_nark_as): An accumulation scheme for a NARK for R1CS (as 38 | [implemented](src/r1cs_nark_as/r1cs_nark) in this library) 39 | 40 | - [`trivial-pc-as`](src/trivial_pc_as): An accumulation scheme for the trivial polynomial commitment 41 | scheme (as [implemented]( 42 | https://github.com/arkworks-rs/poly-commit/tree/accumulation-experimental/src/trivial_pc) in the 43 | [`poly-commit`](https://github.com/arkworks-rs/poly-commit) library) 44 | 45 | ## Build guide 46 | 47 | The library compiles on the `stable` toolchain of the Rust compiler. To install the latest version 48 | of Rust, first install `rustup` by following the instructions [here](https://rustup.rs/), or via 49 | your platform's package manager. Once `rustup` is installed, install the Rust toolchain by invoking: 50 | ```bash 51 | rustup install stable 52 | ``` 53 | 54 | After that, use `cargo` (the standard Rust build tool) to build the library: 55 | ```bash 56 | git clone https://github.com/arkworks-rs/accumulation.git 57 | cd accumulation 58 | cargo build --release 59 | ``` 60 | 61 | This library comes with some unit and integration tests. Run these tests with: 62 | ```bash 63 | cargo test 64 | ``` 65 | 66 | ## License 67 | 68 | This library is licensed under either of the following licenses, at your discretion. 69 | 70 | * [Apache License Version 2.0](LICENSE-APACHE) 71 | * [MIT License](LICENSE-MIT) 72 | 73 | Unless you explicitly state otherwise, any contribution that you submit to this library shall be 74 | dual licensed as above (as defined in the Apache v2 License), without any additional terms or 75 | conditions. 
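For readers new to the primitive, the following is a purely illustrative, self-contained toy sketch of the prover/verifier/decider data flow described in the Overview above. It does **not** use this library's `AccumulationScheme` trait or any real cryptography; the predicate, accumulator representation, and function names are all hypothetical stand-ins chosen only to mirror the roles of the three algorithms.

```rust
// Toy illustration of the accumulation-scheme roles (prover, verifier,
// decider). No cryptography and no relation to this crate's actual API.

/// Toy predicate: an input "satisfies the predicate" if it is even.
fn predicate_holds(input: u64) -> bool {
    input % 2 == 0
}

/// Accumulation prover: folds a new input into the old accumulator.
/// Here the accumulator is a single bit recording whether any odd
/// input has ever been accumulated.
fn prove(old_accumulator: u64, input: u64) -> u64 {
    old_accumulator | (input & 1)
}

/// Accumulation verifier: given the prover's input and output, checks
/// that one accumulation step was performed correctly.
fn verify(old_accumulator: u64, input: u64, new_accumulator: u64) -> bool {
    new_accumulator == prove(old_accumulator, input)
}

/// Accumulation decider: inspects only the final accumulator to decide
/// whether every accumulated input satisfied the predicate.
fn decide(accumulator: u64) -> bool {
    accumulator == 0
}

fn main() {
    let inputs = [2u64, 4, 6, 8];
    let mut acc = 0u64;
    for &input in &inputs {
        let new_acc = prove(acc, input);
        assert!(verify(acc, input, new_acc));
        acc = new_acc;
    }
    // All inputs were even, so the decider accepts the final accumulator.
    assert!(decide(acc));
    assert!(inputs.iter().all(|&i| predicate_holds(i)));
    println!("toy accumulation accepted");
}
```

In the real schemes implemented here, the accumulator additionally carries an instance/witness split and a proof, and the verifier is cheap relative to re-checking the predicate; the toy above only mirrors who sees what at each step.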
76 | 77 | ## Reference papers 78 | 79 | [Proof-Carrying Data from Accumulation Schemes][bcms20] 80 | Benedikt Bünz, Alessandro Chiesa, [Pratyush Mishra](https://www.github.com/pratyush), 81 | Nicholas Spooner 82 | 83 | [Proof-Carrying Data without Succinct Arguments][bclms20] 84 | Benedikt Bünz, Alessandro Chiesa, [William Lin](https://github.com/Will-Lin4), 85 | [Pratyush Mishra](https://www.github.com/pratyush), Nicholas Spooner 86 | 87 | [bcms20]: https://eprint.iacr.org/2020/499 88 | [bclms20]: https://eprint.iacr.org/2020/1618 89 | -------------------------------------------------------------------------------- /src/ipa_pc_as/data_structures.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::AffineCurve; 2 | use ark_poly::polynomial::univariate::DensePolynomial; 3 | use ark_poly_commit::{ipa_pc, LabeledCommitment}; 4 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 5 | use ark_sponge::domain_separated::DomainSeparator; 6 | use ark_std::io::{Read, Write}; 7 | use ark_std::vec::Vec; 8 | 9 | /// The [`PredicateIndex`][predicate_index] of the [`AtomicASForInnerProductArgPC`][as_for_ipa_pc]. 10 | /// 11 | /// [predicate_index]: crate::AccumulationScheme::PredicateIndex 12 | /// [as_for_ipa_pc]: crate::ipa_pc_as::AtomicASForInnerProductArgPC 13 | #[derive(Clone)] 14 | pub struct PredicateIndex { 15 | /// The degree bound supported by IpaPC. 16 | pub supported_degree_bound: usize, 17 | 18 | /// The hiding bound supported by IpaPC. 19 | pub supported_hiding_bound: usize, 20 | } 21 | 22 | /// The [`ProverKey`][pk] of the [`AtomicASForInnerProductArgPC`][as_for_ipa_pc]. 23 | /// 24 | /// [pk]: crate::AccumulationScheme::ProverKey 25 | /// [as_for_ipa_pc]: crate::ipa_pc_as::AtomicASForInnerProductArgPC 26 | #[derive(Clone)] 27 | pub struct ProverKey { 28 | /// The IpaPC committer key for committing input polynomials. 29 | pub(crate) ipa_ck: ipa_pc::CommitterKey, 30 | 31 | /// The accumulation scheme's [`VerifierKey`]. 32 | pub(crate) verifier_key: VerifierKey, 33 | } 34 | 35 | /// The [`VerifierKey`][vk] of the [`AtomicASForInnerProductArgPC`][as_for_ipa_pc]. 36 | /// 37 | /// [vk]: crate::AccumulationScheme::VerifierKey 38 | /// [as_for_ipa_pc]: crate::ipa_pc_as::AtomicASForInnerProductArgPC 39 | #[derive(Clone)] 40 | pub struct VerifierKey { 41 | /// The IpaPC succinct check key for inputs. 42 | pub(crate) ipa_svk: ipa_pc::SuccinctVerifierKey, 43 | 44 | /// The IpaPC committer key for random linear polynomials. 45 | pub(crate) ipa_ck_linear: ipa_pc::CommitterKey, 46 | 47 | /// The IpaPC proof for the opening of the zero polynomial at the point zero. 48 | pub(crate) default_proof: ipa_pc::Proof, 49 | } 50 | 51 | /// The [`InputInstance`][input_instance] of the [`AtomicASForInnerProductArgPC`][as_for_ipa_pc]. 52 | /// 53 | /// [input_instance]: crate::AccumulationScheme::InputInstance 54 | /// [as_for_ipa_pc]: crate::ipa_pc_as::AtomicASForInnerProductArgPC 55 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 56 | pub struct InputInstance { 57 | /// The IpaPC commitment to a polynomial. 58 | pub ipa_commitment: LabeledCommitment>, 59 | 60 | /// Point where the proof was opened at. 61 | pub point: G::ScalarField, 62 | 63 | /// Evaluation of the committed polynomial at the point. 64 | pub evaluation: G::ScalarField, 65 | 66 | /// The IpaPC proof of evaluation at the point. 67 | pub ipa_proof: ipa_pc::Proof, 68 | } 69 | 70 | /// The randomness used to apply zero-knowledge to commitment and accumulation. 
71 | /// If used, the randomness is the [`Proof`][proof] of the 72 | /// [`AtomicASForInnerProductArgPC`][as_for_ipa_pc]. 73 | /// 74 | /// [Proof]: crate::AccumulationScheme::Proof 75 | /// [as_for_ipa_pc]: crate::ipa_pc_as::AtomicASForInnerProductArgPC 76 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 77 | pub struct Randomness { 78 | /// A random linear polynomial to be accumulated 79 | pub(crate) random_linear_polynomial: DensePolynomial, 80 | 81 | /// The IpaPC commitment to the random linear polynomial. 82 | pub(crate) random_linear_polynomial_commitment: G, 83 | 84 | /// Randomness used to commit to the linear combination of the input polynomials. 85 | pub(crate) commitment_randomness: G::ScalarField, 86 | } 87 | 88 | /// The domain for the IpaPC sponge. Used as a substitution for forking for backwards compatibility. 89 | pub struct IpaPCDomain {} 90 | impl DomainSeparator for IpaPCDomain { 91 | fn domain() -> Vec { 92 | b"IPA-PC-2020".to_vec() 93 | } 94 | } 95 | 96 | /// The domain for the ASForIpaPC sponge. Used as a substitution for forking for backwards 97 | /// compatibility. 98 | pub struct ASForIpaPCDomain {} 99 | impl DomainSeparator for ASForIpaPCDomain { 100 | fn domain() -> Vec { 101 | b"AS-FOR-IPA-PC-2020".to_vec() 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /examples/scaling-pc.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_camel_case_types)] 2 | // For randomness (during paramgen and proof generation) 3 | // PS: thread_rng is *insecure* 4 | 5 | // For benchmarking 6 | use crate::ipa_pc_as::IpaPCDomain; 7 | use ark_accumulation::ipa_pc_as; 8 | use ark_ff::PrimeField; 9 | use ark_pallas::{Affine as G1Affine, Fq, Fr}; 10 | use ark_poly::univariate::DensePolynomial; 11 | use ark_poly_commit::ipa_pc::InnerProductArgPC; 12 | use ark_poly_commit::trivial_pc::TrivialPC; 13 | use ark_poly_commit::{LabeledPolynomial, PCCommitterKey, PolynomialCommitment, UVPolynomial}; 14 | use ark_serialize::CanonicalSerialize; 15 | use ark_sponge::domain_separated::DomainSeparatedSponge; 16 | use ark_sponge::poseidon::PoseidonSponge; 17 | use ark_std::rand::Rng; 18 | use ark_std::vec::Vec; 19 | use blake2::Blake2s; 20 | use std::time::Instant; 21 | 22 | type TrivPC = TrivialPC>; 23 | 24 | type IpaPC = InnerProductArgPC< 25 | G1Affine, 26 | Blake2s, 27 | DensePolynomial, 28 | Fq, 29 | DomainSeparatedSponge, IpaPCDomain>, 30 | >; 31 | 32 | fn profile_pc(min_degree: usize, max_degree: usize, rng: &mut R) 33 | where 34 | F: PrimeField, 35 | PC: PolynomialCommitment>, 36 | R: Rng, 37 | { 38 | println!("Performing setup!"); 39 | let pc_pp = PC::setup(1 << max_degree, Some(1), rng).unwrap(); 40 | println!("Done with setup!"); 41 | 42 | for degree in min_degree..=max_degree { 43 | let degree = (1 << degree) - 1; 44 | println!("Degree: {:?}", degree); 45 | let supported_degree = degree; 46 | 47 | let start = Instant::now(); 48 | let (ck, vk) = PC::trim(&pc_pp, supported_degree, 0, None).unwrap(); 49 | let index_time = start.elapsed(); 50 | println!("Indexer: {:?}", index_time.as_millis()); 51 | 52 | let polynomials = vec![{ 53 | let degree = ck.supported_degree(); 54 | let label = format!("Input {}", 1); 55 | 56 | let polynomial = UVPolynomial::rand(degree, rng); 57 | let labeled_polynomial = LabeledPolynomial::new(label, polynomial, None, None); 58 | 59 | labeled_polynomial 60 | }]; 61 | 62 | let start = Instant::now(); 63 | let (comms, rands) = PC::commit(&ck, &polynomials, 
Some(rng)).unwrap(); 64 | let commit_time = start.elapsed(); 65 | println!("Committer: {:?}", commit_time.as_millis()); 66 | 67 | let point = F::rand(rng); 68 | let values = vec![polynomials[0].evaluate(&point)]; 69 | let opening_challenge = F::one(); 70 | 71 | let start = Instant::now(); 72 | let proof = PC::open( 73 | &ck, 74 | &polynomials, 75 | &comms, 76 | &point, 77 | opening_challenge, 78 | &rands, 79 | Some(rng), 80 | ) 81 | .unwrap(); 82 | let open_time = start.elapsed(); 83 | println!("Open: {:?}", open_time.as_millis()); 84 | 85 | let start = Instant::now(); 86 | assert!(PC::check( 87 | &vk, 88 | &comms, 89 | &point, 90 | values, 91 | &proof, 92 | opening_challenge, 93 | Some(rng) 94 | ) 95 | .unwrap()); 96 | let check_time = start.elapsed(); 97 | println!("Check: {:?}\n", check_time.as_millis()); 98 | 99 | println!("Proof size: {}", proof.serialized_size()); 100 | println!("\n\n"); 101 | } 102 | } 103 | 104 | fn main() { 105 | let args: Vec = std::env::args().collect(); 106 | if args.len() < 4 || args[1] == "-h" || args[1] == "--help" { 107 | println!("\nHelp: Invoke this as \n"); 108 | } 109 | let min_degree: usize = String::from(args[1].clone()) 110 | .parse() 111 | .expect(" should be integer"); 112 | let max_degree: usize = String::from(args[2].clone()) 113 | .parse() 114 | .expect(" should be integer"); 115 | 116 | let rng = &mut ark_std::test_rng(); 117 | println!("\n\n\n================ Benchmarking PC_LH ================"); 118 | profile_pc::<_, TrivPC, _>(min_degree, max_degree, rng); 119 | println!("\n\n\n================ Benchmarking IpaPC ================"); 120 | profile_pc::<_, IpaPC, _>(min_degree, max_degree, rng); 121 | } 122 | -------------------------------------------------------------------------------- /examples/scaling-nark.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_camel_case_types)] 2 | // For randomness (during paramgen and proof generation) 3 | // PS: thread_rng is *insecure* 4 | 5 | // For benchmarking 6 | use ark_accumulation::r1cs_nark_as::r1cs_nark::R1CSNark; 7 | use ark_ff::{PrimeField, One}; 8 | use ark_pallas::{Affine, Fq, Fr}; 9 | use ark_relations::{ 10 | lc, 11 | r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}, 12 | }; 13 | use ark_serialize::CanonicalSerialize; 14 | use ark_sponge::poseidon::PoseidonSponge; 15 | use ark_sponge::CryptographicSponge; 16 | use ark_std::rand::Rng; 17 | use ark_std::vec::Vec; 18 | use ark_std::UniformRand; 19 | use std::time::Instant; 20 | 21 | #[derive(Copy, Clone)] 22 | struct DummyCircuit { 23 | pub a: Option, 24 | pub b: Option, 25 | pub num_input_variables: usize, 26 | pub num_witness_variables: usize, 27 | pub num_constraints: usize, 28 | } 29 | 30 | impl ConstraintSynthesizer for DummyCircuit { 31 | fn generate_constraints(self, cs: ConstraintSystemRef) -> Result<(), SynthesisError> { 32 | let a = cs.new_witness_variable(|| self.a.ok_or(SynthesisError::AssignmentMissing))?; 33 | let b = cs.new_witness_variable(|| self.b.ok_or(SynthesisError::AssignmentMissing))?; 34 | let c = cs.new_input_variable(|| { 35 | let a = self.a.ok_or(SynthesisError::AssignmentMissing)?; 36 | let b = self.b.ok_or(SynthesisError::AssignmentMissing)?; 37 | 38 | Ok(a * b) 39 | })?; 40 | 41 | for _ in 0..(self.num_input_variables - 1) { 42 | let _ = cs.new_input_variable(|| self.a.ok_or(SynthesisError::AssignmentMissing))?; 43 | } 44 | for _ in 0..(self.num_witness_variables - 1) { 45 | let _ = cs.new_witness_variable(|| 
self.a.ok_or(SynthesisError::AssignmentMissing))?; 46 | } 47 | 48 | for _ in 0..self.num_constraints - 1 { 49 | cs.enforce_constraint(lc!() + a, lc!() + b, lc!() + c)?; 50 | } 51 | 52 | cs.enforce_constraint(lc!(), lc!(), lc!())?; 53 | 54 | Ok(()) 55 | } 56 | } 57 | 58 | fn profile_nark( 59 | min_constraints: usize, 60 | max_constraints: usize, 61 | make_zk: bool, 62 | rng: &mut R, 63 | ) { 64 | let pp = R1CSNark::>::setup(); 65 | let mut times = Vec::new(); 66 | 67 | for num_constraints in min_constraints..=max_constraints { 68 | let num_constraints = 1 << num_constraints; 69 | let c = DummyCircuit { 70 | a: Some(Fr::rand(rng)), 71 | b: Some(Fr::rand(rng)), 72 | num_input_variables: 5, 73 | num_witness_variables: num_constraints - 5, 74 | num_constraints, 75 | }; 76 | let a = c.a.unwrap(); 77 | let v = a * &c.b.unwrap(); 78 | 79 | let start = Instant::now(); 80 | let (ipk, ivk) = R1CSNark::>::index(&pp, c).unwrap(); 81 | let index_time = start.elapsed().as_millis(); 82 | 83 | let start = Instant::now(); 84 | let proof = R1CSNark::>::prove( 85 | &ipk, 86 | c.clone(), 87 | make_zk, 88 | Some(PoseidonSponge::new()), 89 | Some(rng), 90 | ) 91 | .unwrap(); 92 | let prover_time = start.elapsed().as_millis(); 93 | 94 | let start = Instant::now(); 95 | assert!(R1CSNark::>::verify( 96 | &ivk, 97 | &[Fr::one(), v, a, a, a, a], 98 | &proof, 99 | Some(PoseidonSponge::new()) 100 | )); 101 | let verifier_time = start.elapsed().as_millis(); 102 | let record = (num_constraints, index_time, prover_time, verifier_time); 103 | println!( 104 | "(num_constraints, index_time, prover_time, verifier_time):\n{:?}", 105 | record 106 | ); 107 | println!("Proof size: {}", proof.serialized_size()); 108 | times.push(record) 109 | } 110 | } 111 | 112 | fn main() { 113 | let args: Vec = std::env::args().collect(); 114 | if args.len() < 3 || args[1] == "-h" || args[1] == "--help" { 115 | println!("\nHelp: Invoke this as \n"); 116 | } 117 | let min_num_constraints: usize = String::from(args[1].clone()) 118 | .parse() 119 | .expect(" should be integer"); 120 | let max_num_constraints: usize = String::from(args[2].clone()) 121 | .parse() 122 | .expect(" should be integer"); 123 | 124 | let rng = &mut ark_std::test_rng(); 125 | 126 | println!("\n\n\n================ Benchmarking NARK without zk ================"); 127 | profile_nark(min_num_constraints, max_num_constraints, false, rng); 128 | 129 | println!("\n\n\n================ Benchmarking NARK with zk ================"); 130 | profile_nark(min_num_constraints, max_num_constraints, true, rng); 131 | } 132 | -------------------------------------------------------------------------------- /src/hp_as/data_structures.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::AffineCurve; 2 | use ark_ff::{Field, PrimeField}; 3 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 4 | use ark_sponge::{collect_sponge_bytes, collect_sponge_field_elements, Absorbable}; 5 | use ark_std::io::{Read, Write}; 6 | use ark_std::vec; 7 | use ark_std::vec::Vec; 8 | 9 | /// The [`InputInstance`][input_instance] of the [`ASForHadamardProducts`][as_for_hp]. 10 | /// 11 | /// [input_instance]: crate::AccumulationScheme::InputInstance 12 | /// [as_for_hp]: crate::hp_as::ASForHadamardProducts 13 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize, PartialEq, Eq)] 14 | pub struct InputInstance { 15 | /// Pedersen commitment to the `a` vector of the Hadamard product relation. 
16 | pub comm_1: G, 17 | 18 | /// Pedersen commitment to the `b` vector of the Hadamard product relation. 19 | pub comm_2: G, 20 | 21 | /// Pedersen commitment to the `a ◦ b` vector of the Hadamard product relation. 22 | pub comm_3: G, 23 | } 24 | 25 | impl InputInstance { 26 | pub(crate) fn zero() -> Self { 27 | Self { 28 | comm_1: G::zero(), 29 | comm_2: G::zero(), 30 | comm_3: G::zero(), 31 | } 32 | } 33 | } 34 | 35 | impl Absorbable for InputInstance 36 | where 37 | G: AffineCurve + Absorbable, 38 | CF: PrimeField, 39 | { 40 | fn to_sponge_bytes(&self) -> Vec { 41 | collect_sponge_bytes!(CF, self.comm_1, self.comm_2, self.comm_3) 42 | } 43 | 44 | fn to_sponge_field_elements(&self) -> Vec { 45 | collect_sponge_field_elements!(self.comm_1, self.comm_2, self.comm_3) 46 | } 47 | } 48 | 49 | /// The [`InputWitness`][input_witness] of the [`ASForHadamardProducts`][as_for_hp]. 50 | /// 51 | /// [input_witness]: crate::AccumulationScheme::InputWitness 52 | /// [as_for_hp]: crate::hp_as::ASForHadamardProducts 53 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 54 | pub struct InputWitness { 55 | /// The `a` vector of the Hadamard product relation. 56 | pub a_vec: Vec, 57 | 58 | /// The `b` vector of the Hadamard product relation. 59 | pub b_vec: Vec, 60 | 61 | /// Randomness used to compute hiding commitments for zero-knowledge. 62 | pub randomness: Option>, 63 | } 64 | 65 | impl InputWitness { 66 | pub(crate) fn zero(vec_len: usize) -> Self { 67 | Self { 68 | a_vec: vec![F::zero(); vec_len], 69 | b_vec: vec![F::zero(); vec_len], 70 | randomness: None, 71 | } 72 | } 73 | } 74 | 75 | /// The randomness used to compute hiding commitments for zero-knowledge. 76 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 77 | pub struct InputWitnessRandomness { 78 | /// Randomness used to commit the random vector that hides the `a` vector of the Hadamard 79 | /// product relation. 80 | pub rand_1: F, 81 | 82 | /// Randomness used to commit the random vector that hides the `b` vector of the Hadamard 83 | /// product relation. 84 | pub rand_2: F, 85 | 86 | /// Randomness used to commit the cross term randomness vector. 87 | pub rand_3: F, 88 | } 89 | 90 | /// The [`Proof`][proof] of the [`ASForHadamardProducts`][as_for_hp]. 91 | /// 92 | /// [proof]: crate::AccumulationScheme::Proof 93 | /// [as_for_hp]: crate::hp_as::ASForHadamardProducts 94 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 95 | pub struct Proof { 96 | /// Pedersen commitments to each coefficient vector of the product polynomial 97 | /// `a(X, µ) ◦ b(X)`, excluding `n-1`th coefficient (0-index) 98 | pub(crate) product_poly_comm: ProductPolynomialCommitment, 99 | 100 | /// Pedersen commitments to the random vectors used to apply zero-knowledge to the vectors 101 | /// of the Hadamard product relation. 102 | pub(crate) hiding_comms: Option>, 103 | } 104 | 105 | /// The Pedersen commitments to each coefficient vector of the product polynomial `a(X, µ) ◦ b(X)`. 106 | /// Excludes `n-1`th commitment (0-index) 107 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 108 | pub(crate) struct ProductPolynomialCommitment { 109 | /// Pedersen commitments to the first `n-1` coefficients of the lower powers. 110 | pub(crate) low: Vec, 111 | 112 | /// Pedersen commitments to the last `n-1` coefficients of the higher powers. 
113 | pub(crate) high: Vec, 114 | } 115 | 116 | impl Absorbable for ProductPolynomialCommitment 117 | where 118 | G: AffineCurve + Absorbable, 119 | CF: PrimeField, 120 | { 121 | fn to_sponge_bytes(&self) -> Vec { 122 | collect_sponge_bytes!(CF, self.low, self.high) 123 | } 124 | 125 | fn to_sponge_field_elements(&self) -> Vec { 126 | collect_sponge_field_elements!(self.low, self.high) 127 | } 128 | } 129 | 130 | /// The Pedersen commitments to the random vectors used to apply zero-knowledge to the vectors of 131 | /// the Hadamard product relation. 132 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 133 | pub(crate) struct ProofHidingCommitments { 134 | /// Pedersen commitment to the random vector that hides the `a` vector of the Hadamard 135 | /// product relation. 136 | pub(crate) comm_1: G, 137 | 138 | /// Pedersen commitment to the random vector that hides the `b` vector of the Hadamard 139 | /// product relation. 140 | pub(crate) comm_2: G, 141 | 142 | /// Pedersen commitment to the cross term randomness vector 143 | pub(crate) comm_3: G, 144 | } 145 | 146 | impl Absorbable for ProofHidingCommitments 147 | where 148 | G: AffineCurve + Absorbable, 149 | CF: PrimeField, 150 | { 151 | fn to_sponge_bytes(&self) -> Vec { 152 | collect_sponge_bytes!(CF, self.comm_1, self.comm_2, self.comm_3) 153 | } 154 | 155 | fn to_sponge_field_elements(&self) -> Vec { 156 | collect_sponge_field_elements!(self.comm_1, self.comm_2, self.comm_3) 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /src/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::AccumulationScheme; 2 | 3 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 4 | use ark_std::io::{Read, Write}; 5 | use ark_std::rand::RngCore; 6 | 7 | // Useful type alias for implementations. 8 | #[cfg(feature = "impl")] 9 | use {ark_ec::AffineCurve, ark_ff::Field}; 10 | 11 | #[cfg(feature = "impl")] 12 | pub(crate) type ConstraintF = <::BaseField as Field>::BasePrimeField; 13 | 14 | /// A pair consisting of references to an instance and witness. 15 | pub struct InstanceWitnessPairRef< 16 | 'a, 17 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize, 18 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize, 19 | > { 20 | /// The reference to the instance of a pair. 21 | pub instance: &'a Instance, 22 | 23 | /// The reference to the witness of a pair. 24 | pub witness: &'a Witness, 25 | } 26 | 27 | impl<'a, Instance, Witness> InstanceWitnessPairRef<'a, Instance, Witness> 28 | where 29 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize, 30 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize, 31 | { 32 | /// Extract the accumulator instances out of a list of accumulators. 33 | pub fn instances(pairs: impl IntoIterator) -> impl Iterator 34 | where 35 | Self: 'a, 36 | { 37 | pairs.into_iter().map(|p| p.instance) 38 | } 39 | } 40 | 41 | /// A pair consisting of an instance and witness. 42 | #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] 43 | #[derivative(Clone(bound = " 44 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize, 45 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize, 46 | "))] 47 | pub struct InstanceWitnessPair< 48 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize, 49 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize, 50 | > { 51 | /// The instance of the pair. 
52 | pub instance: Instance, 53 | 54 | /// The witness of the pair. 55 | pub witness: Witness, 56 | } 57 | 58 | impl InstanceWitnessPair 59 | where 60 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize, 61 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize, 62 | { 63 | /// Returns a reference for each instance in the list of [`InstanceWitnessPair`]s. 64 | pub fn instances<'a>( 65 | pairs: impl IntoIterator, 66 | ) -> impl Iterator 67 | where 68 | Self: 'a, 69 | { 70 | pairs.into_iter().map(|p| &p.instance) 71 | } 72 | 73 | /// Returns a [`InstanceWitnessPairRef`] for each [`InstanceWitnessPair`] reference. 74 | pub fn map_to_refs<'a>( 75 | pairs: impl IntoIterator, 76 | ) -> impl Iterator> 77 | where 78 | Self: 'a, 79 | { 80 | pairs.into_iter().map(|p| InstanceWitnessPairRef { 81 | instance: &p.instance, 82 | witness: &p.witness, 83 | }) 84 | } 85 | 86 | /// Returns the [`InstanceWitnessPairRef`] for a [`InstanceWitnessPair`] reference. 87 | pub fn as_ref(&self) -> InstanceWitnessPairRef<'_, Instance, Witness> { 88 | InstanceWitnessPairRef { 89 | instance: &self.instance, 90 | witness: &self.witness, 91 | } 92 | } 93 | } 94 | 95 | impl Default for InstanceWitnessPair 96 | where 97 | Instance: Clone + CanonicalSerialize + CanonicalDeserialize + Default, 98 | Witness: Clone + CanonicalSerialize + CanonicalDeserialize + Default, 99 | { 100 | fn default() -> Self { 101 | Self { 102 | instance: Instance::default(), 103 | witness: Witness::default(), 104 | } 105 | } 106 | } 107 | 108 | /// A pair of [`AccumulatorInstance`][instance] and [`AccumulatorWitness`][witness]. 109 | /// 110 | /// [instance]: AccumulationScheme::AccumulatorInstance 111 | /// [witness]: AccumulationScheme::AccumulatorWitness 112 | pub type Accumulator = InstanceWitnessPair< 113 | >::AccumulatorInstance, 114 | >::AccumulatorWitness, 115 | >; 116 | 117 | /// A pair of references to an [`AccumulatorInstance`][instance] and 118 | /// [`AccumulatorWitness`][witness]. 119 | /// 120 | /// [instance]: AccumulationScheme::AccumulatorInstance 121 | /// [witness]: AccumulationScheme::AccumulatorWitness 122 | pub type AccumulatorRef<'a, CF, S, A> = InstanceWitnessPairRef< 123 | 'a, 124 | >::AccumulatorInstance, 125 | >::AccumulatorWitness, 126 | >; 127 | 128 | /// A pair of [`InputInstance`][instance] and [`InputWitness`][witness]. 129 | /// 130 | /// [instance]: AccumulationScheme::InputInstance 131 | /// [witness]: AccumulationScheme::InputWitness 132 | pub type Input = InstanceWitnessPair< 133 | >::InputInstance, 134 | >::InputWitness, 135 | >; 136 | 137 | /// A pair of references to an [`InputInstance`][instance] and [`InputWitness`][witness]. 138 | /// 139 | /// [instance]: AccumulationScheme::InputInstance 140 | /// [witness]: AccumulationScheme::InputWitness 141 | pub type InputRef<'a, CF, S, A> = InstanceWitnessPairRef< 142 | 'a, 143 | >::InputInstance, 144 | >::InputWitness, 145 | >; 146 | 147 | /// Specifies the zero-knowledge configuration for an accumulation. 148 | pub enum MakeZK<'a> { 149 | /// Enable zero-knowledge accumulation. 150 | Enabled(&'a mut dyn RngCore), 151 | 152 | /// Disable zero-knowledge accumulation. 
153 | Disabled, 154 | } 155 | 156 | impl<'a> MakeZK<'a> { 157 | /// Converts the MakeZK parameter to a (make_zk_enabled, rng) 158 | pub fn into_components(self) -> (bool, Option<&'a mut dyn RngCore>) { 159 | match self { 160 | MakeZK::Enabled(rng) => (true, Some(rng)), 161 | MakeZK::Disabled => (false, None), 162 | } 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/r1cs_nark_as/r1cs_nark/data_structures.rs: -------------------------------------------------------------------------------- 1 | use ark_ec::AffineCurve; 2 | use ark_ff::{Field, PrimeField}; 3 | use ark_poly_commit::trivial_pc::CommitterKey; 4 | use ark_relations::r1cs::Matrix; 5 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 6 | use ark_sponge::{collect_sponge_bytes, collect_sponge_field_elements, Absorbable}; 7 | use ark_std::io::{Read, Write}; 8 | use ark_std::vec::Vec; 9 | 10 | /// The public parameters of this NARK. 11 | pub type PublicParameters = (); 12 | 13 | /// Information about the index, including the field of definition, the number of 14 | /// variables, the number of constraints, and the maximum number of non-zero 15 | /// entries in any of the constraint matrices. 16 | #[derive(Clone, Copy)] 17 | pub(crate) struct IndexInfo { 18 | /// The total number of variables in the constraint system. 19 | pub(crate) num_variables: usize, 20 | 21 | /// The number of constraints. 22 | pub(crate) num_constraints: usize, 23 | 24 | /// The number of public input (i.e. instance) variables. 25 | pub(crate) num_instance_variables: usize, 26 | 27 | /// Hash of the matrices. 28 | pub(crate) matrices_hash: [u8; 32], 29 | } 30 | 31 | /// The index prover key for our NARK. 32 | #[derive(Clone)] 33 | pub struct IndexProverKey { 34 | /// Information about the index. 35 | pub(crate) index_info: IndexInfo, 36 | 37 | /// The `A` matrix of the R1CS instance. 38 | pub(crate) a: Matrix, 39 | 40 | /// The `B` matrix of the R1CS instance. 41 | pub(crate) b: Matrix, 42 | 43 | /// The `C` matrix of the R1CS instance. 44 | pub(crate) c: Matrix, 45 | 46 | /// Group elements required by the Pedersen commitment. 47 | pub(crate) ck: CommitterKey, 48 | } 49 | 50 | /// Index verifier key for our NARK. 51 | pub type IndexVerifierKey = IndexProverKey; 52 | 53 | /// The sigma protocol's prover commitment randomness. 54 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 55 | pub struct FirstRoundMessageRandomness { 56 | /// Pedersen commitment to the vector that blinds the witness in `Az`. 57 | pub(crate) comm_r_a: G, 58 | 59 | /// Pedersen commitment to the vector that blinds the witness in `Bz`. 60 | pub(crate) comm_r_b: G, 61 | 62 | /// Pedersen commitment to the vector that blinds the witness in `Cz`. 
63 | pub(crate) comm_r_c: G, 64 | 65 | /// Pedersen commitment to the first cross term randomness vector 66 | pub(crate) comm_1: G, 67 | 68 | /// Pedersen commitment to the second cross term randomness vector 69 | pub(crate) comm_2: G, 70 | } 71 | 72 | impl Absorbable for FirstRoundMessageRandomness 73 | where 74 | CF: PrimeField, 75 | G: AffineCurve + Absorbable, 76 | { 77 | fn to_sponge_bytes(&self) -> Vec { 78 | collect_sponge_bytes!( 79 | CF, 80 | self.comm_r_a, 81 | self.comm_r_b, 82 | self.comm_r_c, 83 | self.comm_1, 84 | self.comm_2 85 | ) 86 | } 87 | 88 | fn to_sponge_field_elements(&self) -> Vec { 89 | collect_sponge_field_elements!( 90 | self.comm_r_a, 91 | self.comm_r_b, 92 | self.comm_r_c, 93 | self.comm_1, 94 | self.comm_2 95 | ) 96 | } 97 | } 98 | 99 | /// The sigma protocol's prover commitment. 100 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 101 | pub struct FirstRoundMessage { 102 | /// Pedersen commitment to the `Az` vector. 103 | pub(crate) comm_a: G, 104 | 105 | /// Pedersen commitment to the `Bz` vector. 106 | pub(crate) comm_b: G, 107 | 108 | /// Pedersen commitment to the `Cz` vector. 109 | pub(crate) comm_c: G, 110 | 111 | /// The randomness used for the commitment. 112 | pub(crate) randomness: Option>, 113 | } 114 | 115 | impl FirstRoundMessage { 116 | pub(crate) fn zero(make_zk: bool) -> Self { 117 | Self { 118 | comm_a: G::zero(), 119 | comm_b: G::zero(), 120 | comm_c: G::zero(), 121 | randomness: if make_zk { 122 | Some(FirstRoundMessageRandomness { 123 | comm_r_a: G::zero(), 124 | comm_r_b: G::zero(), 125 | comm_r_c: G::zero(), 126 | comm_1: G::zero(), 127 | comm_2: G::zero(), 128 | }) 129 | } else { 130 | None 131 | }, 132 | } 133 | } 134 | } 135 | 136 | impl Absorbable for FirstRoundMessage 137 | where 138 | CF: PrimeField, 139 | G: AffineCurve + Absorbable, 140 | { 141 | fn to_sponge_bytes(&self) -> Vec { 142 | collect_sponge_bytes!(CF, self.comm_a, self.comm_b, self.comm_c, self.randomness) 143 | } 144 | 145 | fn to_sponge_field_elements(&self) -> Vec { 146 | collect_sponge_field_elements!(self.comm_a, self.comm_b, self.comm_c, self.randomness) 147 | } 148 | } 149 | 150 | /// The sigma protocol's prover response randomness. 151 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 152 | pub struct SecondRoundMessageRandomness { 153 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 154 | /// `A` matrix. 155 | pub(crate) sigma_a: F, 156 | 157 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 158 | /// `B` matrix. 159 | pub(crate) sigma_b: F, 160 | 161 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 162 | /// `C` matrix. 163 | pub(crate) sigma_c: F, 164 | 165 | /// The blinded randomness for the Pedersen commitment to the cross terms 166 | pub(crate) sigma_o: F, 167 | } 168 | 169 | /// The sigma protocol's prover response. 170 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 171 | pub struct SecondRoundMessage { 172 | /// The R1CS witness with randomness applied if zero-knowledge is needed. 173 | pub(crate) blinded_witness: Vec, 174 | 175 | /// The randomness used for the response. 
176 | pub(crate) randomness: Option>, 177 | } 178 | 179 | impl SecondRoundMessage { 180 | pub(crate) fn zero(witness_len: usize, make_zk: bool) -> Self { 181 | Self { 182 | blinded_witness: vec![F::zero(); witness_len], 183 | randomness: if make_zk { 184 | Some(SecondRoundMessageRandomness { 185 | sigma_a: F::zero(), 186 | sigma_b: F::zero(), 187 | sigma_c: F::zero(), 188 | sigma_o: F::zero(), 189 | }) 190 | } else { 191 | None 192 | }, 193 | } 194 | } 195 | } 196 | 197 | /// The proof for our NARK. 198 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 199 | pub struct Proof { 200 | /// The sigma protocol's prove commitment. 201 | pub first_msg: FirstRoundMessage, 202 | 203 | /// The sigma protocol's prove response. 204 | pub second_msg: SecondRoundMessage, 205 | } 206 | -------------------------------------------------------------------------------- /src/trivial_pc_as/constraints/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::trivial_pc_as::{InputInstance, SingleProof}; 2 | use crate::ConstraintF; 3 | 4 | use ark_ec::AffineCurve; 5 | use ark_ff::{Field, PrimeField}; 6 | use ark_nonnative_field::NonNativeFieldVar; 7 | use ark_r1cs_std::alloc::{AllocVar, AllocationMode}; 8 | use ark_r1cs_std::fields::fp::FpVar; 9 | use ark_r1cs_std::groups::CurveVar; 10 | use ark_r1cs_std::ToBytesGadget; 11 | use ark_relations::r1cs::{Namespace, SynthesisError}; 12 | use ark_sponge::constraints::AbsorbableGadget; 13 | use ark_sponge::{collect_sponge_field_elements_gadget, Absorbable}; 14 | use ark_std::borrow::Borrow; 15 | use ark_std::marker::PhantomData; 16 | use ark_std::vec::Vec; 17 | 18 | /// The [`VerifierKey`][vk] of the [`ASForTrivialPC`][as_for_trivial_pc_verifier]. 19 | /// 20 | /// [vk]: crate::constraints::ASVerifierGadget::VerifierKey 21 | /// [as_for_trivial_pc_verifier]: crate::trivial_pc_as::constraints::ASForTrivialPCVerifierGadget 22 | pub struct VerifierKeyVar(pub(crate) FpVar); 23 | 24 | impl AllocVar for VerifierKeyVar 25 | where 26 | CF: PrimeField, 27 | { 28 | fn new_variable>( 29 | cs: impl Into>, 30 | f: impl FnOnce() -> Result, 31 | mode: AllocationMode, 32 | ) -> Result { 33 | let ns = cs.into(); 34 | f().and_then(|vk| { 35 | let vk = FpVar::::new_variable( 36 | ns.clone(), 37 | || { 38 | Ok(Absorbable::::to_sponge_field_elements(vk.borrow()) 39 | .pop() 40 | .unwrap()) 41 | }, 42 | mode, 43 | )?; 44 | Ok(VerifierKeyVar(vk)) 45 | }) 46 | } 47 | } 48 | 49 | /// The [`InputInstance`][input] of the [`ASForTrivialPC`][as_for_trivial_pc_verifier]. 50 | /// 51 | /// [input]: crate::constraints::ASVerifierGadget::InputInstance 52 | /// [as_for_trivial_pc_verifier]: crate::trivial_pc_as::constraints::ASForTrivialPCVerifierGadget 53 | pub struct InputInstanceVar 54 | where 55 | G: AffineCurve, 56 | C: CurveVar::BasePrimeField>, 57 | { 58 | /// Pedersen commitment to a polynomial. 59 | pub commitment: C, 60 | 61 | /// Point where the proof was opened at. 62 | pub point: NonNativeFieldVar>, 63 | 64 | /// Evaluation of the committed polynomial at the point. 
65 | pub eval: NonNativeFieldVar>, 66 | 67 | #[doc(hidden)] 68 | pub _affine: PhantomData, 69 | } 70 | 71 | impl AllocVar, ConstraintF> for InputInstanceVar 72 | where 73 | G: AffineCurve, 74 | C: CurveVar>, 75 | { 76 | fn new_variable>>( 77 | cs: impl Into>>, 78 | f: impl FnOnce() -> Result, 79 | mode: AllocationMode, 80 | ) -> Result { 81 | let ns = cs.into(); 82 | f().and_then(|input_instance| { 83 | let pedersen_comm: G = input_instance.borrow().commitment.commitment().elem; 84 | let commitment = C::new_variable(ns.clone(), || Ok(pedersen_comm), mode)?; 85 | let point = NonNativeFieldVar::>::new_variable( 86 | ns.clone(), 87 | || Ok(&input_instance.borrow().point), 88 | mode, 89 | )?; 90 | let eval = NonNativeFieldVar::>::new_variable( 91 | ns.clone(), 92 | || Ok(&input_instance.borrow().eval), 93 | mode, 94 | )?; 95 | 96 | Ok(Self { 97 | commitment, 98 | point, 99 | eval, 100 | _affine: PhantomData, 101 | }) 102 | }) 103 | } 104 | } 105 | 106 | impl AbsorbableGadget> for InputInstanceVar 107 | where 108 | G: AffineCurve, 109 | C: CurveVar> + AbsorbableGadget>, 110 | { 111 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 112 | collect_sponge_field_elements_gadget!( 113 | self.commitment, 114 | self.point.to_bytes()?, 115 | self.eval.to_bytes()? 116 | ) 117 | } 118 | } 119 | 120 | /// A proof attesting that a single input was properly accumulated. 121 | pub struct SingleProofVar 122 | where 123 | G: AffineCurve, 124 | C: CurveVar::BasePrimeField>, 125 | { 126 | /// Pedersen commitment to the witness polynomial. 127 | pub(crate) witness_commitment: C, 128 | 129 | /// Evaluation of the witness polynomial at the challenge point. 130 | pub(crate) witness_eval: NonNativeFieldVar>, 131 | 132 | /// Evaluation of the input polynomial at the challenge point. 133 | pub(crate) eval: NonNativeFieldVar>, 134 | 135 | #[doc(hidden)] 136 | pub(crate) _affine: PhantomData, 137 | } 138 | 139 | impl AllocVar, ConstraintF> for SingleProofVar 140 | where 141 | G: AffineCurve, 142 | C: CurveVar>, 143 | { 144 | fn new_variable>>( 145 | cs: impl Into>>, 146 | f: impl FnOnce() -> Result, 147 | mode: AllocationMode, 148 | ) -> Result { 149 | let ns = cs.into(); 150 | f().and_then(|single_proof| { 151 | let witness_commitment: G = single_proof.borrow().witness_commitment.commitment().elem; 152 | let witness_commitment = C::new_variable(ns.clone(), || Ok(witness_commitment), mode)?; 153 | let witness_eval = NonNativeFieldVar::>::new_variable( 154 | ns.clone(), 155 | || Ok(&single_proof.borrow().witness_eval), 156 | mode, 157 | )?; 158 | let eval = NonNativeFieldVar::>::new_variable( 159 | ns.clone(), 160 | || Ok(&single_proof.borrow().eval), 161 | mode, 162 | )?; 163 | 164 | Ok(Self { 165 | witness_commitment, 166 | witness_eval, 167 | eval, 168 | _affine: PhantomData, 169 | }) 170 | }) 171 | } 172 | } 173 | 174 | /// The [`Proof`][proof] of the [`ASForTrivialPC`][as_for_trivial_pc_verifier]. 175 | /// 176 | /// [proof]: crate::constraints::ASVerifierGadget::Proof 177 | /// [as_for_trivial_pc_verifier]: crate::trivial_pc_as::constraints::ASForTrivialPCVerifierGadget 178 | pub struct ProofVar 179 | where 180 | G: AffineCurve, 181 | C: CurveVar::BasePrimeField>, 182 | { 183 | /// A list of [`SingleProofVar`] for each input. 
184 | pub(crate) single_proofs: Vec>, 185 | } 186 | 187 | impl AllocVar>, ConstraintF> for ProofVar 188 | where 189 | G: AffineCurve, 190 | C: CurveVar>, 191 | { 192 | fn new_variable>>>( 193 | cs: impl Into>>, 194 | f: impl FnOnce() -> Result, 195 | mode: AllocationMode, 196 | ) -> Result { 197 | let ns = cs.into(); 198 | f().and_then(|single_proofs| { 199 | let single_proof_vars = single_proofs 200 | .borrow() 201 | .into_iter() 202 | .map(|single_proof| { 203 | SingleProofVar::new_variable(ns.clone(), || Ok(single_proof.clone()), mode) 204 | }) 205 | .collect::, SynthesisError>>()?; 206 | 207 | Ok(Self { 208 | single_proofs: single_proof_vars, 209 | }) 210 | }) 211 | } 212 | } 213 | -------------------------------------------------------------------------------- /src/ipa_pc_as/constraints/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::ipa_pc_as::data_structures::{InputInstance, Randomness, VerifierKey}; 2 | use crate::ConstraintF; 3 | 4 | use ark_ec::AffineCurve; 5 | use ark_ff::Zero; 6 | use ark_ff::{BitIteratorLE, Field, PrimeField}; 7 | use ark_nonnative_field::NonNativeFieldVar; 8 | use ark_poly_commit::ipa_pc; 9 | use ark_poly_commit::UVPolynomial; 10 | use ark_r1cs_std::alloc::{AllocVar, AllocationMode}; 11 | use ark_r1cs_std::bits::boolean::Boolean; 12 | use ark_r1cs_std::groups::CurveVar; 13 | use ark_relations::r1cs::{Namespace, SynthesisError}; 14 | use ark_std::borrow::Borrow; 15 | use ark_std::vec::Vec; 16 | 17 | pub(crate) type FinalCommKeyVar = C; 18 | 19 | /// The [`VerifierKey`][vk] of the [`AtomicASForIpaPCVerifierGadget`][as_for_ipa_pc_verifier]. 20 | /// 21 | /// [vk]: crate::constraints::ASVerifierGadget::VerifierKey 22 | /// [as_for_ipa_pc_verifier]: crate::ipa_pc_as::constraints::AtomicASForIpaPCVerifierGadget 23 | pub struct VerifierKeyVar 24 | where 25 | G: AffineCurve, 26 | C: CurveVar::BasePrimeField>, 27 | { 28 | pub(crate) ipa_svk: ipa_pc::constraints::SuccinctVerifierKeyVar, 29 | pub(crate) ipa_ck_linear: ipa_pc::constraints::VerifierKeyVar, 30 | 31 | pub(crate) default_proof: ipa_pc::Proof, 32 | } 33 | 34 | impl AllocVar, ConstraintF> for VerifierKeyVar 35 | where 36 | G: AffineCurve, 37 | C: CurveVar>, 38 | { 39 | fn new_variable>>( 40 | cs: impl Into>>, 41 | f: impl FnOnce() -> Result, 42 | mode: AllocationMode, 43 | ) -> Result { 44 | let ns = cs.into(); 45 | f().and_then(|verifier_key| { 46 | let ipa_svk = ipa_pc::constraints::SuccinctVerifierKeyVar::::new_variable( 47 | ns.clone(), 48 | || Ok(verifier_key.borrow().ipa_svk.clone()), 49 | mode, 50 | )?; 51 | 52 | let ipa_ck_linear = ipa_pc::constraints::VerifierKeyVar::::new_variable( 53 | ns.clone(), 54 | || Ok(&verifier_key.borrow().ipa_ck_linear), 55 | mode, 56 | )?; 57 | 58 | let default_proof = verifier_key.borrow().default_proof.clone(); 59 | 60 | Ok(Self { 61 | ipa_svk, 62 | ipa_ck_linear, 63 | default_proof, 64 | }) 65 | }) 66 | } 67 | } 68 | 69 | /// The [`InputInstance`][input_instance] of the 70 | /// [`AtomicASForIpaPCVerifierGadget`][as_for_ipa_pc_verifier]. 71 | /// 72 | /// [input_instance]: crate::constraints::ASVerifierGadget::InputInstance 73 | /// [as_for_ipa_pc_verifier]: crate::ipa_pc_as::constraints::AtomicASForIpaPCVerifierGadget 74 | pub struct InputInstanceVar 75 | where 76 | G: AffineCurve, 77 | C: CurveVar::BasePrimeField>, 78 | { 79 | /// The IpaPC commitment to a polynomial. 80 | pub(crate) ipa_commitment: ipa_pc::constraints::CommitmentVar, 81 | 82 | /// Point where the proof was opened at. 
83 | pub(crate) point: NonNativeFieldVar>, 84 | 85 | /// Evaluation of the committed polynomial at the point. 86 | pub(crate) evaluation: NonNativeFieldVar>, 87 | 88 | /// The IpaPC proof of evaluation at the point. 89 | pub(crate) ipa_proof: ipa_pc::constraints::ProofVar, 90 | } 91 | 92 | impl AllocVar, ConstraintF> for InputInstanceVar 93 | where 94 | G: AffineCurve, 95 | C: CurveVar>, 96 | { 97 | fn new_variable>>( 98 | cs: impl Into>>, 99 | f: impl FnOnce() -> Result, 100 | mode: AllocationMode, 101 | ) -> Result { 102 | let ns = cs.into(); 103 | f().and_then(|input_instance| { 104 | let ipa_commitment = ipa_pc::constraints::CommitmentVar::::new_variable( 105 | ns.clone(), 106 | || Ok(&input_instance.borrow().ipa_commitment), 107 | mode, 108 | )?; 109 | 110 | let point = NonNativeFieldVar::>::new_variable( 111 | ns.clone(), 112 | || Ok(&input_instance.borrow().point), 113 | mode, 114 | )?; 115 | 116 | let evaluation = NonNativeFieldVar::>::new_variable( 117 | ns.clone(), 118 | || Ok(&input_instance.borrow().evaluation), 119 | mode, 120 | )?; 121 | 122 | let ipa_proof = ipa_pc::constraints::ProofVar::::new_variable( 123 | ns.clone(), 124 | || Ok(&input_instance.borrow().ipa_proof), 125 | mode, 126 | )?; 127 | 128 | Ok(Self { 129 | ipa_commitment, 130 | point, 131 | evaluation, 132 | ipa_proof, 133 | }) 134 | }) 135 | } 136 | } 137 | 138 | /// The randomness used to apply zero-knowledge to commitment and accumulation. 139 | pub struct RandomnessVar 140 | where 141 | G: AffineCurve, 142 | C: CurveVar::BasePrimeField>, 143 | { 144 | /// A random linear polynomial to be accumulated. 145 | pub(crate) random_linear_polynomial_coeffs: 146 | [NonNativeFieldVar>; 2], 147 | 148 | /// The IpaPC commitment to the random linear polynomial. 149 | pub(crate) random_linear_polynomial_commitment: C, 150 | 151 | /// Randomness used to commit to the linear combination of the input polynomials. 
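/// Note (added): allocated in-circuit as the little-endian bit decomposition of the native randomness.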
152 | pub(crate) commitment_randomness: Vec>>, 153 | } 154 | 155 | impl AllocVar, ConstraintF> for RandomnessVar 156 | where 157 | G: AffineCurve, 158 | C: CurveVar>, 159 | { 160 | fn new_variable>>( 161 | cs: impl Into>>, 162 | f: impl FnOnce() -> Result, 163 | mode: AllocationMode, 164 | ) -> Result { 165 | let ns = cs.into(); 166 | f().and_then(|proof| { 167 | let random_linear_polynomial_coeffs = &proof.borrow().random_linear_polynomial.coeffs(); 168 | assert!(random_linear_polynomial_coeffs.len() <= 2); 169 | 170 | let random_linear_polynomial_coeffs = [ 171 | NonNativeFieldVar::>::new_variable( 172 | ns.clone(), 173 | || { 174 | Ok(if random_linear_polynomial_coeffs.len() > 0 { 175 | random_linear_polynomial_coeffs[0].clone() 176 | } else { 177 | G::ScalarField::zero() 178 | }) 179 | }, 180 | mode, 181 | )?, 182 | NonNativeFieldVar::>::new_variable( 183 | ns.clone(), 184 | || { 185 | Ok(if random_linear_polynomial_coeffs.len() > 1 { 186 | random_linear_polynomial_coeffs[1].clone() 187 | } else { 188 | G::ScalarField::zero() 189 | }) 190 | }, 191 | mode, 192 | )?, 193 | ]; 194 | 195 | let random_linear_polynomial_commitment = C::new_variable( 196 | ns.clone(), 197 | || Ok(proof.borrow().random_linear_polynomial_commitment), 198 | mode, 199 | )?; 200 | 201 | let commitment_randomness = BitIteratorLE::without_trailing_zeros( 202 | (&proof.borrow().commitment_randomness).into_repr(), 203 | ) 204 | .map(|b| Boolean::new_variable(ns.clone(), || Ok(b), mode)) 205 | .collect::, SynthesisError>>()?; 206 | 207 | Ok(Self { 208 | random_linear_polynomial_coeffs, 209 | random_linear_polynomial_commitment, 210 | commitment_randomness, 211 | }) 212 | }) 213 | } 214 | } 215 | 216 | /// The [`Proof`][proof] of the [`AtomicASForIpaPCVerifierGadget`][as_for_ipa_pc_verifier]. 217 | /// 218 | /// [proof]: crate::constraints::ASVerifierGadget::Proof 219 | /// [as_for_ipa_pc_verifier]: crate::ipa_pc_as::constraints::AtomicASForIpaPCVerifierGadget 220 | pub struct ProofVar 221 | where 222 | G: AffineCurve, 223 | C: CurveVar::BasePrimeField>, 224 | { 225 | /// Randomness used to apply zero-knowledge to commitment and accumulation. 
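/// Note (added): `None` when the accumulation was performed without zero-knowledge.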
226 | pub(crate) randomness: Option>, 227 | } 228 | 229 | impl AllocVar>, ConstraintF> for ProofVar 230 | where 231 | G: AffineCurve, 232 | C: CurveVar>, 233 | { 234 | fn new_variable>>>( 235 | cs: impl Into>>, 236 | f: impl FnOnce() -> Result, 237 | mode: AllocationMode, 238 | ) -> Result { 239 | let ns = cs.into(); 240 | f().and_then(|proof| { 241 | let randomness = proof 242 | .borrow() 243 | .as_ref() 244 | .map(|rand| RandomnessVar::new_variable(ns.clone(), || Ok(rand.clone()), mode)) 245 | .transpose()?; 246 | Ok(Self { randomness }) 247 | }) 248 | } 249 | } 250 | -------------------------------------------------------------------------------- /src/hp_as/constraints/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::hp_as::data_structures::{ProductPolynomialCommitment, Proof, ProofHidingCommitments}; 2 | use crate::hp_as::InputInstance; 3 | use crate::ConstraintF; 4 | 5 | use ark_ec::AffineCurve; 6 | use ark_ff::PrimeField; 7 | use ark_r1cs_std::alloc::{AllocVar, AllocationMode}; 8 | use ark_r1cs_std::fields::fp::FpVar; 9 | use ark_r1cs_std::groups::CurveVar; 10 | use ark_relations::r1cs::{Namespace, SynthesisError}; 11 | use ark_sponge::collect_sponge_field_elements_gadget; 12 | use ark_sponge::constraints::AbsorbableGadget; 13 | use ark_std::borrow::Borrow; 14 | use ark_std::marker::PhantomData; 15 | use ark_std::vec::Vec; 16 | 17 | /// The [`VerifierKey`][vk] of the [`ASForHPVerifierGadget`][as_for_hp_verifier]. 18 | /// 19 | /// [vk]: crate::constraints::ASVerifierGadget::VerifierKey 20 | /// [as_for_hp_verifier]: crate::hp_as::constraints::ASForHPVerifierGadget 21 | pub struct VerifierKeyVar { 22 | /// The maximum supported vector length of the Hadamard product relation. 23 | pub(crate) num_supported_elems: FpVar, 24 | } 25 | 26 | impl AllocVar for VerifierKeyVar { 27 | fn new_variable>( 28 | cs: impl Into>, 29 | f: impl FnOnce() -> Result, 30 | mode: AllocationMode, 31 | ) -> Result { 32 | let ns = cs.into(); 33 | f().and_then(|vk| { 34 | Ok(VerifierKeyVar { 35 | num_supported_elems: FpVar::new_variable( 36 | ns.clone(), 37 | || Ok(CF::from(*vk.borrow() as u64)), 38 | mode, 39 | )?, 40 | }) 41 | }) 42 | } 43 | } 44 | 45 | impl AbsorbableGadget for VerifierKeyVar { 46 | fn to_sponge_field_elements(&self) -> Result>, SynthesisError> { 47 | collect_sponge_field_elements_gadget!(self.num_supported_elems) 48 | } 49 | } 50 | 51 | /// The [`InputInstance`][input] of the [`ASForHPVerifierGadget`][as_for_hp_verifier]. 52 | /// 53 | /// [input]: crate::constraints::ASVerifierGadget::InputInstance 54 | /// [as_for_hp_verifier]: crate::hp_as::constraints::ASForHPVerifierGadget 55 | pub struct InputInstanceVar 56 | where 57 | G: AffineCurve, 58 | C: CurveVar>, 59 | { 60 | /// Pedersen commitment to the `a` vector of the Hadamard product relation. 61 | pub comm_1: C, 62 | 63 | /// Pedersen commitment to the `b` vector of the Hadamard product relation. 64 | pub comm_2: C, 65 | 66 | /// Pedersen commitment to the `a ◦ b` vector of the Hadamard product relation. 
67 | pub comm_3: C, 68 | 69 | #[doc(hidden)] 70 | pub _curve: PhantomData, 71 | } 72 | 73 | impl AllocVar, ConstraintF> for InputInstanceVar 74 | where 75 | G: AffineCurve, 76 | C: CurveVar>, 77 | { 78 | fn new_variable>>( 79 | cs: impl Into>>, 80 | f: impl FnOnce() -> Result, 81 | mode: AllocationMode, 82 | ) -> Result { 83 | let ns = cs.into(); 84 | f().and_then(|input_instance| { 85 | let comm_1 = C::new_variable(ns.clone(), || Ok(input_instance.borrow().comm_1), mode)?; 86 | let comm_2 = C::new_variable(ns.clone(), || Ok(input_instance.borrow().comm_2), mode)?; 87 | let comm_3 = C::new_variable(ns.clone(), || Ok(input_instance.borrow().comm_3), mode)?; 88 | Ok(Self { 89 | comm_1, 90 | comm_2, 91 | comm_3, 92 | _curve: PhantomData, 93 | }) 94 | }) 95 | } 96 | } 97 | 98 | impl AbsorbableGadget> for InputInstanceVar 99 | where 100 | G: AffineCurve, 101 | C: CurveVar> + AbsorbableGadget>, 102 | { 103 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 104 | collect_sponge_field_elements_gadget!(self.comm_1, self.comm_2, self.comm_3) 105 | } 106 | } 107 | 108 | /// The [`Proof`][proof] of the [`ASForHPVerifierGadget`][as_for_hp_verifier]. 109 | /// 110 | /// [proof]: crate::constraints::ASVerifierGadget::Proof 111 | /// [as_for_hp_verifier]: crate::hp_as::constraints::ASForHPVerifierGadget 112 | pub struct ProofVar 113 | where 114 | G: AffineCurve, 115 | C: CurveVar>, 116 | { 117 | /// Pedersen commitments to each coefficient vector of the product polynomial 118 | /// `a(X, µ) ◦ b(X)`, excluding `n-1`th coefficient (0-index) 119 | pub(crate) product_poly_comm: ProductPolynomialCommitmentVar, 120 | 121 | /// Pedersen commitments to the random vectors used to apply zero-knowledge to the vectors 122 | /// of the Hadamard product relation. 123 | pub(crate) hiding_comms: Option>, 124 | 125 | #[doc(hidden)] 126 | pub(crate) _curve: PhantomData, 127 | } 128 | 129 | impl AllocVar, ConstraintF> for ProofVar 130 | where 131 | G: AffineCurve, 132 | C: CurveVar>, 133 | { 134 | fn new_variable>>( 135 | cs: impl Into>>, 136 | f: impl FnOnce() -> Result, 137 | mode: AllocationMode, 138 | ) -> Result { 139 | let ns = cs.into(); 140 | f().and_then(|proof| { 141 | let product_poly_comm = ProductPolynomialCommitmentVar::new_variable( 142 | ns.clone(), 143 | || Ok(&proof.borrow().product_poly_comm), 144 | mode, 145 | )?; 146 | let hiding_comms = proof 147 | .borrow() 148 | .hiding_comms 149 | .as_ref() 150 | .map(|hiding_comms| { 151 | ProofHidingCommitmentsVar::new_variable(ns.clone(), || Ok(hiding_comms), mode) 152 | }) 153 | .transpose()?; 154 | 155 | Ok(Self { 156 | product_poly_comm, 157 | hiding_comms, 158 | _curve: PhantomData, 159 | }) 160 | }) 161 | } 162 | } 163 | 164 | /// The Pedersen commitments to each coefficient vector of the product polynomial `a(X, µ) ◦ b(X)`. 165 | /// Excludes `n-1`th commitment (0-index) 166 | pub(crate) struct ProductPolynomialCommitmentVar 167 | where 168 | G: AffineCurve, 169 | C: CurveVar>, 170 | { 171 | /// Pedersen commitments to the first `n-1` coefficients of the lower powers. 172 | pub(crate) low: Vec, 173 | 174 | /// Pedersen commitments to the last `n-1` coefficients of the higher powers. 
175 | pub(crate) high: Vec, 176 | 177 | #[doc(hidden)] 178 | pub(crate) _curve: PhantomData, 179 | } 180 | 181 | impl AllocVar, ConstraintF> 182 | for ProductPolynomialCommitmentVar 183 | where 184 | G: AffineCurve, 185 | C: CurveVar>, 186 | { 187 | fn new_variable>>( 188 | cs: impl Into>>, 189 | f: impl FnOnce() -> Result, 190 | mode: AllocationMode, 191 | ) -> Result { 192 | let ns = cs.into(); 193 | f().and_then(|product_poly_comm| { 194 | let product_poly_comm_low = product_poly_comm 195 | .borrow() 196 | .low 197 | .iter() 198 | .map(|comm| C::new_variable(ns.clone(), || Ok(comm.clone()), mode)) 199 | .collect::, SynthesisError>>()?; 200 | 201 | let product_poly_comm_high = product_poly_comm 202 | .borrow() 203 | .high 204 | .iter() 205 | .map(|comm| C::new_variable(ns.clone(), || Ok(comm.clone()), mode)) 206 | .collect::, SynthesisError>>()?; 207 | 208 | Ok(Self { 209 | low: product_poly_comm_low, 210 | high: product_poly_comm_high, 211 | _curve: PhantomData, 212 | }) 213 | }) 214 | } 215 | } 216 | 217 | impl AbsorbableGadget> for ProductPolynomialCommitmentVar 218 | where 219 | G: AffineCurve, 220 | C: CurveVar> + AbsorbableGadget>, 221 | { 222 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 223 | collect_sponge_field_elements_gadget!(self.low, self.high) 224 | } 225 | } 226 | 227 | /// The Pedersen commitments to the random vectors used to apply zero-knowledge to the vectors of 228 | /// the Hadamard product relation. 229 | pub(crate) struct ProofHidingCommitmentsVar 230 | where 231 | G: AffineCurve, 232 | C: CurveVar>, 233 | { 234 | /// Pedersen commitment to the random vector that hides the `a` vector of the Hadamard 235 | /// product relation. 236 | pub(crate) comm_1: C, 237 | 238 | /// Pedersen commitment to the random vector that hides the `b` vector of the Hadamard 239 | /// product relation. 240 | pub(crate) comm_2: C, 241 | 242 | /// Pedersen commitment to the cross term randomness vector 243 | pub(crate) comm_3: C, 244 | 245 | #[doc(hidden)] 246 | pub(crate) _curve: PhantomData, 247 | } 248 | 249 | impl AllocVar, ConstraintF> for ProofHidingCommitmentsVar 250 | where 251 | G: AffineCurve, 252 | C: CurveVar>, 253 | { 254 | fn new_variable>>( 255 | cs: impl Into>>, 256 | f: impl FnOnce() -> Result, 257 | mode: AllocationMode, 258 | ) -> Result { 259 | let ns = cs.into(); 260 | f().and_then(|hiding_comms| { 261 | let comm_1 = C::new_variable(ns.clone(), || Ok(hiding_comms.borrow().comm_1), mode)?; 262 | let comm_2 = C::new_variable(ns.clone(), || Ok(hiding_comms.borrow().comm_2), mode)?; 263 | let comm_3 = C::new_variable(ns.clone(), || Ok(hiding_comms.borrow().comm_3), mode)?; 264 | 265 | Ok(Self { 266 | comm_1, 267 | comm_2, 268 | comm_3, 269 | _curve: PhantomData, 270 | }) 271 | }) 272 | } 273 | } 274 | 275 | impl AbsorbableGadget> for ProofHidingCommitmentsVar 276 | where 277 | G: AffineCurve, 278 | C: CurveVar> + AbsorbableGadget>, 279 | { 280 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 281 | collect_sponge_field_elements_gadget!(self.comm_1, self.comm_2, self.comm_3) 282 | } 283 | } 284 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /examples/scaling-as.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_camel_case_types)] 2 | // For randomness (during paramgen and proof generation) 3 | // PS: thread_rng is *insecure* 4 | 5 | // For benchmarking 6 | use ark_accumulation::ipa_pc_as::IpaPCDomain; 7 | use ark_accumulation::{ipa_pc_as, ipa_pc_as::AtomicASForInnerProductArgPC}; 8 | use ark_accumulation::{trivial_pc_as, trivial_pc_as::ASForTrivialPC}; 9 | use ark_accumulation::{AccumulationScheme, Accumulator, Input, MakeZK}; 10 | use ark_ff::{One, PrimeField}; 11 | use ark_pallas::{Affine as G1Affine, Fq, Fr}; 12 | use ark_poly::univariate::DensePolynomial; 13 | use ark_poly_commit::ipa_pc::InnerProductArgPC; 14 | use ark_poly_commit::trivial_pc::TrivialPC; 15 | use ark_poly_commit::{LabeledPolynomial, PCCommitterKey, PolynomialCommitment, UVPolynomial}; 16 | use ark_serialize::CanonicalSerialize; 17 | use ark_sponge::domain_separated::DomainSeparatedSponge; 18 | use ark_sponge::poseidon::PoseidonSponge; 19 | use ark_sponge::CryptographicSponge; 20 | use ark_std::rand::{Rng, RngCore}; 21 | use ark_std::vec::Vec; 22 | use ark_std::UniformRand; 23 | use blake2::Blake2s; 24 | use std::time::Instant; 25 | 26 | type TrivPC = TrivialPC>; 27 | type ASForTrivPC = ASForTrivialPC>; 28 | 29 | type IpaPC = InnerProductArgPC< 30 | G1Affine, 31 | Blake2s, 32 | DensePolynomial, 33 | Fq, 34 | DomainSeparatedSponge, IpaPCDomain>, 35 | >; 36 | type ASForIpaPC = AtomicASForInnerProductArgPC>; 37 | 38 | fn profile_as( 39 | min_degree: usize, 40 | max_degree: usize, 41 | sample_parameters_and_index: ParamGen, 42 | sample_inputs: InputGen, 43 | rng: &mut R, 44 | ) where 45 | F: PrimeField, 46 | P: UVPolynomial, 47 | PC: PolynomialCommitment, 48 | CF: PrimeField, 49 | S: CryptographicSponge, 50 | AS: AccumulationScheme, 51 | ParamGen: Fn( 52 | usize, 53 | &mut R, 54 | ) -> ( 55 | (PC::CommitterKey, PC::VerifierKey), 56 | AS::PredicateParams, 57 | AS::PredicateIndex, 58 | ), 59 | InputGen: Fn(&PC::CommitterKey, &mut R) -> Vec>, 60 | R: Rng, 61 | { 62 | for degree in min_degree..=max_degree { 63 | let degree = (1 << degree) - 1; 64 | println!("Degree: {:?}", degree); 65 | let supported_degree = degree; 66 | 67 | let ((ck, _), predicate_params, predicate_index) = 
68 | sample_parameters_and_index(supported_degree, rng); 69 | let as_pp = AS::setup(rng).unwrap(); 70 | 71 | let start = Instant::now(); 72 | let (pk, vk, dk) = AS::index(&as_pp, &predicate_params, &predicate_index).unwrap(); 73 | let index_time = start.elapsed(); 74 | println!("Indexer: {:?}", index_time.as_millis()); 75 | 76 | let inputs = sample_inputs(&ck, rng); 77 | 78 | // Initially start with empty accumulators 79 | let mut old_accumulators = Vec::with_capacity(1); 80 | 81 | let (accumulator, _) = AS::prove( 82 | &pk, 83 | Input::::map_to_refs(&inputs), 84 | Accumulator::::map_to_refs(&old_accumulators), 85 | MakeZK::Enabled(rng), 86 | None::, 87 | ) 88 | .unwrap(); 89 | 90 | // Use the same accumulator as input 91 | old_accumulators.push(accumulator.clone()); 92 | old_accumulators.push(accumulator.clone()); 93 | 94 | let start = Instant::now(); 95 | let (accumulator, proof) = AS::prove( 96 | &pk, 97 | Input::::map_to_refs(&inputs), 98 | Accumulator::::map_to_refs(&old_accumulators), 99 | MakeZK::Enabled(rng), 100 | None::, 101 | ) 102 | .unwrap(); 103 | let prover_time = start.elapsed(); 104 | println!("Prover: {:?}", prover_time.as_millis()); 105 | 106 | let start = Instant::now(); 107 | let verification_result = AS::verify( 108 | &vk, 109 | Input::::instances(&inputs), 110 | Accumulator::::instances(&old_accumulators), 111 | &accumulator.instance, 112 | &proof, 113 | None::, 114 | ) 115 | .unwrap(); 116 | let verifier_time = start.elapsed(); 117 | println!("Verifier: {:?}", verifier_time.as_millis()); 118 | 119 | let start = Instant::now(); 120 | let decision_result = AS::decide(&dk, accumulator.as_ref(), None::).unwrap(); 121 | let decider_time = start.elapsed(); 122 | println!("Decider: {:?}\n", decider_time.as_millis()); 123 | println!("Accumulator size: {}", accumulator.serialized_size()); 124 | println!( 125 | "Accumulator instance size: {}", 126 | accumulator.instance.serialized_size() 127 | ); 128 | println!( 129 | "Accumulator witness size: {}", 130 | accumulator.witness.serialized_size() 131 | ); 132 | 133 | println!("\n\n"); 134 | 135 | assert!(verification_result); 136 | assert!(decision_result); 137 | } 138 | } 139 | 140 | type TrivPCKeys = ( 141 | >>::CommitterKey, 142 | >>::VerifierKey, 143 | ); 144 | 145 | fn lh_param_gen( 146 | degree: usize, 147 | rng: &mut R, 148 | ) -> ( 149 | TrivPCKeys, 150 | >>::PredicateParams, 151 | >>::PredicateIndex, 152 | ) { 153 | let predicate_params = TrivPC::setup(degree, None, rng).unwrap(); 154 | let (ck, vk) = TrivPC::trim(&predicate_params, degree, 0, None).unwrap(); 155 | ((ck, vk), predicate_params, degree) 156 | } 157 | 158 | fn lh_input_gen( 159 | ck: &>>::CommitterKey, 160 | rng: &mut R, 161 | ) -> Vec, ASForTrivPC>> { 162 | let labeled_polynomials = vec![{ 163 | let degree = ck.supported_degree(); 164 | let label = format!("Input{}", 1); 165 | 166 | let polynomial = DensePolynomial::rand(degree, rng); 167 | let labeled_polynomial = LabeledPolynomial::new(label, polynomial, None, None); 168 | 169 | labeled_polynomial 170 | }]; 171 | 172 | let (labeled_commitments, _) = TrivPC::commit(ck, &labeled_polynomials, Some(rng)).unwrap(); 173 | 174 | let inputs = labeled_polynomials 175 | .into_iter() 176 | .zip(labeled_commitments) 177 | .map(|(labeled_polynomial, labeled_commitment)| { 178 | let point = Fr::rand(rng); 179 | let eval = labeled_polynomial.evaluate(&point); 180 | 181 | let instance = trivial_pc_as::InputInstance { 182 | commitment: labeled_commitment, 183 | point, 184 | eval, 185 | }; 186 | 187 | Input::<_, _, 
ASForTrivPC> { 188 | instance, 189 | witness: labeled_polynomial, 190 | } 191 | }) 192 | .collect(); 193 | 194 | inputs 195 | } 196 | 197 | type IpaPC_Keys = ( 198 | >>::CommitterKey, 199 | >>::VerifierKey, 200 | ); 201 | 202 | fn dl_param_gen( 203 | degree: usize, 204 | rng: &mut R, 205 | ) -> ( 206 | IpaPC_Keys, 207 | >>::PredicateParams, 208 | >>::PredicateIndex, 209 | ) { 210 | let predicate_params = IpaPC::setup(degree, None, rng).unwrap(); 211 | let (ck, vk) = IpaPC::trim(&predicate_params, degree, 0, None).unwrap(); 212 | let predicate_index = ipa_pc_as::PredicateIndex { 213 | supported_degree_bound: degree, 214 | supported_hiding_bound: 0, 215 | }; 216 | ((ck, vk), predicate_params, predicate_index) 217 | } 218 | 219 | fn dl_input_gen( 220 | ck: &>>::CommitterKey, 221 | rng: &mut R, 222 | ) -> Vec, ASForIpaPC>> { 223 | let labeled_polynomials = vec![{ 224 | let degree = ck.supported_degree(); 225 | let label = format!("Input{}", 1); 226 | 227 | let polynomial = DensePolynomial::rand(degree, rng); 228 | let labeled_polynomial = LabeledPolynomial::new(label, polynomial, None, None); 229 | 230 | labeled_polynomial 231 | }]; 232 | 233 | let (labeled_commitments, randoms) = 234 | IpaPC::commit(ck, &labeled_polynomials, Some(rng)).unwrap(); 235 | 236 | let inputs = labeled_polynomials 237 | .into_iter() 238 | .zip(labeled_commitments) 239 | .zip(randoms) 240 | .map(|((labeled_polynomial, labeled_commitment), randomness)| { 241 | let point = Fr::rand(rng); 242 | let eval = labeled_polynomial.evaluate(&point); 243 | let ipa_proof = IpaPC::open_individual_opening_challenges( 244 | ck, 245 | vec![&labeled_polynomial], 246 | vec![&labeled_commitment], 247 | &point, 248 | &|_| Fr::one(), 249 | &vec![randomness], 250 | Some(rng), 251 | ) 252 | .unwrap(); 253 | let result = IpaPC::check_individual_opening_challenges( 254 | ck, 255 | vec![&labeled_commitment], 256 | &point, 257 | vec![eval], 258 | &ipa_proof, 259 | &|_| Fr::one(), 260 | Some(rng), 261 | ) 262 | .unwrap(); 263 | assert!(result); 264 | 265 | let input = ipa_pc_as::InputInstance { 266 | ipa_commitment: labeled_commitment, 267 | point, 268 | evaluation: eval, 269 | ipa_proof, 270 | }; 271 | 272 | Input::<_, _, ASForIpaPC> { 273 | instance: input, 274 | witness: (), 275 | } 276 | }) 277 | .collect(); 278 | 279 | inputs 280 | } 281 | 282 | fn main() { 283 | let args: Vec = std::env::args().collect(); 284 | if args.len() < 4 || args[1] == "-h" || args[1] == "--help" { 285 | println!("\nHelp: Invoke this as \n"); 286 | } 287 | let min_degree: usize = String::from(args[1].clone()) 288 | .parse() 289 | .expect(" should be integer"); 290 | let max_degree: usize = String::from(args[2].clone()) 291 | .parse() 292 | .expect(" should be integer"); 293 | 294 | let rng = &mut ark_std::test_rng(); 295 | println!("\n\n\n================ Benchmarking ASForTrivPC ================"); 296 | profile_as::<_, _, TrivPC, _, PoseidonSponge, ASForTrivPC, _, _, _>( 297 | min_degree, 298 | max_degree, 299 | lh_param_gen, 300 | lh_input_gen, 301 | rng, 302 | ); 303 | println!("\n\n\n================ Benchmarking ASForIpaPC ================"); 304 | profile_as::<_, _, IpaPC, _, PoseidonSponge, ASForIpaPC, _, _, _>( 305 | min_degree, 306 | max_degree, 307 | dl_param_gen, 308 | dl_input_gen, 309 | rng, 310 | ); 311 | } 312 | -------------------------------------------------------------------------------- /src/r1cs_nark_as/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::hp_as::{ 2 | InputInstance 
as HPInputInstance, InputWitness as HPInputWitness, ProductPolynomialCommitment, 3 | Proof as HPProof, ProofHidingCommitments, 4 | }; 5 | use crate::r1cs_nark_as::r1cs_nark::{FirstRoundMessage, IndexProverKey, SecondRoundMessage}; 6 | 7 | use ark_ec::AffineCurve; 8 | use ark_ff::{to_bytes, Field, PrimeField, Zero}; 9 | use ark_relations::r1cs::Matrix; 10 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 11 | use ark_sponge::{collect_sponge_bytes, collect_sponge_field_elements, Absorbable}; 12 | use ark_std::io::{Read, Write}; 13 | use ark_std::vec::Vec; 14 | 15 | /// The [`PredicateIndex`][predicate_index] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 16 | /// 17 | /// [predicate_index]: crate::AccumulationScheme::PredicateIndex 18 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 19 | #[derive(Clone)] 20 | pub struct PredicateIndex { 21 | /// The `A` matrix for the R1CS instance. 22 | pub a: Matrix, 23 | 24 | /// The `B` matrix for the R1CS instance. 25 | pub b: Matrix, 26 | 27 | /// The `C` matrix for the R1CS instance. 28 | pub c: Matrix, 29 | 30 | /// The index of the relation to be verified by the NARK. 31 | pub index: usize, 32 | } 33 | 34 | /// The [`ProverKey`][pk] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 35 | /// 36 | /// [pk]: crate::AccumulationScheme::ProverKey 37 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 38 | #[derive(Clone)] 39 | pub struct ProverKey { 40 | /// The NARK prover key. 41 | pub nark_pk: IndexProverKey, 42 | 43 | /// Hash of the matrices for the accumulation scheme. 44 | pub(crate) as_matrices_hash: [u8; 32], 45 | } 46 | 47 | /// The [`VerifierKey`][vk] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 48 | /// 49 | /// [vk]: crate::AccumulationScheme::VerifierKey 50 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 51 | #[derive(Clone)] 52 | pub struct VerifierKey { 53 | /// The number of public input (i.e. instance) variables. 54 | pub(crate) num_instance_variables: usize, 55 | 56 | /// The number of constraints. 57 | pub(crate) num_constraints: usize, 58 | 59 | /// Hash of the matrices for the NARK. 60 | pub(crate) nark_matrices_hash: [u8; 32], 61 | 62 | /// Hash of the matrices for the accumulation scheme. 63 | pub(crate) as_matrices_hash: [u8; 32], 64 | } 65 | 66 | impl VerifierKey { 67 | /// Outputs a placeholder for a verifier key to be used in PCD circuit setup. 68 | /// The constraints equivalent of the verifier key only requires the correct public input 69 | /// length while everything else may be left as unknown variables. 70 | pub fn placeholder(input_len: usize) -> Self { 71 | Self { 72 | num_instance_variables: input_len, 73 | num_constraints: 0, 74 | nark_matrices_hash: [0u8; 32], 75 | as_matrices_hash: [0u8; 32], 76 | } 77 | } 78 | } 79 | 80 | impl> Absorbable for VerifierKey { 81 | fn to_sponge_bytes(&self) -> Vec { 82 | collect_sponge_bytes!( 83 | CF, 84 | self.num_instance_variables, 85 | self.num_constraints, 86 | self.nark_matrices_hash.to_vec(), 87 | self.as_matrices_hash.to_vec() 88 | ) 89 | } 90 | 91 | fn to_sponge_field_elements(&self) -> Vec { 92 | collect_sponge_field_elements!( 93 | self.num_instance_variables, 94 | self.num_constraints, 95 | self.nark_matrices_hash.to_vec(), 96 | self.as_matrices_hash.to_vec() 97 | ) 98 | } 99 | } 100 | 101 | /// The [`InputInstance`][input_instance] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 
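/// Note (added): consists of an R1CS input together with the NARK prover's first-round (sigma-protocol) message.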
102 | /// 103 | /// [input_instance]: crate::AccumulationScheme::InputInstance 104 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 105 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 106 | pub struct InputInstance { 107 | /// An R1CS input for the indexed relation. 108 | pub r1cs_input: Vec, 109 | 110 | /// The sigma protocol's prover commitment of the NARK. 111 | pub first_round_message: FirstRoundMessage, 112 | } 113 | 114 | impl InputInstance { 115 | /// Returns a default input instance. 116 | pub fn zero(input_len: usize, make_zk: bool) -> Self { 117 | Self { 118 | r1cs_input: vec![G::ScalarField::zero(); input_len], 119 | first_round_message: FirstRoundMessage::zero(make_zk), 120 | } 121 | } 122 | } 123 | 124 | impl Absorbable for InputInstance 125 | where 126 | CF: PrimeField, 127 | G: AffineCurve + Absorbable, 128 | { 129 | fn to_sponge_bytes(&self) -> Vec { 130 | collect_sponge_bytes!( 131 | CF, 132 | to_bytes!(self.r1cs_input).unwrap(), 133 | self.first_round_message 134 | ) 135 | } 136 | 137 | fn to_sponge_field_elements(&self) -> Vec { 138 | collect_sponge_field_elements!( 139 | to_bytes!(self.r1cs_input).unwrap(), 140 | self.first_round_message 141 | ) 142 | } 143 | } 144 | 145 | /// The [`InputWitness`][input_witness] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 146 | /// 147 | /// [input_witness]: crate::AccumulationScheme::InputWitness 148 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 149 | pub type InputWitness = SecondRoundMessage; 150 | 151 | /// The [`AccumulatorInstance`][acc_instance] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 152 | /// 153 | /// [acc_instance]: crate::AccumulationScheme::AccumulatorInstance 154 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 155 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 156 | pub struct AccumulatorInstance { 157 | /// An input for the indexed relation. 158 | pub(crate) r1cs_input: Vec, 159 | 160 | /// Pedersen commitment to the `Az` vector. 161 | pub(crate) comm_a: G, 162 | 163 | /// Pedersen commitment to the `Bz` vector. 164 | pub(crate) comm_b: G, 165 | 166 | /// Pedersen commitment to the `Cz` vector. 167 | pub(crate) comm_c: G, 168 | 169 | /// The Hadamard product accumulation scheme input instance. 170 | pub(crate) hp_instance: HPInputInstance, 171 | } 172 | 173 | impl AccumulatorInstance { 174 | /// Outputs a placeholder for an accumulator instance to be used in the PCD circuit setup. 175 | pub fn placeholder(input_len: usize) -> Self { 176 | Self { 177 | r1cs_input: vec![G::ScalarField::zero(); input_len], 178 | comm_a: G::zero(), 179 | comm_b: G::zero(), 180 | comm_c: G::zero(), 181 | hp_instance: HPInputInstance::::zero(), 182 | } 183 | } 184 | } 185 | 186 | impl Absorbable for AccumulatorInstance 187 | where 188 | CF: PrimeField, 189 | G: AffineCurve + Absorbable, 190 | { 191 | fn to_sponge_bytes(&self) -> Vec { 192 | collect_sponge_bytes!( 193 | CF, 194 | to_bytes!(self.r1cs_input).unwrap(), 195 | self.comm_a, 196 | self.comm_b, 197 | self.comm_c, 198 | self.hp_instance 199 | ) 200 | } 201 | 202 | fn to_sponge_field_elements(&self) -> Vec { 203 | collect_sponge_field_elements!( 204 | to_bytes!(self.r1cs_input).unwrap(), 205 | self.comm_a, 206 | self.comm_b, 207 | self.comm_c, 208 | self.hp_instance 209 | ) 210 | } 211 | } 212 | 213 | /// The [`AccumulatorWitness`][acc_witness] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 
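/// Note (added): consists of the (possibly blinded) R1CS witness, the Hadamard product accumulation scheme input witness, and optional commitment randomness.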
214 | /// 215 | /// [acc_witness]: crate::AccumulationScheme::AccumulatorWitness 216 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 217 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 218 | pub struct AccumulatorWitness { 219 | /// The R1CS witness with randomness applied if zero-knowledge is needed. 220 | pub(crate) r1cs_blinded_witness: Vec, 221 | 222 | /// The Hadamard product accumulation scheme input witness. 223 | pub(crate) hp_witness: HPInputWitness, 224 | 225 | /// Randomness for the Pedersen commitments to the linear combinations. 226 | pub(crate) randomness: Option>, 227 | } 228 | 229 | /// The randomness for the Pedersen commitments to the linear combinations. 230 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 231 | pub(crate) struct AccumulatorWitnessRandomness { 232 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 233 | /// `A` matrix. 234 | pub(crate) sigma_a: F, 235 | 236 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 237 | /// `B` matrix. 238 | pub(crate) sigma_b: F, 239 | 240 | /// The blinded randomness for the Pedersen commitment to the linear combination with the 241 | /// `C` matrix. 242 | pub(crate) sigma_c: F, 243 | } 244 | 245 | /// The [`Proof`][proof] of the [`ASForR1CSNark`][as_for_r1cs_nark]. 246 | /// 247 | /// [proof]: crate::AccumulationScheme::Proof 248 | /// [as_for_r1cs_nark]: crate::r1cs_nark_as::ASForR1CSNark 249 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 250 | pub struct Proof { 251 | /// The Hadamard product accumulation scheme proof. 252 | pub(crate) hp_proof: HPProof, 253 | 254 | /// Randomness or their commitments used to blind the vectors of the indexed relation. 255 | pub(crate) randomness: Option>, 256 | } 257 | 258 | impl Proof { 259 | /// Outputs a placeholder for a proof to be used in the PCD circuit setup. 260 | pub fn placeholder( 261 | r1cs_input_len: usize, 262 | num_accumulators_and_inputs: usize, 263 | make_zk: bool, 264 | ) -> Self { 265 | let randomness = if make_zk { 266 | Some(ProofRandomness { 267 | r1cs_r_input: vec![G::ScalarField::zero(); r1cs_input_len], 268 | comm_r_a: G::zero(), 269 | comm_r_b: G::zero(), 270 | comm_r_c: G::zero(), 271 | }) 272 | } else { 273 | None 274 | }; 275 | 276 | // Accounts for the default case. 277 | let mut num_inputs = num_accumulators_and_inputs; 278 | if num_inputs == 0 { 279 | num_inputs += 1; 280 | } 281 | 282 | // Accounts for the addition dummy input added to HP_AS for zero knowledge. 283 | if num_inputs == 1 && make_zk { 284 | num_inputs += 1; 285 | } 286 | 287 | let hp_proof = HPProof:: { 288 | product_poly_comm: ProductPolynomialCommitment { 289 | low: vec![G::zero(); num_inputs - 1], 290 | high: vec![G::zero(); num_inputs - 1], 291 | }, 292 | 293 | hiding_comms: if make_zk { 294 | Some(ProofHidingCommitments { 295 | comm_1: G::zero(), 296 | comm_2: G::zero(), 297 | comm_3: G::zero(), 298 | }) 299 | } else { 300 | None 301 | }, 302 | }; 303 | 304 | Self { 305 | hp_proof, 306 | randomness, 307 | } 308 | } 309 | } 310 | 311 | /// The randomness or their commitments used to blind the vectors of the indexed relation. 312 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize)] 313 | pub(crate) struct ProofRandomness { 314 | /// Randomness used to blind the R1CS input. 315 | pub(crate) r1cs_r_input: Vec, 316 | 317 | /// Pedersen commitment to the vector that blinds the witness in `Az`. 
318 | pub(crate) comm_r_a: G, 319 | 320 | /// Pedersen commitment to the vector that blinds the witness in `Bz`. 321 | pub(crate) comm_r_b: G, 322 | 323 | /// Pedersen commitment to the vector that blinds the witness in `Cz`. 324 | pub(crate) comm_r_c: G, 325 | } 326 | 327 | impl Absorbable for ProofRandomness 328 | where 329 | CF: PrimeField, 330 | G: AffineCurve + Absorbable, 331 | { 332 | fn to_sponge_bytes(&self) -> Vec { 333 | collect_sponge_bytes!( 334 | CF, 335 | to_bytes!(self.r1cs_r_input).unwrap(), 336 | self.comm_r_a, 337 | self.comm_r_b, 338 | self.comm_r_c 339 | ) 340 | } 341 | 342 | fn to_sponge_field_elements(&self) -> Vec { 343 | collect_sponge_field_elements!( 344 | to_bytes!(self.r1cs_r_input).unwrap(), 345 | self.comm_r_a, 346 | self.comm_r_b, 347 | self.comm_r_c 348 | ) 349 | } 350 | } 351 | -------------------------------------------------------------------------------- /src/trivial_pc_as/constraints/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::constraints::ASVerifierGadget; 2 | use crate::trivial_pc_as::{ 3 | ASForTrivialPC, InputInstance, CHALLENGE_POINT_SIZE, LINEAR_COMBINATION_CHALLENGE_SIZE, 4 | }; 5 | use crate::ConstraintF; 6 | 7 | use ark_ec::AffineCurve; 8 | use ark_ff::{Field, ToConstraintField}; 9 | use ark_nonnative_field::{NonNativeFieldMulResultVar, NonNativeFieldVar}; 10 | use ark_r1cs_std::alloc::AllocVar; 11 | use ark_r1cs_std::bits::boolean::Boolean; 12 | use ark_r1cs_std::bits::uint8::UInt8; 13 | use ark_r1cs_std::eq::EqGadget; 14 | use ark_r1cs_std::groups::CurveVar; 15 | use ark_r1cs_std::ToBytesGadget; 16 | use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; 17 | use ark_sponge::constraints::AbsorbableGadget; 18 | use ark_sponge::constraints::CryptographicSpongeVar; 19 | use ark_sponge::{absorb_gadget, Absorbable, CryptographicSponge, FieldElementSize}; 20 | use ark_std::marker::PhantomData; 21 | use ark_std::vec; 22 | use ark_std::vec::Vec; 23 | use std::ops::Mul; 24 | 25 | mod data_structures; 26 | pub use data_structures::*; 27 | 28 | /// The verifier gadget of [`ASForTrivialPC`][as_for_trivial_pc]. 29 | /// 30 | /// [as_for_trivial_pc]: crate::trivial_pc_as::ASForTrivialPC 31 | pub struct ASForTrivialPCVerifierGadget 32 | where 33 | G: AffineCurve + ToConstraintField> + Absorbable>, 34 | C: CurveVar::BasePrimeField> 35 | + AbsorbableGadget>, 36 | ConstraintF: Absorbable>, 37 | S: CryptographicSponge>, 38 | SV: CryptographicSpongeVar, S>, 39 | { 40 | _affine: PhantomData, 41 | _curve: PhantomData, 42 | _sponge: PhantomData, 43 | _sponge_var: PhantomData, 44 | } 45 | 46 | impl ASForTrivialPCVerifierGadget 47 | where 48 | G: AffineCurve + ToConstraintField> + Absorbable>, 49 | C: CurveVar::BasePrimeField> 50 | + AbsorbableGadget>, 51 | ConstraintF: Absorbable>, 52 | S: CryptographicSponge>, 53 | SV: CryptographicSpongeVar, S>, 54 | { 55 | /// Check that the proof is properly structured. 56 | fn check_proof_structure(proof: &ProofVar, num_inputs: usize) -> bool { 57 | // Each proof must correspond to an input. 58 | return proof.single_proofs.len() == num_inputs; 59 | } 60 | 61 | /// Compute the linear combination of evaluations. 
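/// That is, `sum_i challenge[i] * evaluations[i]`, accumulated without intermediate modular reductions and reduced once at the end.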
62 | #[tracing::instrument(target = "r1cs", skip(evaluations, challenge))] 63 | fn combine_evaluation<'a>( 64 | evaluations: impl IntoIterator>>, 65 | challenge: &[NonNativeFieldVar>], 66 | ) -> Result>, SynthesisError> { 67 | let mut combined_evaluation = 68 | NonNativeFieldMulResultVar::>::zero(); 69 | for (i, eval) in evaluations.into_iter().enumerate() { 70 | combined_evaluation += (&eval).mul_without_reduce(&challenge[i])?; 71 | } 72 | 73 | Ok(combined_evaluation.reduce()?) 74 | } 75 | 76 | /// Compute the linear combination of commitments. 77 | #[tracing::instrument(target = "r1cs", skip(commitment, challenge_bytes))] 78 | fn combine_commitment<'a>( 79 | commitment: impl IntoIterator, 80 | challenge_bytes: &[Vec>>], 81 | ) -> Result { 82 | let mut combined_commitment = C::zero(); 83 | for (i, comm) in commitment.into_iter().enumerate() { 84 | combined_commitment += &comm.scalar_mul_le(challenge_bytes[i].iter())?; 85 | } 86 | 87 | Ok(combined_commitment) 88 | } 89 | } 90 | 91 | impl ASVerifierGadget, S, SV, ASForTrivialPC> 92 | for ASForTrivialPCVerifierGadget 93 | where 94 | G: AffineCurve + ToConstraintField> + Absorbable>, 95 | C: CurveVar::BasePrimeField> 96 | + AbsorbableGadget>, 97 | ConstraintF: Absorbable>, 98 | S: CryptographicSponge>, 99 | SV: CryptographicSpongeVar, S>, 100 | { 101 | type VerifierKey = VerifierKeyVar>; 102 | type InputInstance = InputInstanceVar; 103 | type AccumulatorInstance = InputInstanceVar; 104 | type Proof = ProofVar; 105 | 106 | #[tracing::instrument( 107 | target = "r1cs", 108 | skip( 109 | verifier_key, 110 | input_instances, 111 | old_accumulator_instances, 112 | new_accumulator_instance, 113 | proof, 114 | sponge, 115 | ) 116 | )] 117 | fn verify<'a>( 118 | cs: ConstraintSystemRef>, 119 | verifier_key: &Self::VerifierKey, 120 | input_instances: impl IntoIterator, 121 | old_accumulator_instances: impl IntoIterator, 122 | new_accumulator_instance: &Self::AccumulatorInstance, 123 | proof: &Self::Proof, 124 | sponge: Option, 125 | ) -> Result>, SynthesisError> 126 | where 127 | Self::InputInstance: 'a, 128 | Self::AccumulatorInstance: 'a, 129 | { 130 | let sponge = sponge.unwrap_or_else(|| SV::new(cs.clone())); 131 | 132 | let mut all_input_instances = input_instances 133 | .into_iter() 134 | .chain(old_accumulator_instances) 135 | .collect::>(); 136 | 137 | let default_input_instance; 138 | if all_input_instances.is_empty() { 139 | default_input_instance = 140 | Some(InputInstanceVar::new_constant(cs, InputInstance::zero())?); 141 | all_input_instances.push(default_input_instance.as_ref().unwrap()); 142 | } 143 | 144 | if !Self::check_proof_structure(proof, all_input_instances.len()) { 145 | return Ok(Boolean::FALSE); 146 | } 147 | 148 | let mut verify_result = Boolean::TRUE; 149 | 150 | // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20. 151 | let mut challenge_point_sponge = sponge.clone(); 152 | challenge_point_sponge.absorb(&verifier_key.0)?; 153 | 154 | let mut commitment = Vec::new(); 155 | for (input_instance, single_proof) in 156 | all_input_instances.into_iter().zip(&proof.single_proofs) 157 | { 158 | // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20. 159 | absorb_gadget!( 160 | &mut challenge_point_sponge, 161 | input_instance, 162 | single_proof.witness_commitment 163 | ); 164 | 165 | // Step 4 of the scheme's accumulation verifier, as detailed in BCLMS20. 
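// This checks that `single_proof.eval - input.eval == witness_eval * (challenge_point - input.point)`,
// i.e. that the claimed witness evaluation is consistent with the quotient `(p - v) / (X - x)` at the challenge point.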
166 | let eval_check_lhs: NonNativeFieldVar> = 167 | &single_proof.eval - &input_instance.eval; 168 | let eval_check_rhs: NonNativeFieldVar> = (&single_proof 169 | .witness_eval) 170 | .mul(&(&new_accumulator_instance.point - &input_instance.point)); 171 | 172 | let eval_check = eval_check_lhs.is_eq(&eval_check_rhs)?; 173 | verify_result = verify_result.and(&eval_check)?; 174 | 175 | commitment.push(&input_instance.commitment); 176 | } 177 | 178 | // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20. 179 | let mut challenge_point_sponge_field_element_and_bits = challenge_point_sponge 180 | .squeeze_nonnative_field_elements_with_sizes(&[FieldElementSize::Truncated( 181 | CHALLENGE_POINT_SIZE, 182 | )])?; 183 | 184 | let challenge_point = challenge_point_sponge_field_element_and_bits 185 | .0 186 | .pop() 187 | .unwrap(); 188 | 189 | let challenge_point_bits = challenge_point_sponge_field_element_and_bits 190 | .1 191 | .pop() 192 | .unwrap(); 193 | 194 | verify_result = 195 | verify_result.and(&challenge_point.is_eq(&new_accumulator_instance.point)?)?; 196 | 197 | // Step 5 of the scheme's accumulation verifier, as detailed in BCLMS20. 198 | let mut linear_combination_challenge_sponge = sponge; 199 | let challenge_point_bytes = challenge_point_bits 200 | .chunks(8) 201 | .map(|bits| { 202 | if bits.len() == 8 { 203 | UInt8::>::from_bits_le(bits) 204 | } else { 205 | let mut bits_tmp = bits.to_vec(); 206 | bits_tmp.resize_with(8, || Boolean::FALSE); 207 | UInt8::>::from_bits_le(bits_tmp.as_slice()) 208 | } 209 | }) 210 | .collect::>(); 211 | 212 | // Step 3 of the scheme's accumulation verifier, as detailed in BCLMS20. 213 | linear_combination_challenge_sponge.absorb(&challenge_point_bytes)?; 214 | 215 | for single_proof in &proof.single_proofs { 216 | absorb_gadget!( 217 | &mut linear_combination_challenge_sponge, 218 | single_proof.eval.to_bytes()?, 219 | single_proof.witness_eval.to_bytes()? 220 | ); 221 | } 222 | 223 | let (linear_combination_challenge, linear_combination_challenge_bits) = 224 | linear_combination_challenge_sponge.squeeze_nonnative_field_elements_with_sizes( 225 | vec![ 226 | FieldElementSize::Truncated(LINEAR_COMBINATION_CHALLENGE_SIZE); 227 | proof.single_proofs.len() * 2 228 | ] 229 | .as_slice(), 230 | )?; 231 | 232 | // Step 6 of the scheme's accumulation verifier, as detailed in BCLMS20. 233 | let combined_eval = Self::combine_evaluation( 234 | proof 235 | .single_proofs 236 | .iter() 237 | .map(|p| &p.eval) 238 | .chain(proof.single_proofs.iter().map(|p| &p.witness_eval)), 239 | linear_combination_challenge.as_slice(), 240 | )?; 241 | 242 | verify_result = verify_result.and(&combined_eval.is_eq(&new_accumulator_instance.eval)?)?; 243 | 244 | // Step 7 of the scheme's accumulation verifier, as detailed in BCLMS20. 
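// Recomputes the linear combination of the input commitments and witness commitments under the
// squeezed challenge bits, and compares it against the new accumulator's commitment.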
245 | let combined_commitment = Self::combine_commitment( 246 | commitment 247 | .into_iter() 248 | .chain(proof.single_proofs.iter().map(|p| &p.witness_commitment)), 249 | linear_combination_challenge_bits.as_slice(), 250 | )?; 251 | 252 | verify_result = 253 | verify_result.and(&combined_commitment.is_eq(&new_accumulator_instance.commitment)?)?; 254 | 255 | Ok(verify_result) 256 | } 257 | } 258 | 259 | #[cfg(test)] 260 | pub mod tests { 261 | use crate::constraints::tests::ASVerifierGadgetTests; 262 | use crate::trivial_pc_as::constraints::ASForTrivialPCVerifierGadget; 263 | use crate::trivial_pc_as::tests::{ASForTrivialPCTestInput, ASForTrivialPCTestParams}; 264 | use crate::trivial_pc_as::ASForTrivialPC; 265 | use ark_relations::r1cs::SynthesisError; 266 | use ark_sponge::poseidon::constraints::PoseidonSpongeVar; 267 | use ark_sponge::poseidon::PoseidonSponge; 268 | 269 | type G = ark_pallas::Affine; 270 | type C = ark_pallas::constraints::GVar; 271 | type CF = ark_pallas::Fq; 272 | 273 | type Sponge = PoseidonSponge; 274 | type SpongeVar = PoseidonSpongeVar; 275 | 276 | type AS = ASForTrivialPC; 277 | type ASV = ASForTrivialPCVerifierGadget; 278 | type I = ASForTrivialPCTestInput; 279 | 280 | type Tests = ASVerifierGadgetTests; 281 | 282 | #[test] 283 | pub fn single_input_init_test() -> Result<(), SynthesisError> { 284 | Tests::single_input_init_test(&ASForTrivialPCTestParams { degree: 11 }) 285 | } 286 | 287 | #[test] 288 | pub fn multiple_inputs_init_test() -> Result<(), SynthesisError> { 289 | Tests::multiple_inputs_init_test(&ASForTrivialPCTestParams { degree: 11 }) 290 | } 291 | 292 | #[test] 293 | pub fn simple_accumulation_test() -> Result<(), SynthesisError> { 294 | Tests::simple_accumulation_test(&ASForTrivialPCTestParams { degree: 11 }) 295 | } 296 | 297 | #[test] 298 | pub fn multiple_inputs_accumulation_test() -> Result<(), SynthesisError> { 299 | Tests::multiple_inputs_accumulation_test(&ASForTrivialPCTestParams { degree: 11 }) 300 | } 301 | 302 | #[test] 303 | pub fn accumulators_only_test() -> Result<(), SynthesisError> { 304 | Tests::accumulators_only_test(&ASForTrivialPCTestParams { degree: 11 }) 305 | } 306 | 307 | #[test] 308 | pub fn no_inputs_init_test() -> Result<(), SynthesisError> { 309 | Tests::no_inputs_init_test(&ASForTrivialPCTestParams { degree: 11 }) 310 | } 311 | } 312 | -------------------------------------------------------------------------------- /src/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::{AccumulationScheme, AtomicAccumulationScheme}; 2 | 3 | use ark_ff::PrimeField; 4 | use ark_r1cs_std::alloc::AllocVar; 5 | use ark_r1cs_std::bits::boolean::Boolean; 6 | use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; 7 | use ark_sponge::constraints::CryptographicSpongeVar; 8 | use ark_sponge::CryptographicSponge; 9 | 10 | /// The verifier gadget of an [`AccumulationScheme`]. 11 | pub trait ASVerifierGadget< 12 | CF: PrimeField, 13 | S: CryptographicSponge, 14 | SV: CryptographicSpongeVar, 15 | AS: AccumulationScheme, 16 | > 17 | { 18 | /// The key used to check that an accumulator was computed correctly from the inputs 19 | /// and old accumulators. 20 | /// The constraints equivalent of [`AccumulationScheme::VerifierKey`]. 21 | type VerifierKey: AllocVar; 22 | 23 | /// The instance of the input that was accumulated. 24 | /// The constraints equivalent of [`AccumulationScheme::InputInstance`]. 25 | type InputInstance: AllocVar; 26 | 27 | /// The instance of the accumulator. 
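/// Note (added): for some schemes (e.g. `ASForTrivialPC`), this coincides with the input instance type.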
28 | /// The constraints equivalent of [`AccumulationScheme::AccumulatorInstance`]. 29 | type AccumulatorInstance: AllocVar; 30 | 31 | /// The proof attesting that an accumulator was properly computed. 32 | /// The constraints equivalent of [`AccumulationScheme::Proof`]. 33 | type Proof: AllocVar; 34 | 35 | /// Verifies that the new accumulator instance was computed properly from the input instances 36 | /// and old accumulator instances. 37 | /// The constraints equivalent of [`AccumulationScheme::verify`]. 38 | fn verify<'a>( 39 | cs: ConstraintSystemRef, 40 | verifier_key: &Self::VerifierKey, 41 | input_instances: impl IntoIterator, 42 | old_accumulator_instances: impl IntoIterator, 43 | new_accumulator_instance: &Self::AccumulatorInstance, 44 | proof: &Self::Proof, 45 | sponge: Option, 46 | ) -> Result, SynthesisError> 47 | where 48 | Self::InputInstance: 'a, 49 | Self::AccumulatorInstance: 'a; 50 | } 51 | 52 | /// The verifier gadget of an [`AtomicAccumulationScheme`][crate::AtomicAccumulationScheme]. 53 | pub trait AtomicASVerifierGadget< 54 | CF: PrimeField, 55 | S: CryptographicSponge, 56 | SV: CryptographicSpongeVar, 57 | AS: AtomicAccumulationScheme, 58 | >: ASVerifierGadget 59 | { 60 | } 61 | 62 | #[cfg(test)] 63 | pub mod tests { 64 | use crate::constraints::ASVerifierGadget; 65 | use crate::tests::{ASTestInput, TemplateParams, TestParameters}; 66 | use crate::{AccumulationScheme, Accumulator, Input, MakeZK}; 67 | use ark_ff::PrimeField; 68 | use ark_r1cs_std::alloc::AllocVar; 69 | use ark_r1cs_std::bits::boolean::Boolean; 70 | use ark_r1cs_std::eq::EqGadget; 71 | use ark_relations::r1cs::{ConstraintSystem, SynthesisError}; 72 | use ark_sponge::constraints::CryptographicSpongeVar; 73 | use ark_sponge::CryptographicSponge; 74 | use ark_std::marker::PhantomData; 75 | 76 | pub const NUM_ITERATIONS: usize = 1; 77 | 78 | pub struct ASVerifierGadgetTests 79 | where 80 | CF: PrimeField, 81 | S: CryptographicSponge, 82 | SV: CryptographicSpongeVar, 83 | AS: AccumulationScheme, 84 | ASV: ASVerifierGadget, 85 | I: ASTestInput, 86 | { 87 | _constraint_field: PhantomData, 88 | _sponge: PhantomData, 89 | _sponge_var: PhantomData, 90 | _acc_scheme: PhantomData, 91 | _acc_scheme_verifier: PhantomData, 92 | _test_input: PhantomData, 93 | } 94 | 95 | impl ASVerifierGadgetTests 96 | where 97 | CF: PrimeField, 98 | S: CryptographicSponge, 99 | SV: CryptographicSpongeVar, 100 | AS: AccumulationScheme, 101 | ASV: ASVerifierGadget, 102 | I: ASTestInput, 103 | { 104 | /// For each iteration, runs the accumulation scheme for `num_accumulations` steps of 105 | /// proving and verifying. 106 | /// Assumes that all native AS operations work. 
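/// Each instance and proof is additionally allocated as an R1CS variable, and the
/// verifier gadget's output is enforced to equal `Boolean::TRUE` on the same
/// constraint system, which must remain satisfied throughout.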
107 | pub fn test_template( 108 | template_params: &TemplateParams, 109 | test_params: &I::TestParams, 110 | ) -> Result { 111 | assert!(template_params.num_iterations > 0); 112 | 113 | let num_inputs_per_iteration = &template_params.num_inputs_per_iteration; 114 | let num_iterations = template_params.num_iterations; 115 | let total_num_inputs = num_iterations * num_inputs_per_iteration.iter().sum::(); 116 | 117 | let cs = ConstraintSystem::::new_ref(); 118 | 119 | let mut rng = ark_std::test_rng(); 120 | let public_params = AS::setup(&mut rng).ok().unwrap(); 121 | 122 | let (input_params, predicate_params, predicate_index) = I::setup(test_params, &mut rng); 123 | let (pk, vk, _) = AS::index(&public_params, &predicate_params, &predicate_index) 124 | .ok() 125 | .unwrap(); 126 | let vk_var = ASV::VerifierKey::new_constant(cs.clone(), vk.clone())?; 127 | 128 | let inputs = I::generate_inputs(&input_params, total_num_inputs, &mut rng); 129 | assert_eq!(total_num_inputs, inputs.len()); 130 | 131 | let input_instance_vars = inputs 132 | .iter() 133 | .map(|input| { 134 | ASV::InputInstance::new_witness(cs.clone(), || Ok(input.instance.clone())) 135 | }) 136 | .collect::, SynthesisError>>() 137 | .unwrap(); 138 | 139 | let mut inputs_start = 0; 140 | for _ in 0..num_iterations { 141 | let mut old_accumulators = Vec::with_capacity(num_inputs_per_iteration.len()); 142 | let mut old_accumulator_instance_vars = 143 | Vec::with_capacity(num_inputs_per_iteration.len()); 144 | 145 | for num_inputs in num_inputs_per_iteration { 146 | let inputs = &inputs[inputs_start..(inputs_start + num_inputs)]; 147 | let input_instance_vars = 148 | &input_instance_vars[inputs_start..(inputs_start + num_inputs)]; 149 | inputs_start += num_inputs; 150 | 151 | let (accumulator, proof) = AS::prove( 152 | &pk, 153 | Input::::map_to_refs(inputs), 154 | Accumulator::::map_to_refs(&old_accumulators), 155 | if test_params.make_zk() { 156 | MakeZK::Enabled(&mut rng) 157 | } else { 158 | MakeZK::Disabled 159 | }, 160 | None::, 161 | ) 162 | .ok() 163 | .unwrap(); 164 | 165 | let accumulator_instance_var = 166 | ASV::AccumulatorInstance::new_input(cs.clone(), || { 167 | Ok(accumulator.instance.clone()) 168 | }) 169 | .unwrap(); 170 | 171 | let proof_var = ASV::Proof::new_witness(cs.clone(), || Ok(proof)).unwrap(); 172 | 173 | assert!( 174 | cs.is_satisfied().unwrap(), 175 | "CS is not satisfied from the test setup." 
176 | ); 177 | 178 | ASV::verify( 179 | cs.clone(), 180 | &vk_var, 181 | input_instance_vars, 182 | &old_accumulator_instance_vars, 183 | &accumulator_instance_var, 184 | &proof_var, 185 | None::, 186 | ) 187 | .unwrap() 188 | .enforce_equal(&Boolean::TRUE) 189 | .unwrap(); 190 | 191 | assert!(cs.is_satisfied().unwrap(), "Verify failed."); 192 | 193 | old_accumulator_instance_vars.push( 194 | ASV::AccumulatorInstance::new_witness(cs.clone(), || { 195 | Ok(accumulator.instance.clone()) 196 | }) 197 | .unwrap(), 198 | ); 199 | old_accumulators.push(accumulator); 200 | } 201 | } 202 | 203 | Ok(true) 204 | } 205 | 206 | pub fn print_costs_breakdown(test_params: &I::TestParams) { 207 | let mut rng = ark_std::test_rng(); 208 | 209 | let (input_params, predicate_params, predicate_index) = I::setup(test_params, &mut rng); 210 | let pp = AS::setup(&mut rng).unwrap(); 211 | let (pk, vk, _) = AS::index(&pp, &predicate_params, &predicate_index).unwrap(); 212 | 213 | let mut inputs = I::generate_inputs(&input_params, 2, &mut rng); 214 | 215 | let old_input = inputs.pop().unwrap(); 216 | let new_input = inputs.pop().unwrap(); 217 | 218 | let (old_accumulator, _) = AS::prove( 219 | &pk, 220 | vec![old_input.as_ref()], 221 | vec![], 222 | if test_params.make_zk() { 223 | MakeZK::Enabled(&mut rng) 224 | } else { 225 | MakeZK::Disabled 226 | }, 227 | None::, 228 | ) 229 | .unwrap(); 230 | 231 | let (new_accumulator, proof) = AS::prove( 232 | &pk, 233 | vec![new_input.as_ref()], 234 | vec![old_accumulator.as_ref()], 235 | if test_params.make_zk() { 236 | MakeZK::Enabled(&mut rng) 237 | } else { 238 | MakeZK::Disabled 239 | }, 240 | None::, 241 | ) 242 | .unwrap(); 243 | 244 | let cs = ConstraintSystem::::new_ref(); 245 | 246 | let start_cost = cs.num_constraints(); 247 | let vk_var = ASV::VerifierKey::new_constant(cs.clone(), vk.clone()).unwrap(); 248 | println!( 249 | "Cost of allocating vk {:?}", 250 | cs.num_constraints() - start_cost 251 | ); 252 | 253 | let start_cost = cs.num_constraints(); 254 | let new_input_instance_var = 255 | ASV::InputInstance::new_witness(cs.clone(), || Ok(new_input.instance)).unwrap(); 256 | println!( 257 | "Cost of allocating input {:?}", 258 | cs.num_constraints() - start_cost 259 | ); 260 | 261 | let start_cost = cs.num_constraints(); 262 | let old_accumulator_instance_var = 263 | ASV::AccumulatorInstance::new_witness(cs.clone(), || Ok(old_accumulator.instance)) 264 | .unwrap(); 265 | println!( 266 | "Cost of allocating old accumulator {:?}", 267 | cs.num_constraints() - start_cost 268 | ); 269 | 270 | let start_cost = cs.num_constraints(); 271 | let new_accumulator_instance_var = 272 | ASV::AccumulatorInstance::new_input(cs.clone(), || Ok(new_accumulator.instance)) 273 | .unwrap(); 274 | println!( 275 | "Cost of allocating new accumulator {:?}", 276 | cs.num_constraints() - start_cost 277 | ); 278 | 279 | let start_cost = cs.num_constraints(); 280 | let proof_var = ASV::Proof::new_witness(cs.clone(), || Ok(proof)).unwrap(); 281 | println!( 282 | "Cost of allocating proof {:?}", 283 | cs.num_constraints() - start_cost 284 | ); 285 | 286 | let start_cost = cs.num_constraints(); 287 | ASV::verify( 288 | cs.clone(), 289 | &vk_var, 290 | vec![&new_input_instance_var], 291 | vec![&old_accumulator_instance_var], 292 | &new_accumulator_instance_var, 293 | &proof_var, 294 | None::, 295 | ) 296 | .unwrap() 297 | .enforce_equal(&Boolean::TRUE) 298 | .unwrap(); 299 | println!("Cost of verify {:?}", cs.num_constraints() - start_cost); 300 | 301 | println!("Num constaints: {:}", 
cs.num_constraints()); 302 | println!("Num instance: {:}", cs.num_instance_variables()); 303 | println!("Num witness: {:}", cs.num_witness_variables()); 304 | 305 | assert!(cs.is_satisfied().unwrap()); 306 | } 307 | 308 | /// Tests the initialization of the first accumulator using one input. 309 | pub fn single_input_init_test(test_params: &I::TestParams) -> Result<(), SynthesisError> { 310 | let template_params = TemplateParams { 311 | num_iterations: NUM_ITERATIONS, 312 | num_inputs_per_iteration: vec![1], 313 | }; 314 | assert!(Self::test_template(&template_params, test_params)?); 315 | Ok(()) 316 | } 317 | 318 | /// Tests the initialization of the first accumulator using multiple inputs. 319 | pub fn multiple_inputs_init_test( 320 | test_params: &I::TestParams, 321 | ) -> Result<(), SynthesisError> { 322 | let template_params = TemplateParams { 323 | num_iterations: NUM_ITERATIONS, 324 | num_inputs_per_iteration: vec![3], 325 | }; 326 | assert!(Self::test_template(&template_params, test_params)?); 327 | Ok(()) 328 | } 329 | 330 | /// Tests the accumulation of one input and one accumulator. 331 | pub fn simple_accumulation_test(test_params: &I::TestParams) -> Result<(), SynthesisError> { 332 | let template_params = TemplateParams { 333 | num_iterations: NUM_ITERATIONS, 334 | num_inputs_per_iteration: vec![1, 1], 335 | }; 336 | Self::print_costs_breakdown(test_params); 337 | assert!(Self::test_template(&template_params, test_params)?); 338 | Ok(()) 339 | } 340 | 341 | /// Tests the accumulation of multiple inputs and multiple accumulators. 342 | pub fn multiple_inputs_accumulation_test( 343 | test_params: &I::TestParams, 344 | ) -> Result<(), SynthesisError> { 345 | let template_params = TemplateParams { 346 | num_iterations: NUM_ITERATIONS, 347 | num_inputs_per_iteration: vec![1, 1, 2, 3], 348 | }; 349 | assert!(Self::test_template(&template_params, test_params)?); 350 | Ok(()) 351 | } 352 | 353 | /// Tests the accumulation of multiple accumulators without any inputs. 354 | pub fn accumulators_only_test(test_params: &I::TestParams) -> Result<(), SynthesisError> { 355 | let template_params = TemplateParams { 356 | num_iterations: NUM_ITERATIONS, 357 | num_inputs_per_iteration: vec![1, 0, 0, 0], 358 | }; 359 | 360 | assert!(Self::test_template(&template_params, test_params)?); 361 | Ok(()) 362 | } 363 | 364 | /// Tests the initialization of the first accumulator without any inputs. 
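/// Runs a single iteration whose list of inputs is empty.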
365 | pub fn no_inputs_init_test(test_params: &I::TestParams) -> Result<(), SynthesisError> { 366 | let template_params = TemplateParams { 367 | num_iterations: 1, 368 | num_inputs_per_iteration: vec![0], 369 | }; 370 | 371 | assert!(Self::test_template(&template_params, test_params)?); 372 | Ok(()) 373 | } 374 | } 375 | } 376 | -------------------------------------------------------------------------------- /src/hp_as/constraints/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::constraints::ASVerifierGadget; 2 | use crate::hp_as::data_structures::InputInstance; 3 | use crate::hp_as::{ASForHadamardProducts, CHALLENGE_SIZE}; 4 | use crate::ConstraintF; 5 | 6 | use ark_ec::AffineCurve; 7 | use ark_nonnative_field::NonNativeFieldVar; 8 | use ark_r1cs_std::alloc::AllocVar; 9 | use ark_r1cs_std::bits::boolean::Boolean; 10 | use ark_r1cs_std::groups::CurveVar; 11 | use ark_r1cs_std::ToBitsGadget; 12 | use ark_relations::r1cs::{ConstraintSystemRef, SynthesisError}; 13 | use ark_sponge::constraints::AbsorbableGadget; 14 | use ark_sponge::constraints::{bits_le_to_nonnative, CryptographicSpongeVar}; 15 | use ark_sponge::{absorb_gadget, Absorbable, CryptographicSponge, FieldElementSize}; 16 | use ark_std::marker::PhantomData; 17 | use ark_std::ops::Mul; 18 | use ark_std::vec; 19 | use ark_std::vec::Vec; 20 | 21 | mod data_structures; 22 | pub use data_structures::*; 23 | 24 | /// The verifier gadget of [`ASForHadamardProducts`][as_for_hp]. 25 | /// 26 | /// [as_for_hp]: crate::hp_as::ASForHadamardProducts 27 | pub struct ASForHPVerifierGadget 28 | where 29 | G: AffineCurve + Absorbable>, 30 | C: CurveVar> + AbsorbableGadget>, 31 | ConstraintF: Absorbable>, 32 | S: CryptographicSponge>, 33 | SV: CryptographicSpongeVar, S>, 34 | { 35 | _affine: PhantomData, 36 | _curve: PhantomData, 37 | _sponge: PhantomData, 38 | _sponge_var: PhantomData, 39 | } 40 | 41 | impl ASForHPVerifierGadget 42 | where 43 | G: AffineCurve + Absorbable>, 44 | C: CurveVar> + AbsorbableGadget>, 45 | ConstraintF: Absorbable>, 46 | S: CryptographicSponge>, 47 | SV: CryptographicSpongeVar, S>, 48 | { 49 | /// Check that the input witness is properly structured. 50 | fn check_proof_structure(proof: &ProofVar, num_inputs: usize) -> bool { 51 | assert!(num_inputs > 0); 52 | 53 | // The number of commitments to the low and high coefficients must be equal, given how 54 | // they were computed. 55 | if proof.product_poly_comm.low.len() != proof.product_poly_comm.high.len() { 56 | return false; 57 | } 58 | 59 | // The number of commitments can be derived from the number of inputs. Ensure that 60 | // they match. 61 | if proof.product_poly_comm.low.len() != num_inputs - 1 { 62 | return false; 63 | } 64 | 65 | true 66 | } 67 | 68 | /// Compute the mu challenges from a provided sponge. 
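/// The first challenge is fixed to one (a single `TRUE` bit) and the remaining
/// challenges are squeezed from the sponge in `CHALLENGE_SIZE`-bit chunks. When
/// zero knowledge is enabled, one extra challenge is appended, computed as the
/// product of two of the previously squeezed challenges.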
69 | #[tracing::instrument(target = "r1cs", skip(sponge, num_inputs, make_zk))] 70 | fn squeeze_mu_challenges( 71 | sponge: &mut impl CryptographicSpongeVar, S>, 72 | num_inputs: usize, 73 | make_zk: bool, 74 | ) -> Result>>>, SynthesisError> { 75 | let mut mu_challenges_bits = Vec::with_capacity(num_inputs); 76 | mu_challenges_bits.push(vec![Boolean::TRUE]); 77 | 78 | if num_inputs > 1 { 79 | let mu_challenges_bits_rest = sponge.squeeze_bits(CHALLENGE_SIZE * (num_inputs - 1))?; 80 | mu_challenges_bits_rest 81 | .chunks(CHALLENGE_SIZE) 82 | .into_iter() 83 | .for_each(|bits| mu_challenges_bits.push(bits.to_vec())); 84 | } 85 | 86 | if make_zk { 87 | let hiding_components_bits = 88 | vec![&mu_challenges_bits[1], &mu_challenges_bits[num_inputs - 1]]; 89 | let mut hiding_components_fe: Vec>> = 90 | bits_le_to_nonnative(sponge.cs().clone(), hiding_components_bits)?; 91 | mu_challenges_bits.push( 92 | (hiding_components_fe 93 | .pop() 94 | .unwrap() 95 | .mul(&hiding_components_fe.pop().unwrap())) 96 | .to_bits_le()?, 97 | ); 98 | } 99 | 100 | Ok(mu_challenges_bits) 101 | } 102 | 103 | /// Compute the nu challenges from a provided sponge. 104 | #[tracing::instrument(target = "r1cs", skip(sponge, num_inputs))] 105 | fn squeeze_nu_challenges( 106 | sponge: &mut impl CryptographicSpongeVar, S>, 107 | num_inputs: usize, 108 | ) -> Result>>>, SynthesisError> { 109 | let nu_size = FieldElementSize::Truncated(CHALLENGE_SIZE); 110 | let (mut nu_challenge_fe, mut nu_challenge_bits) = 111 | sponge.squeeze_nonnative_field_elements_with_sizes(vec![nu_size].as_slice())?; 112 | let nu_challenge_fe: NonNativeFieldVar> = 113 | nu_challenge_fe.pop().unwrap(); 114 | 115 | let mut nu_challenges_bits: Vec>>> = 116 | Vec::with_capacity(2 * num_inputs - 1); 117 | 118 | nu_challenges_bits.push(vec![Boolean::TRUE]); 119 | nu_challenges_bits.push(nu_challenge_bits.pop().unwrap()); 120 | 121 | let mut cur_nu_challenge = nu_challenge_fe.clone(); 122 | for _ in 2..(num_inputs + 1) { 123 | cur_nu_challenge *= &nu_challenge_fe; 124 | nu_challenges_bits.push(cur_nu_challenge.to_bits_le()?); 125 | } 126 | 127 | Ok(nu_challenges_bits) 128 | } 129 | 130 | /// Computes the linear combination of Pedersen commitments. 131 | #[tracing::instrument( 132 | target = "r1cs", 133 | skip(commitments, challenges, extra_challenges, hiding_comms) 134 | )] 135 | fn combine_commitments<'a>( 136 | commitments: impl IntoIterator, 137 | challenges: &[Vec>>], 138 | extra_challenges: Option<&[Vec>>]>, 139 | hiding_comms: Option<&C>, 140 | ) -> Result { 141 | let mut combined_commitment = hiding_comms.map(C::clone).unwrap_or(C::zero()); 142 | for (i, commitment) in commitments.into_iter().enumerate() { 143 | let mut addend = commitment.clone(); 144 | if !(challenges[i].len() == 1 && challenges[i][0].eq(&Boolean::TRUE)) { 145 | addend = addend.scalar_mul_le(challenges[i].iter())?; 146 | } 147 | 148 | if let Some(extra_challenge) = 149 | extra_challenges.as_ref().map(|challenges| &challenges[i]) 150 | { 151 | if !(extra_challenge.len() == 1 && extra_challenge[0].eq(&Boolean::TRUE)) { 152 | addend = addend.scalar_mul_le(extra_challenge.iter())?; 153 | } 154 | } 155 | 156 | combined_commitment += &addend; 157 | } 158 | 159 | Ok(combined_commitment) 160 | } 161 | 162 | /// Combines the accumulation input instances into a single input instance. 
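/// The first commitments are combined under both the mu and nu challenges, the
/// second commitments under the nu challenges in reverse input order, and the
/// third commitments together with the product polynomial commitments from the
/// proof; any hiding commitments are folded in when present.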
163 | #[tracing::instrument( 164 | target = "r1cs", 165 | skip(input_instances, proof, mu_challenges, nu_challenges) 166 | )] 167 | fn compute_combined_hp_commitments( 168 | input_instances: &[&InputInstanceVar], 169 | proof: &ProofVar, 170 | mu_challenges: &[Vec>>], 171 | nu_challenges: &[Vec>>], 172 | ) -> Result, SynthesisError> { 173 | let num_inputs = input_instances.len(); 174 | 175 | let hiding_comm_addend_1 = proof 176 | .hiding_comms 177 | .as_ref() 178 | .map(|hiding_comms| { 179 | hiding_comms 180 | .comm_1 181 | .scalar_mul_le(mu_challenges[num_inputs].iter()) 182 | }) 183 | .transpose()?; 184 | 185 | let comm_1 = Self::combine_commitments( 186 | input_instances.iter().map(|instance| &instance.comm_1), 187 | mu_challenges, 188 | Some(nu_challenges), 189 | hiding_comm_addend_1.as_ref(), 190 | )?; 191 | 192 | let hiding_comm_addend_2 = proof 193 | .hiding_comms 194 | .as_ref() 195 | .map(|hiding_comms| hiding_comms.comm_2.scalar_mul_le(mu_challenges[1].iter())) 196 | .transpose()?; 197 | 198 | let comm_2 = Self::combine_commitments( 199 | input_instances 200 | .iter() 201 | .map(|instance| &instance.comm_2) 202 | .rev(), 203 | nu_challenges, 204 | None, 205 | hiding_comm_addend_2.as_ref(), 206 | )?; 207 | 208 | let comm_3 = { 209 | let product_poly_comm_low_addend = Self::combine_commitments( 210 | proof.product_poly_comm.low.iter(), 211 | &nu_challenges, 212 | None, 213 | None, 214 | )?; 215 | 216 | let product_poly_comm_high_addend = Self::combine_commitments( 217 | proof.product_poly_comm.high.iter(), 218 | &nu_challenges, 219 | None, 220 | None, 221 | )? 222 | .scalar_mul_le(nu_challenges[1].iter())?; 223 | 224 | let hiding_comm_addend_3 = proof 225 | .hiding_comms 226 | .as_ref() 227 | .map(|hiding_comms| { 228 | hiding_comms 229 | .comm_3 230 | .scalar_mul_le(mu_challenges[num_inputs].iter()) 231 | }) 232 | .transpose()?; 233 | 234 | let comm_3_addend = Self::combine_commitments( 235 | input_instances.iter().map(|instance| &instance.comm_3), 236 | &mu_challenges, 237 | None, 238 | hiding_comm_addend_3.as_ref(), 239 | )?; 240 | 241 | product_poly_comm_low_addend 242 | + &(product_poly_comm_high_addend + &comm_3_addend) 243 | .scalar_mul_le(nu_challenges[num_inputs - 1].iter())? 
244 | }; 245 | 246 | Ok(InputInstanceVar { 247 | comm_1, 248 | comm_2, 249 | comm_3, 250 | _curve: PhantomData, 251 | }) 252 | } 253 | } 254 | 255 | impl ASVerifierGadget, S, SV, ASForHadamardProducts> 256 | for ASForHPVerifierGadget 257 | where 258 | G: AffineCurve + Absorbable>, 259 | C: CurveVar> + AbsorbableGadget>, 260 | ConstraintF: Absorbable>, 261 | S: CryptographicSponge>, 262 | SV: CryptographicSpongeVar, S>, 263 | { 264 | type VerifierKey = VerifierKeyVar>; 265 | type InputInstance = InputInstanceVar; 266 | type AccumulatorInstance = InputInstanceVar; 267 | type Proof = ProofVar; 268 | 269 | #[tracing::instrument( 270 | target = "r1cs", 271 | skip( 272 | verifier_key, 273 | input_instances, 274 | old_accumulator_instances, 275 | new_accumulator_instance, 276 | proof, 277 | sponge 278 | ) 279 | )] 280 | fn verify<'a>( 281 | cs: ConstraintSystemRef>, 282 | verifier_key: &Self::VerifierKey, 283 | input_instances: impl IntoIterator, 284 | old_accumulator_instances: impl IntoIterator, 285 | new_accumulator_instance: &Self::AccumulatorInstance, 286 | proof: &Self::Proof, 287 | sponge: Option, 288 | ) -> Result>, SynthesisError> 289 | where 290 | Self::InputInstance: 'a, 291 | Self::AccumulatorInstance: 'a, 292 | { 293 | let sponge = sponge.unwrap_or_else(|| SV::new(cs)); 294 | 295 | let mut input_instances = input_instances.into_iter().collect::>(); 296 | let mut old_accumulator_instances = 297 | old_accumulator_instances.into_iter().collect::>(); 298 | let mut num_all_inputs = input_instances.len() + old_accumulator_instances.len(); 299 | 300 | let make_zk = proof.hiding_comms.is_some(); 301 | 302 | // Default input in the case there are no provided inputs or accumulators. 303 | let default_input_instance; 304 | if num_all_inputs == 0 { 305 | default_input_instance = Some(InputInstanceVar::new_constant( 306 | sponge.cs(), 307 | InputInstance::zero(), 308 | )?); 309 | 310 | input_instances.push(default_input_instance.as_ref().unwrap()); 311 | num_all_inputs += 1; 312 | } 313 | 314 | // Placeholder input for hiding. 315 | let placeholder_input_instance; 316 | if make_zk && num_all_inputs == 1 { 317 | placeholder_input_instance = Some(InputInstanceVar::new_constant( 318 | sponge.cs(), 319 | InputInstance::zero(), 320 | )?); 321 | 322 | input_instances.push(placeholder_input_instance.as_ref().unwrap()); 323 | num_all_inputs += 1; 324 | } 325 | 326 | if !Self::check_proof_structure(proof, num_all_inputs) { 327 | return Ok(Boolean::FALSE); 328 | } 329 | 330 | let mut all_input_instances = input_instances; 331 | all_input_instances.append(&mut old_accumulator_instances); 332 | 333 | // Step 1 of the scheme's accumulation verifier, as detailed in BCLMS20. 334 | let mut challenges_sponge = sponge; 335 | absorb_gadget!( 336 | &mut challenges_sponge, 337 | &verifier_key.num_supported_elems, 338 | &all_input_instances, 339 | &proof.hiding_comms 340 | ); 341 | 342 | let mu_challenges_bits = 343 | Self::squeeze_mu_challenges(&mut challenges_sponge, num_all_inputs, make_zk)?; 344 | 345 | challenges_sponge.absorb(&proof.product_poly_comm)?; 346 | 347 | let nu_challenges_bits = 348 | Self::squeeze_nu_challenges(&mut challenges_sponge, num_all_inputs)?; 349 | 350 | // Steps 2-4 of the scheme's accumulation verifier, as detailed in BCLMS20. 
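// The commitments recomputed here from the inputs, proof, and challenges must equal
// the commitments claimed in the new accumulator instance; the three equality checks
// below are combined into the final verification result.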
351 | let accumulator_instance = Self::compute_combined_hp_commitments( 352 | all_input_instances.as_slice(), 353 | proof, 354 | &mu_challenges_bits, 355 | &nu_challenges_bits, 356 | )?; 357 | 358 | let result1 = accumulator_instance 359 | .comm_1 360 | .is_eq(&new_accumulator_instance.comm_1)?; 361 | let result2 = accumulator_instance 362 | .comm_2 363 | .is_eq(&new_accumulator_instance.comm_2)?; 364 | let result3 = accumulator_instance 365 | .comm_3 366 | .is_eq(&new_accumulator_instance.comm_3)?; 367 | 368 | result1.and(&result2)?.and(&result3) 369 | } 370 | } 371 | 372 | #[cfg(test)] 373 | pub mod tests { 374 | use crate::constraints::tests::ASVerifierGadgetTests; 375 | use crate::hp_as::constraints::ASForHPVerifierGadget; 376 | use crate::hp_as::tests::{ASForHPTestInput, ASForHPTestParams}; 377 | use crate::hp_as::ASForHadamardProducts; 378 | use ark_relations::r1cs::SynthesisError; 379 | use ark_sponge::poseidon::constraints::PoseidonSpongeVar; 380 | use ark_sponge::poseidon::PoseidonSponge; 381 | 382 | type G = ark_pallas::Affine; 383 | type C = ark_pallas::constraints::GVar; 384 | type CF = ark_pallas::Fq; 385 | 386 | type Sponge = PoseidonSponge; 387 | type SpongeVar = PoseidonSpongeVar; 388 | 389 | type AS = ASForHadamardProducts; 390 | type I = ASForHPTestInput; 391 | type ASV = ASForHPVerifierGadget; 392 | 393 | type Tests = ASVerifierGadgetTests; 394 | 395 | #[test] 396 | pub fn single_input_init_test_no_zk() -> Result<(), SynthesisError> { 397 | Tests::single_input_init_test(&ASForHPTestParams { 398 | vector_len: 11, 399 | make_zk: false, 400 | }) 401 | } 402 | 403 | #[test] 404 | pub fn single_input_init_test_zk() -> Result<(), SynthesisError> { 405 | Tests::single_input_init_test(&ASForHPTestParams { 406 | vector_len: 11, 407 | make_zk: true, 408 | }) 409 | } 410 | 411 | #[test] 412 | pub fn multiple_inputs_init_test_no_zk() -> Result<(), SynthesisError> { 413 | Tests::multiple_inputs_init_test(&ASForHPTestParams { 414 | vector_len: 11, 415 | make_zk: false, 416 | }) 417 | } 418 | 419 | #[test] 420 | pub fn multiple_input_init_test_zk() -> Result<(), SynthesisError> { 421 | Tests::multiple_inputs_init_test(&ASForHPTestParams { 422 | vector_len: 11, 423 | make_zk: true, 424 | }) 425 | } 426 | 427 | #[test] 428 | pub fn simple_accumulation_test_no_zk() -> Result<(), SynthesisError> { 429 | Tests::simple_accumulation_test(&ASForHPTestParams { 430 | vector_len: 11, 431 | make_zk: false, 432 | }) 433 | } 434 | 435 | #[test] 436 | pub fn simple_accumulation_test_zk() -> Result<(), SynthesisError> { 437 | Tests::simple_accumulation_test(&ASForHPTestParams { 438 | vector_len: 11, 439 | make_zk: true, 440 | }) 441 | } 442 | 443 | #[test] 444 | pub fn multiple_inputs_accumulation_test_no_zk() -> Result<(), SynthesisError> { 445 | Tests::multiple_inputs_accumulation_test(&ASForHPTestParams { 446 | vector_len: 11, 447 | make_zk: false, 448 | }) 449 | } 450 | 451 | #[test] 452 | pub fn multiple_inputs_accumulation_test_zk() -> Result<(), SynthesisError> { 453 | Tests::multiple_inputs_accumulation_test(&ASForHPTestParams { 454 | vector_len: 11, 455 | make_zk: true, 456 | }) 457 | } 458 | 459 | #[test] 460 | pub fn accumulators_only_test_no_zk() -> Result<(), SynthesisError> { 461 | Tests::accumulators_only_test(&ASForHPTestParams { 462 | vector_len: 11, 463 | make_zk: false, 464 | }) 465 | } 466 | 467 | #[test] 468 | pub fn accumulators_only_test_zk() -> Result<(), SynthesisError> { 469 | Tests::accumulators_only_test(&ASForHPTestParams { 470 | vector_len: 11, 471 | make_zk: true, 472 | 
}) 473 | } 474 | 475 | #[test] 476 | pub fn no_inputs_init_test_no_zk() -> Result<(), SynthesisError> { 477 | Tests::no_inputs_init_test(&ASForHPTestParams { 478 | vector_len: 11, 479 | make_zk: false, 480 | }) 481 | } 482 | 483 | #[test] 484 | pub fn no_inputs_init_test_zk() -> Result<(), SynthesisError> { 485 | Tests::no_inputs_init_test(&ASForHPTestParams { 486 | vector_len: 11, 487 | make_zk: true, 488 | }) 489 | } 490 | } 491 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(feature = "std"), no_std)] 2 | 3 | //! A crate that provides infrastructure to implement accumulation schemes. 4 | //! The interface for accumulation schemes were formalized in [\[BCMS20\]][\[BCMS20\]] and 5 | //! [\[BCLMS20\]][bclms20]. 6 | //! 7 | //! [\[BCMS20\]]: https://eprint.iacr.org/2020/499 8 | //! [bclms20]: https://eprint.iacr.org/2020/1618 9 | 10 | #![deny( 11 | const_err, 12 | future_incompatible, 13 | missing_docs, 14 | non_shorthand_field_patterns, 15 | renamed_and_removed_lints, 16 | rust_2018_idioms, 17 | stable_features, 18 | trivial_casts, 19 | trivial_numeric_casts, 20 | unused, 21 | variant_size_differences, 22 | warnings 23 | )] 24 | #![forbid(unsafe_code)] 25 | #![cfg_attr(docsrs, feature(doc_cfg))] 26 | 27 | use ark_ff::PrimeField; 28 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 29 | use ark_sponge::CryptographicSponge; 30 | use ark_std::rand::RngCore; 31 | 32 | #[macro_use] 33 | extern crate derivative; 34 | 35 | #[cfg(feature = "r1cs-nark-as")] 36 | #[macro_use] 37 | extern crate ark_std; 38 | 39 | /// Common data structures used by [`AccumulationScheme`]. 40 | pub use data_structures::*; 41 | mod data_structures; 42 | 43 | /// Common errors for [`AccumulationScheme`]. 44 | pub mod error; 45 | 46 | /// Traits for [`AccumulationScheme`] verifier gadgets. 47 | #[cfg(feature = "r1cs")] 48 | #[cfg_attr(docsrs, doc(cfg(feature = "r1cs")))] 49 | pub mod constraints; 50 | 51 | /// An accumulation scheme for the Hadamard product relation. 52 | /// The construction is described in detail in [\[BCLMS20\]][bclms20]. 53 | /// 54 | /// [bclms20]: https://eprint.iacr.org/2020/1618 55 | #[cfg(feature = "hp-as")] 56 | #[cfg_attr(docsrs, doc(cfg(feature = "hp-as")))] 57 | pub mod hp_as; 58 | 59 | /// An accumulation scheme based on the hardness of the discrete log problem. 60 | /// The construction is described in detail in [\[BCMS20\]][\[BCMS20\]]. 61 | /// 62 | /// [\[BCMS20\]]: https://eprint.iacr.org/2020/499 63 | #[cfg(feature = "ipa-pc-as")] 64 | #[cfg_attr(docsrs, doc(cfg(feature = "ipa-pc-as")))] 65 | pub mod ipa_pc_as; 66 | 67 | /// An accumulation scheme for a NARK for R1CS. 68 | /// The construction is described in detail in [\[BCLMS20\]][bclms20]. 69 | /// 70 | /// [bclms20]: https://eprint.iacr.org/2020/1618 71 | #[cfg(feature = "r1cs-nark-as")] 72 | #[cfg_attr(docsrs, doc(cfg(feature = "r1cs-nark-as")))] 73 | pub mod r1cs_nark_as; 74 | 75 | /// An accumulation scheme for trivial homomorphic commitment schemes. 76 | /// The construction is described in detail in [\[BCLMS20\]][bclms20]. 77 | /// 78 | /// [bclms20]: https://eprint.iacr.org/2020/1618 79 | #[cfg(feature = "trivial-pc-as")] 80 | #[cfg_attr(docsrs, doc(cfg(feature = "trivial-pc-as")))] 81 | pub mod trivial_pc_as; 82 | 83 | /// An interface for an accumulation scheme. 
In an accumulation scheme for a predicate, a prover 84 | /// accumulates a stream of [`Inputs`][in] into an object called an [`Accumulator`][acc]. The prover 85 | /// also outputs a [`Proof`][pf] attesting that the [`Accumulator`][acc] was computed correctly, 86 | /// which a verifier can check. At any point, a decider can use an [`Accumulator`][acc] to determine 87 | /// if each accumulated input satisfied the predicate. 88 | /// The interface is defined in [\[BCLMS20\]][bclms20] as `SplitAccumulationScheme`. 89 | /// 90 | /// [in]: Input 91 | /// [acc]: Accumulator 92 | /// [pf]: AccumulationScheme::Proof 93 | /// [bclms20]: https://eprint.iacr.org/2020/1618 94 | /// 95 | /// # Example 96 | /// ``` 97 | /// // This example only serves to demonstrate the general flow of the trait. 98 | /// 99 | /// use ark_accumulation::{AccumulationScheme, Accumulator, Input, MakeZK}; 100 | /// use ark_ff::PrimeField; 101 | /// use ark_sponge::CryptographicSponge; 102 | /// use ark_std::rand::RngCore; 103 | /// 104 | /// // Basic setup to get the parameters and keys of an accumulation scheme. 105 | /// fn initialize< 106 | /// CF: PrimeField, 107 | /// S: CryptographicSponge, 108 | /// AS: AccumulationScheme, 109 | /// R: RngCore, 110 | /// >( 111 | /// predicate_params: &AS::PredicateParams, 112 | /// predicate_index: &AS::PredicateIndex, 113 | /// rng: &mut R, 114 | /// ) -> Result<(), AS::Error> { 115 | /// let pp = AS::setup(rng)?; 116 | /// let (prover_key, verifier_key, decider_key) = 117 | /// AS::index(&pp, predicate_params, predicate_index)?; 118 | /// 119 | /// # unimplemented!() 120 | /// } 121 | /// 122 | /// // What happens if there is a new set of inputs? 123 | /// fn step< 124 | /// CF: PrimeField, 125 | /// S: CryptographicSponge, 126 | /// AS: AccumulationScheme, 127 | /// R: RngCore, 128 | /// >( 129 | /// prover_key: AS::ProverKey, 130 | /// verifier_key: AS::VerifierKey, 131 | /// decider_key: AS::DeciderKey, 132 | /// 133 | /// new_inputs: &Vec>, 134 | /// old_accumulators: &mut Vec>, 135 | /// rng: &mut R, 136 | /// ) -> Result<(), AS::Error> { 137 | /// // If there is a new input, then... 138 | /// 139 | /// // The prover may run: 140 | /// let (accumulator, proof) = AS::prove( 141 | /// &prover_key, 142 | /// Input::::map_to_refs(new_inputs), 143 | /// Accumulator::::map_to_refs(&*old_accumulators), 144 | /// MakeZK::Enabled(rng), 145 | /// None::, 146 | /// )?; 147 | /// 148 | /// // After the accumulation, the verifier may run: 149 | /// let verify_result = AS::verify( 150 | /// &verifier_key, 151 | /// Input::::instances(new_inputs), 152 | /// Accumulator::::instances(&*old_accumulators), 153 | /// &accumulator.instance, 154 | /// &proof, 155 | /// None::, 156 | /// )?; 157 | /// 158 | /// // At any point, the decider may run: 159 | /// let decide_result = AS::decide(&decider_key, accumulator.as_ref(), None::)?; 160 | /// 161 | /// # unimplemented!() 162 | /// } 163 | pub trait AccumulationScheme>: Sized { 164 | /// The public parameters for the accumulation scheme. 165 | type PublicParameters: Clone; 166 | 167 | /// The public parameters of the accumulation scheme's predicate. 168 | type PredicateParams: Clone; 169 | 170 | /// The index of the accumulation scheme's predicate. 171 | type PredicateIndex: Clone; 172 | 173 | /// The key used to accumulate inputs and old accumulators and to prove that the accumulation 174 | /// was computed correctly. 
175 | type ProverKey: Clone; 176 | 177 | /// The key used to check that an accumulator was computed correctly from the inputs 178 | /// and old accumulators. 179 | type VerifierKey: Clone; 180 | 181 | /// The key used to establish whether each of the accumulated inputs satisfies the predicate. 182 | type DeciderKey: Clone; 183 | 184 | /// The instance of the input to be accumulated. 185 | type InputInstance: Clone + CanonicalSerialize + CanonicalDeserialize; 186 | 187 | /// The witness of the input to be accumulated. 188 | type InputWitness: Clone + CanonicalSerialize + CanonicalDeserialize; 189 | 190 | /// The instance of the accumulator. 191 | type AccumulatorInstance: Clone + CanonicalSerialize + CanonicalDeserialize; 192 | 193 | /// The witness of the accumulator. 194 | type AccumulatorWitness: Clone + CanonicalSerialize + CanonicalDeserialize; 195 | 196 | /// The proof attesting that an accumulator was properly computed. 197 | type Proof: Clone; 198 | 199 | /// The error type used in the scheme. 200 | type Error: ark_std::error::Error; 201 | 202 | /// Outputs the public parameters of the accumulation scheme. 203 | fn setup(rng: &mut impl RngCore) -> Result; 204 | 205 | /// Outputs the prover, verifier, and decider keys, specialized for a specific index of the 206 | /// predicate. 207 | fn index( 208 | public_params: &Self::PublicParameters, 209 | predicate_params: &Self::PredicateParams, 210 | predicate_index: &Self::PredicateIndex, 211 | ) -> Result<(Self::ProverKey, Self::VerifierKey, Self::DeciderKey), Self::Error>; 212 | 213 | /// Accumulates inputs and past accumulators. Additionally outputs a proof attesting that the 214 | /// new accumulator was computed properly from the inputs and old accumulators. 215 | fn prove<'a>( 216 | prover_key: &Self::ProverKey, 217 | inputs: impl IntoIterator>, 218 | old_accumulators: impl IntoIterator>, 219 | make_zk: MakeZK<'_>, 220 | sponge: Option, 221 | ) -> Result<(Accumulator, Self::Proof), Self::Error> 222 | where 223 | Self: 'a, 224 | S: 'a; 225 | 226 | /// Verifies that the new accumulator instance was computed properly from the input instances 227 | /// and old accumulator instances. 228 | fn verify<'a>( 229 | verifier_key: &Self::VerifierKey, 230 | input_instances: impl IntoIterator, 231 | old_accumulator_instances: impl IntoIterator, 232 | new_accumulator_instance: &Self::AccumulatorInstance, 233 | proof: &Self::Proof, 234 | sponge: Option, 235 | ) -> Result 236 | where 237 | Self: 'a, 238 | S: 'a; 239 | 240 | /// Determines whether an accumulator is valid, which means every accumulated input satisfies 241 | /// the predicate. 242 | fn decide<'a>( 243 | decider_key: &Self::DeciderKey, 244 | accumulator: AccumulatorRef<'_, CF, S, Self>, 245 | sponge: Option, 246 | ) -> Result 247 | where 248 | Self: 'a; 249 | } 250 | 251 | /// A special case of an [`AccumulationScheme`] that has empty witnesses, so entire 252 | /// [`Inputs`][Input] and [`Accumulators`][Accumulator] are passed into the verifier. 253 | /// The interface is defined in [\[BCMS20\]][\[BCMS20\]] as `AccumulationScheme` and in [\[BCLMS20\]][bclms20] 254 | /// as `AtomicAccumulationScheme`. 
255 | /// 256 | /// [\[BCMS20\]]: https://eprint.iacr.org/2020/499 257 | /// [bclms20]: https://eprint.iacr.org/2020/1618 258 | pub trait AtomicAccumulationScheme>: 259 | AccumulationScheme 260 | { 261 | } 262 | 263 | #[cfg(test)] 264 | pub mod tests { 265 | use crate::data_structures::{Accumulator, Input}; 266 | use crate::{AccumulationScheme, MakeZK}; 267 | use ark_ff::PrimeField; 268 | use ark_sponge::CryptographicSponge; 269 | use ark_std::marker::PhantomData; 270 | use ark_std::rand::RngCore; 271 | use ark_std::vec::Vec; 272 | 273 | pub const NUM_ITERATIONS: usize = 50; 274 | 275 | pub trait TestParameters { 276 | fn make_zk(&self) -> bool; 277 | } 278 | 279 | /// An interface for generating inputs and accumulators to test an accumulation scheme. 280 | pub trait ASTestInput, A: AccumulationScheme> 281 | { 282 | /// Parameters for setting up the test 283 | type TestParams: TestParameters; 284 | 285 | /// Parameters for generating the inputs and accumulators 286 | type InputParams; 287 | 288 | /// Sets up the test inputs. Establishes the parameters and index for the predicate. Also 289 | /// outputs the parameters to generate accumulators and inputs for the corresponding 290 | /// predicate index. 291 | fn setup( 292 | test_params: &Self::TestParams, 293 | rng: &mut impl RngCore, 294 | ) -> (Self::InputParams, A::PredicateParams, A::PredicateIndex); 295 | 296 | /// Generates `num_inputs` inputs for one accumulation. 297 | fn generate_inputs( 298 | input_params: &Self::InputParams, 299 | num_inputs: usize, 300 | rng: &mut impl RngCore, 301 | ) -> Vec>; 302 | } 303 | 304 | pub struct TemplateParams { 305 | pub(crate) num_iterations: usize, 306 | pub(crate) num_inputs_per_iteration: Vec, 307 | } 308 | 309 | pub struct ASTests 310 | where 311 | CF: PrimeField, 312 | S: CryptographicSponge, 313 | AS: AccumulationScheme, 314 | I: ASTestInput, 315 | { 316 | _constraint_field: PhantomData, 317 | _sponge: PhantomData, 318 | _acc_scheme: PhantomData, 319 | _test_input: PhantomData, 320 | } 321 | 322 | impl ASTests 323 | where 324 | CF: PrimeField, 325 | S: CryptographicSponge, 326 | AS: AccumulationScheme, 327 | I: ASTestInput, 328 | { 329 | /// For each iteration, runs the accumulation scheme for `num_accumulations` steps of 330 | /// proving and verifying. 331 | /// At the end of the iteration, the last accumulator is put through a single decider. 332 | /// The function will return whether all of the verifiers and deciders returned true 333 | /// from all of the iterations. 
334 | pub fn test_template( 335 | template_params: &TemplateParams, 336 | test_params: &I::TestParams, 337 | ) -> Result { 338 | assert!(template_params.num_iterations > 0); 339 | 340 | let num_inputs_per_iteration = &template_params.num_inputs_per_iteration; 341 | let num_iterations = template_params.num_iterations; 342 | let total_num_inputs = num_iterations * num_inputs_per_iteration.iter().sum::(); 343 | 344 | let mut rng = ark_std::test_rng(); 345 | let public_params = AS::setup(&mut rng)?; 346 | 347 | let (input_params, predicate_params, predicate_index) = I::setup(test_params, &mut rng); 348 | let (pk, vk, dk) = AS::index(&public_params, &predicate_params, &predicate_index)?; 349 | 350 | let inputs = I::generate_inputs(&input_params, total_num_inputs, &mut rng); 351 | assert_eq!(total_num_inputs, inputs.len()); 352 | 353 | let mut inputs_start = 0; 354 | for _ in 0..num_iterations { 355 | let mut old_accumulators = Vec::with_capacity(num_inputs_per_iteration.len()); 356 | for num_inputs in num_inputs_per_iteration { 357 | let inputs = &inputs[inputs_start..(inputs_start + num_inputs)]; 358 | inputs_start += num_inputs; 359 | 360 | let (accumulator, proof) = AS::prove( 361 | &pk, 362 | Input::::map_to_refs(inputs), 363 | Accumulator::::map_to_refs(&old_accumulators), 364 | if test_params.make_zk() { 365 | MakeZK::Enabled(&mut rng) 366 | } else { 367 | MakeZK::Disabled 368 | }, 369 | None::, 370 | )?; 371 | 372 | if !AS::verify( 373 | &vk, 374 | Input::::instances(inputs), 375 | Accumulator::::instances(&old_accumulators), 376 | &accumulator.instance, 377 | &proof, 378 | None::, 379 | )? { 380 | println!("{}", format!("Verify failed")); 381 | return Ok(false); 382 | } 383 | 384 | old_accumulators.push(accumulator); 385 | } 386 | 387 | assert!(old_accumulators.len() > 0); 388 | if !AS::decide(&dk, old_accumulators.last().unwrap().as_ref(), None::)? { 389 | println!("Decide failed"); 390 | return Ok(false); 391 | } 392 | } 393 | 394 | Ok(true) 395 | } 396 | 397 | /// Tests the initialization of the first accumulator using one input. 398 | pub fn single_input_init_test(test_params: &I::TestParams) -> Result<(), AS::Error> { 399 | let template_params = TemplateParams { 400 | num_iterations: NUM_ITERATIONS, 401 | num_inputs_per_iteration: vec![1], 402 | }; 403 | assert!(Self::test_template(&template_params, test_params)?); 404 | Ok(()) 405 | } 406 | 407 | /// Tests the initialization of the first accumulator using multiple inputs. 408 | pub fn multiple_inputs_init_test(test_params: &I::TestParams) -> Result<(), AS::Error> { 409 | let template_params = TemplateParams { 410 | num_iterations: NUM_ITERATIONS, 411 | num_inputs_per_iteration: vec![3], 412 | }; 413 | assert!(Self::test_template(&template_params, test_params)?); 414 | Ok(()) 415 | } 416 | 417 | /// Tests the accumulation of one input and one accumulator. 418 | pub fn simple_accumulation_test(test_params: &I::TestParams) -> Result<(), AS::Error> { 419 | let template_params = TemplateParams { 420 | num_iterations: NUM_ITERATIONS, 421 | num_inputs_per_iteration: vec![1, 1], 422 | }; 423 | assert!(Self::test_template(&template_params, test_params)?); 424 | Ok(()) 425 | } 426 | 427 | /// Tests the accumulation of multiple inputs and multiple accumulators. 
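/// Each iteration accumulates 1, 1, 2, and 3 inputs in turn, carrying the resulting
/// accumulators forward into the later steps.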
428 | pub fn multiple_inputs_accumulation_test( 429 | test_params: &I::TestParams, 430 | ) -> Result<(), AS::Error> { 431 | let template_params = TemplateParams { 432 | num_iterations: NUM_ITERATIONS, 433 | num_inputs_per_iteration: vec![1, 1, 2, 3], 434 | }; 435 | assert!(Self::test_template(&template_params, test_params)?); 436 | Ok(()) 437 | } 438 | 439 | /// Tests the accumulation of multiple accumulators without any inputs. 440 | pub fn accumulators_only_test(test_params: &I::TestParams) -> Result<(), AS::Error> { 441 | let template_params = TemplateParams { 442 | num_iterations: NUM_ITERATIONS, 443 | num_inputs_per_iteration: vec![1, 0, 0, 0], 444 | }; 445 | 446 | assert!(Self::test_template(&template_params, test_params)?); 447 | Ok(()) 448 | } 449 | 450 | /// Tests the initialization of the first accumulator without any inputs. 451 | pub fn no_inputs_init_test(test_params: &I::TestParams) -> Result<(), AS::Error> { 452 | let template_params = TemplateParams { 453 | num_iterations: 1, 454 | num_inputs_per_iteration: vec![0], 455 | }; 456 | 457 | assert!(Self::test_template(&template_params, test_params)?); 458 | Ok(()) 459 | } 460 | } 461 | } 462 | -------------------------------------------------------------------------------- /src/r1cs_nark_as/constraints/data_structures.rs: -------------------------------------------------------------------------------- 1 | use crate::hp_as::constraints::{ 2 | InputInstanceVar as HPInputInstanceVar, ProofVar as HPProofVar, 3 | VerifierKeyVar as HPVerifierKeyVar, 4 | }; 5 | use crate::r1cs_nark_as::data_structures::{ 6 | AccumulatorInstance, InputInstance, Proof, ProofRandomness, VerifierKey, 7 | }; 8 | use crate::r1cs_nark_as::r1cs_nark::{FirstRoundMessage, FirstRoundMessageRandomness}; 9 | use crate::ConstraintF; 10 | 11 | use ark_ec::AffineCurve; 12 | use ark_ff::PrimeField; 13 | use ark_nonnative_field::NonNativeFieldVar; 14 | use ark_r1cs_std::alloc::{AllocVar, AllocationMode}; 15 | use ark_r1cs_std::fields::fp::FpVar; 16 | use ark_r1cs_std::groups::CurveVar; 17 | use ark_r1cs_std::ToBytesGadget; 18 | use ark_relations::r1cs::{Namespace, SynthesisError}; 19 | use ark_sponge::constraints::AbsorbableGadget; 20 | use ark_sponge::{collect_sponge_field_elements_gadget, Absorbable}; 21 | use ark_std::borrow::Borrow; 22 | use ark_std::marker::PhantomData; 23 | use ark_std::vec::Vec; 24 | 25 | /// The [`VerifierKey`][vk] of the [`ASForR1CSNarkVerifierGadget`][as_for_r1cs_nark_verifier]. 26 | /// 27 | /// [vk]: crate::constraints::ASVerifierGadget::VerifierKey 28 | /// [as_for_r1cs_nark_verifier]: crate::r1cs_nark_as::constraints::ASForR1CSNarkVerifierGadget 29 | pub struct VerifierKeyVar { 30 | /// The number of public input (i.e. instance) variables. 31 | pub(crate) num_instance_variables: usize, 32 | 33 | /// The verifier key for accumulation scheme for Hadamard Products. 34 | pub(crate) hp_as_vk: HPVerifierKeyVar, 35 | 36 | /// Hash of the matrices compute for the nark. 37 | pub(crate) nark_matrices_hash: Vec>, 38 | 39 | /// Hash of the matrices computed for the accumulation scheme. 
40 | pub(crate) as_matrices_hash: Vec>, 41 | } 42 | 43 | impl AllocVar for VerifierKeyVar { 44 | fn new_variable>( 45 | cs: impl Into>, 46 | f: impl FnOnce() -> Result, 47 | mode: AllocationMode, 48 | ) -> Result { 49 | let ns = cs.into(); 50 | f().and_then(|vk| { 51 | let vk = vk.borrow(); 52 | 53 | let num_instance_variables = vk.num_instance_variables; 54 | 55 | let hp_as_vk = 56 | HPVerifierKeyVar::new_variable(ns.clone(), || Ok(vk.num_constraints), mode)?; 57 | 58 | let nark_matrices_hash = vk 59 | .nark_matrices_hash 60 | .as_ref() 61 | .to_sponge_field_elements() 62 | .into_iter() 63 | .map(|f: CF| FpVar::new_variable(ns.clone(), || Ok(f), mode)) 64 | .collect::, SynthesisError>>()?; 65 | 66 | let as_matrices_hash = vk 67 | .as_matrices_hash 68 | .as_ref() 69 | .to_sponge_field_elements() 70 | .into_iter() 71 | .map(|f: CF| FpVar::new_variable(ns.clone(), || Ok(f), mode)) 72 | .collect::, SynthesisError>>()?; 73 | 74 | Ok(Self { 75 | num_instance_variables, 76 | hp_as_vk, 77 | nark_matrices_hash, 78 | as_matrices_hash, 79 | }) 80 | }) 81 | } 82 | } 83 | 84 | impl AbsorbableGadget for VerifierKeyVar { 85 | fn to_sponge_field_elements(&self) -> Result>, SynthesisError> { 86 | let num_instance_variables = FpVar::Constant(CF::from(self.num_instance_variables as u64)); 87 | collect_sponge_field_elements_gadget!( 88 | num_instance_variables, 89 | self.hp_as_vk, 90 | self.nark_matrices_hash, 91 | self.as_matrices_hash 92 | ) 93 | } 94 | } 95 | 96 | /// The sigma protocol's prover commitment. 97 | pub struct FirstRoundMessageVar>> { 98 | /// Pedersen commitment to the `Az` vector. 99 | pub(crate) comm_a: C, 100 | 101 | /// Pedersen commitment to the `Bz` vector. 102 | pub(crate) comm_b: C, 103 | 104 | /// Pedersen commitment to the `Cz` vector. 105 | pub(crate) comm_c: C, 106 | 107 | /// The randomness used for the commitment. 108 | pub(crate) randomness: Option>, 109 | 110 | #[doc(hidden)] 111 | pub(crate) _affine_phantom: PhantomData, 112 | } 113 | 114 | impl AbsorbableGadget> for FirstRoundMessageVar 115 | where 116 | G: AffineCurve, 117 | C: CurveVar> + AbsorbableGadget>, 118 | { 119 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 120 | collect_sponge_field_elements_gadget!( 121 | self.comm_a, 122 | self.comm_b, 123 | self.comm_c, 124 | self.randomness 125 | ) 126 | } 127 | } 128 | 129 | impl AllocVar, ConstraintF> for FirstRoundMessageVar 130 | where 131 | G: AffineCurve, 132 | C: CurveVar>, 133 | { 134 | fn new_variable>>( 135 | cs: impl Into>>, 136 | f: impl FnOnce() -> Result, 137 | mode: AllocationMode, 138 | ) -> Result { 139 | let ns = cs.into(); 140 | f().and_then(|first_round_msg| { 141 | let first_round_msg = first_round_msg.borrow(); 142 | 143 | let comm_a = C::new_variable(ns.clone(), || Ok(first_round_msg.comm_a.clone()), mode)?; 144 | let comm_b = C::new_variable(ns.clone(), || Ok(first_round_msg.comm_b.clone()), mode)?; 145 | let comm_c = C::new_variable(ns.clone(), || Ok(first_round_msg.comm_c.clone()), mode)?; 146 | 147 | let randomness = first_round_msg 148 | .randomness 149 | .clone() 150 | .map(|r| FirstRoundMessageRandomnessVar::new_variable(ns.clone(), || Ok(r), mode)) 151 | .transpose()?; 152 | 153 | Ok(Self { 154 | comm_a, 155 | comm_b, 156 | comm_c, 157 | randomness, 158 | _affine_phantom: PhantomData, 159 | }) 160 | }) 161 | } 162 | } 163 | 164 | /// The sigma protocol's prover commitment randomness. 
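/// The constraints equivalent of [`FirstRoundMessageRandomness`].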
165 | pub struct FirstRoundMessageRandomnessVar< 166 | G: AffineCurve, 167 | C: CurveVar>, 168 | > { 169 | /// Pedersen commitment to the vector that blinds the witness in `Az`. 170 | pub(crate) comm_r_a: C, 171 | 172 | /// Pedersen commitment to the vector that blinds the witness in `Bz`. 173 | pub(crate) comm_r_b: C, 174 | 175 | /// Pedersen commitment to the vector that blinds the witness in `Cz`. 176 | pub(crate) comm_r_c: C, 177 | 178 | /// Pedersen commitment to the first cross term randomness vector. 179 | pub(crate) comm_1: C, 180 | 181 | /// Pedersen commitment to the second cross term randomness vector. 182 | pub(crate) comm_2: C, 183 | 184 | #[doc(hidden)] 185 | pub(crate) _affine_phantom: PhantomData, 186 | } 187 | 188 | impl AbsorbableGadget> for FirstRoundMessageRandomnessVar 189 | where 190 | G: AffineCurve, 191 | C: CurveVar> + AbsorbableGadget>, 192 | { 193 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 194 | collect_sponge_field_elements_gadget!( 195 | self.comm_r_a, 196 | self.comm_r_b, 197 | self.comm_r_c, 198 | self.comm_1, 199 | self.comm_2 200 | ) 201 | } 202 | } 203 | 204 | impl AllocVar, ConstraintF> 205 | for FirstRoundMessageRandomnessVar 206 | where 207 | G: AffineCurve, 208 | C: CurveVar>, 209 | { 210 | fn new_variable>>( 211 | cs: impl Into>>, 212 | f: impl FnOnce() -> Result, 213 | mode: AllocationMode, 214 | ) -> Result { 215 | let ns = cs.into(); 216 | f().and_then(|first_round_msg| { 217 | let first_round_msg_randomness = first_round_msg.borrow(); 218 | 219 | let comm_r_a = 220 | C::new_variable(ns.clone(), || Ok(first_round_msg_randomness.comm_r_a), mode)?; 221 | let comm_r_b = 222 | C::new_variable(ns.clone(), || Ok(first_round_msg_randomness.comm_r_b), mode)?; 223 | let comm_r_c = 224 | C::new_variable(ns.clone(), || Ok(first_round_msg_randomness.comm_r_c), mode)?; 225 | let comm_1 = 226 | C::new_variable(ns.clone(), || Ok(first_round_msg_randomness.comm_1), mode)?; 227 | let comm_2 = 228 | C::new_variable(ns.clone(), || Ok(first_round_msg_randomness.comm_2), mode)?; 229 | 230 | Ok(Self { 231 | comm_r_a, 232 | comm_r_b, 233 | comm_r_c, 234 | comm_1, 235 | comm_2, 236 | _affine_phantom: PhantomData, 237 | }) 238 | }) 239 | } 240 | } 241 | 242 | /// The [`InputInstance`][input_instance] of the 243 | /// [`ASForR1CSNarkVerifierGadget`][as_for_r1cs_nark_verifier]. 244 | /// 245 | /// [input_instance]: crate::constraints::ASVerifierGadget::InputInstance 246 | /// [as_for_r1cs_nark_verifier]: crate::r1cs_nark_as::constraints::ASForR1CSNarkVerifierGadget 247 | pub struct InputInstanceVar>> { 248 | /// An R1CS input. 249 | pub r1cs_input: Vec>>, 250 | 251 | /// The sigma protocol's prover commitment of the NARK. 
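/// The constraints equivalent of [`FirstRoundMessage`].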
252 | pub first_round_message: FirstRoundMessageVar, 253 | } 254 | 255 | impl AbsorbableGadget> for InputInstanceVar 256 | where 257 | G: AffineCurve, 258 | C: CurveVar> + AbsorbableGadget>, 259 | { 260 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 261 | let mut r1cs_input_bytes = Vec::new(); 262 | for elem in &self.r1cs_input { 263 | r1cs_input_bytes.append(&mut elem.to_bytes()?); 264 | } 265 | 266 | collect_sponge_field_elements_gadget!(r1cs_input_bytes, self.first_round_message) 267 | } 268 | } 269 | 270 | impl AllocVar, ConstraintF> for InputInstanceVar 271 | where 272 | G: AffineCurve, 273 | C: CurveVar>, 274 | { 275 | fn new_variable>>( 276 | cs: impl Into>>, 277 | f: impl FnOnce() -> Result, 278 | mode: AllocationMode, 279 | ) -> Result { 280 | let ns = cs.into(); 281 | f().and_then(|instance| { 282 | let instance = instance.borrow(); 283 | let r1cs_input = instance 284 | .r1cs_input 285 | .clone() 286 | .into_iter() 287 | .map(|elem| { 288 | NonNativeFieldVar::>::new_variable( 289 | ns.clone(), 290 | || Ok(elem), 291 | mode, 292 | ) 293 | }) 294 | .collect::, SynthesisError>>()?; 295 | 296 | let first_round_message = FirstRoundMessageVar::new_variable( 297 | ns.clone(), 298 | || Ok(instance.first_round_message.clone()), 299 | mode, 300 | )?; 301 | 302 | Ok(Self { 303 | r1cs_input, 304 | first_round_message, 305 | }) 306 | }) 307 | } 308 | } 309 | 310 | /// The [`AccumulatorInstance`][acc_instance] of the 311 | /// [`ASForR1CSNarkVerifierGadget`][as_for_r1cs_nark_verifier]. 312 | /// 313 | /// [acc_instance]: crate::constraints::ASVerifierGadget::AccumulatorInstance 314 | /// [as_for_r1cs_nark_verifier]: crate::r1cs_nark_as::constraints::ASForR1CSNarkVerifierGadget 315 | pub struct AccumulatorInstanceVar>> { 316 | /// An input for the indexed relation. 317 | pub(crate) r1cs_input: Vec>>, 318 | 319 | /// Pedersen commitment to the `Az` vector. 320 | pub(crate) comm_a: C, 321 | 322 | /// Pedersen commitment to the `Bz` vector. 323 | pub(crate) comm_b: C, 324 | 325 | /// Pedersen commitment to the `Cz` vector. 326 | pub(crate) comm_c: C, 327 | 328 | /// The Hadamard product accumulation scheme input instance.
329 | pub(crate) hp_instance: HPInputInstanceVar, 330 | } 331 | 332 | impl AbsorbableGadget> for AccumulatorInstanceVar 333 | where 334 | G: AffineCurve, 335 | C: CurveVar> + AbsorbableGadget>, 336 | { 337 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 338 | let mut r1cs_input_bytes = Vec::new(); 339 | for elem in &self.r1cs_input { 340 | r1cs_input_bytes.append(&mut elem.to_bytes()?); 341 | } 342 | 343 | collect_sponge_field_elements_gadget!( 344 | r1cs_input_bytes, 345 | self.comm_a, 346 | self.comm_b, 347 | self.comm_c, 348 | self.hp_instance 349 | ) 350 | } 351 | } 352 | 353 | impl AllocVar, ConstraintF> for AccumulatorInstanceVar 354 | where 355 | G: AffineCurve, 356 | C: CurveVar>, 357 | { 358 | fn new_variable>>( 359 | cs: impl Into>>, 360 | f: impl FnOnce() -> Result, 361 | mode: AllocationMode, 362 | ) -> Result { 363 | let ns = cs.into(); 364 | f().and_then(|instance| { 365 | let instance = instance.borrow(); 366 | 367 | let r1cs_input = instance 368 | .r1cs_input 369 | .clone() 370 | .into_iter() 371 | .map(|elem| { 372 | NonNativeFieldVar::>::new_variable( 373 | ns.clone(), 374 | || Ok(elem), 375 | mode, 376 | ) 377 | }) 378 | .collect::, SynthesisError>>()?; 379 | let comm_a = C::new_variable(ns.clone(), || Ok(instance.comm_a.clone()), mode)?; 380 | let comm_b = C::new_variable(ns.clone(), || Ok(instance.comm_b.clone()), mode)?; 381 | let comm_c = C::new_variable(ns.clone(), || Ok(instance.comm_c.clone()), mode)?; 382 | let hp_instance = HPInputInstanceVar::new_variable( 383 | ns.clone(), 384 | || Ok(instance.hp_instance.clone()), 385 | mode, 386 | )?; 387 | 388 | Ok(Self { 389 | r1cs_input, 390 | comm_a, 391 | comm_b, 392 | comm_c, 393 | hp_instance, 394 | }) 395 | }) 396 | } 397 | } 398 | 399 | /// The [`Proof`][proof_var] of the [`ASForR1CSNarkVerifierGadget`][as_for_r1cs_nark_verifier]. 400 | /// 401 | /// [proof_var]: crate::constraints::ASVerifierGadget::Proof 402 | /// [as_for_r1cs_nark_verifier]: crate::r1cs_nark_as::constraints::ASForR1CSNarkVerifierGadget 403 | pub struct ProofVar>> { 404 | /// The Hadamard product accumulation scheme proof. 405 | pub(crate) hp_proof: HPProofVar, 406 | 407 | /// Randomness or their commitments used to blind the vectors of the indexed relation. 408 | pub(crate) randomness: Option>, 409 | } 410 | 411 | impl AllocVar, ConstraintF> for ProofVar 412 | where 413 | G: AffineCurve, 414 | C: CurveVar>, 415 | { 416 | fn new_variable>>( 417 | cs: impl Into>>, 418 | f: impl FnOnce() -> Result, 419 | mode: AllocationMode, 420 | ) -> Result { 421 | let ns = cs.into(); 422 | f().and_then(|proof| { 423 | let proof = proof.borrow(); 424 | 425 | let hp_proof = 426 | HPProofVar::new_variable(ns.clone(), || Ok(proof.hp_proof.clone()), mode)?; 427 | 428 | let randomness = proof 429 | .randomness 430 | .as_ref() 431 | .map(|randomness| { 432 | ProofRandomnessVar::new_variable(ns.clone(), || Ok(randomness.clone()), mode) 433 | }) 434 | .transpose()?; 435 | 436 | Ok(Self { 437 | hp_proof, 438 | randomness, 439 | }) 440 | }) 441 | } 442 | } 443 | 444 | /// The randomness or their commitments used to blind the vectors of the indexed relation. 445 | pub(crate) struct ProofRandomnessVar>> { 446 | /// Randomness used to blind the R1CS input. 447 | pub(crate) r1cs_r_input: Vec>>, 448 | 449 | /// Pedersen commitment to the vector that blinds the witness in `Az`. 450 | pub(crate) comm_r_a: C, 451 | 452 | /// Pedersen commitment to the vector that blinds the witness in `Bz`. 
453 | pub(crate) comm_r_b: C, 454 | 455 | /// Pedersen commitment to the vector that blinds the witness in `Cz`. 456 | pub(crate) comm_r_c: C, 457 | } 458 | 459 | impl AbsorbableGadget> for ProofRandomnessVar 460 | where 461 | G: AffineCurve, 462 | C: CurveVar> + AbsorbableGadget>, 463 | { 464 | fn to_sponge_field_elements(&self) -> Result>>, SynthesisError> { 465 | let mut r1cs_r_input_bytes = Vec::new(); 466 | for elem in &self.r1cs_r_input { 467 | r1cs_r_input_bytes.append(&mut elem.to_bytes()?); 468 | } 469 | 470 | collect_sponge_field_elements_gadget!( 471 | r1cs_r_input_bytes, 472 | self.comm_r_a, 473 | self.comm_r_b, 474 | self.comm_r_c 475 | ) 476 | } 477 | } 478 | 479 | impl AllocVar, ConstraintF> for ProofRandomnessVar 480 | where 481 | G: AffineCurve, 482 | C: CurveVar>, 483 | { 484 | fn new_variable>>( 485 | cs: impl Into>>, 486 | f: impl FnOnce() -> Result, 487 | mode: AllocationMode, 488 | ) -> Result { 489 | let ns = cs.into(); 490 | f().and_then(|randomness| { 491 | let randomness = randomness.borrow(); 492 | 493 | let r1cs_r_input = randomness 494 | .r1cs_r_input 495 | .clone() 496 | .into_iter() 497 | .map(|elem| { 498 | NonNativeFieldVar::>::new_variable( 499 | ns.clone(), 500 | || Ok(elem), 501 | mode, 502 | ) 503 | }) 504 | .collect::, SynthesisError>>()?; 505 | 506 | let comm_r_a = C::new_variable(ns.clone(), || Ok(randomness.comm_r_a.clone()), mode)?; 507 | let comm_r_b = C::new_variable(ns.clone(), || Ok(randomness.comm_r_b.clone()), mode)?; 508 | let comm_r_c = C::new_variable(ns.clone(), || Ok(randomness.comm_r_c.clone()), mode)?; 509 | 510 | Ok(Self { 511 | r1cs_r_input, 512 | comm_r_a, 513 | comm_r_b, 514 | comm_r_c, 515 | }) 516 | }) 517 | } 518 | } 519 | -------------------------------------------------------------------------------- /src/r1cs_nark_as/r1cs_nark/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::r1cs_nark_as::CHALLENGE_SIZE; 2 | use crate::ConstraintF; 3 | 4 | use ark_ec::AffineCurve; 5 | use ark_ff::{BigInteger, Field, PrimeField, Zero}; 6 | use ark_poly_commit::trivial_pc::PedersenCommitment; 7 | use ark_relations::r1cs::{ 8 | ConstraintSynthesizer, ConstraintSystem, Matrix, OptimizationGoal, SynthesisError, 9 | SynthesisMode, 10 | }; 11 | use ark_serialize::CanonicalSerialize; 12 | use ark_sponge::{absorb, Absorbable, CryptographicSponge, FieldElementSize}; 13 | use ark_std::rand::RngCore; 14 | use ark_std::vec; 15 | use ark_std::vec::Vec; 16 | use ark_std::{cfg_into_iter, cfg_iter, marker::PhantomData, UniformRand}; 17 | use blake2::{digest::VariableOutput, VarBlake2b}; 18 | 19 | #[cfg(feature = "parallel")] 20 | use rayon::prelude::*; 21 | 22 | mod data_structures; 23 | pub use data_structures::*; 24 | 25 | type R1CSResult = Result; 26 | 27 | pub(crate) const PROTOCOL_NAME: &[u8] = b"R1CS-NARK-2020"; 28 | 29 | /// A simple non-interactive argument of knowledge for R1CS. 30 | /// The construction is described in detail in Section 8 of [\[BCLMS20\]][bclms20]. 
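/// In outline, the protocol implemented below proceeds as follows (see `prove` and
/// `verify` in this module):
/// 1. The prover synthesizes the R1CS instance, computes z_M = M * (x || w) for
///    M in {A, B, C}, and sends Pedersen commitments to z_A, z_B, z_C (plus, when
///    zero knowledge is requested, commitments to masking vectors r_M, to the cross
///    term z_A ○ r_B + z_B ○ r_A, and to r_A ○ r_B).
/// 2. A challenge gamma is squeezed from a sponge that has absorbed the matrix hash,
///    the input, and the first-round message.
/// 3. The prover replies with the blinded witness s = w + gamma * r and the combined
///    commitment randomness (sigma_a, sigma_b, sigma_c, sigma_o).
/// 4. The verifier recomputes gamma, recomputes M * (x || s), and checks the
///    commitments homomorphically, including one Hadamard-product check.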
31 | /// 32 | /// [bclms20]: https://eprint.iacr.org/2020/1618 33 | pub struct R1CSNark 34 | where 35 | G: AffineCurve + Absorbable>, 36 | ConstraintF: Absorbable>, 37 | S: CryptographicSponge>, 38 | { 39 | _affine: PhantomData, 40 | _sponge: PhantomData, 41 | } 42 | 43 | impl R1CSNark 44 | where 45 | G: AffineCurve + Absorbable>, 46 | ConstraintF: Absorbable>, 47 | S: CryptographicSponge>, 48 | { 49 | pub(crate) fn compute_challenge( 50 | matrices_hash: &[u8; 32], 51 | input: &[G::ScalarField], 52 | msg: &FirstRoundMessage, 53 | mut sponge: S, 54 | ) -> G::ScalarField { 55 | sponge.absorb(&matrices_hash.as_ref()); 56 | 57 | let input_bytes = input 58 | .iter() 59 | .flat_map(|inp| inp.into_repr().to_bytes_le()) 60 | .collect::>(); 61 | 62 | absorb!(&mut sponge, input_bytes, msg); 63 | 64 | let out = sponge 65 | .squeeze_nonnative_field_elements_with_sizes(&[FieldElementSize::Truncated( 66 | CHALLENGE_SIZE, 67 | )]) 68 | .pop() 69 | .unwrap(); 70 | 71 | out 72 | } 73 | 74 | /// Performs a setup for R1CS. This function does not currently do anything meaningful. 75 | pub fn setup() -> PublicParameters {} 76 | 77 | /// Outputs a specialized prover and verifier key for some R1CS instance. 78 | pub fn index>( 79 | _pp: &PublicParameters, 80 | r1cs_instance: C, 81 | ) -> R1CSResult<(IndexProverKey, IndexVerifierKey)> { 82 | let constraint_time = start_timer!(|| "Generating constraints"); 83 | 84 | let ics = ConstraintSystem::new_ref(); 85 | ics.set_optimization_goal(OptimizationGoal::Constraints); 86 | ics.set_mode(SynthesisMode::Setup); 87 | r1cs_instance.generate_constraints(ics.clone())?; 88 | 89 | end_timer!(constraint_time); 90 | 91 | let matrix_processing_time = start_timer!(|| "Processing matrices"); 92 | ics.finalize(); 93 | 94 | let matrices = ics.to_matrices().expect("should not be `None`"); 95 | let (a, b, c) = (matrices.a, matrices.b, matrices.c); 96 | let (num_input_variables, num_witness_variables, num_constraints) = ( 97 | ics.num_instance_variables(), 98 | ics.num_witness_variables(), 99 | ics.num_constraints(), 100 | ); 101 | 102 | end_timer!(matrix_processing_time); 103 | 104 | let matrices_hash = hash_matrices(PROTOCOL_NAME, &a, &b, &c); 105 | 106 | let num_variables = num_input_variables + num_witness_variables; 107 | let pp = PedersenCommitment::setup(num_constraints); 108 | let ck = PedersenCommitment::trim(&pp, num_constraints); 109 | let index_info = IndexInfo { 110 | num_variables, 111 | num_constraints, 112 | num_instance_variables: num_input_variables, 113 | matrices_hash, 114 | }; 115 | let ipk = IndexProverKey { 116 | index_info, 117 | a, 118 | b, 119 | c, 120 | ck, 121 | }; 122 | let ivk = ipk.clone(); 123 | Ok((ipk, ivk)) 124 | } 125 | 126 | /// Proves that some R1CS relation holds. 127 | pub fn prove>( 128 | ipk: &IndexProverKey, 129 | r1cs: C, 130 | make_zk: bool, 131 | sponge: Option, 132 | mut rng: Option<&mut dyn RngCore>, 133 | ) -> R1CSResult> { 134 | let init_time = start_timer!(|| "NARK::Prover"); 135 | 136 | // Step 1 of the scheme's prover, as detailed in BCLMS20.
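// Step 1 synthesizes the circuit in `Prove` mode (the matrices are not rebuilt here;
// they already live in `ipk`) and extracts the full variable assignment. Note that
// `instance_assignment` always begins with the constant 1. Illustratively, for the
// `DummyCircuit` in the test module at the bottom of this file this step yields
//   input   = [1, a*b]           (the constant one, then the public input c)
//   witness = [a, b, a, a, ...]  (the witness variables in allocation order)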
137 | let constraint_time = start_timer!(|| "Generating constraints and witnesses"); 138 | let pcs = ConstraintSystem::new_ref(); 139 | pcs.set_optimization_goal(OptimizationGoal::Constraints); 140 | pcs.set_mode(ark_relations::r1cs::SynthesisMode::Prove { 141 | construct_matrices: false, 142 | }); 143 | r1cs.generate_constraints(pcs.clone())?; 144 | end_timer!(constraint_time); 145 | 146 | pcs.finalize(); 147 | let (input, witness, num_constraints) = { 148 | let pcs = pcs.borrow().unwrap(); 149 | ( 150 | pcs.instance_assignment.as_slice().to_vec(), 151 | pcs.witness_assignment.as_slice().to_vec(), 152 | pcs.num_constraints, 153 | ) 154 | }; 155 | 156 | let num_input_variables = input.len(); 157 | let num_witness_variables = witness.len(); 158 | let num_variables = num_input_variables + num_witness_variables; 159 | 160 | assert_eq!(ipk.index_info.num_variables, num_variables); 161 | assert_eq!(ipk.index_info.num_constraints, num_constraints); 162 | 163 | // Step 2 of the scheme's prover, as detailed in BCLMS20. 164 | let r = if make_zk { 165 | // Sample r 166 | let randomizer_time = start_timer!(|| "Sampling randomizer r"); 167 | 168 | let rng = rng.as_mut().unwrap(); 169 | let mut r = Vec::with_capacity(num_witness_variables); 170 | for _ in 0..num_witness_variables { 171 | r.push(G::ScalarField::rand(rng)) 172 | } 173 | 174 | end_timer!(randomizer_time); 175 | 176 | Some(r) 177 | } else { 178 | None 179 | }; 180 | 181 | // Step 3 of the scheme's prover, as detailed in BCLMS20. 182 | let eval_z_m_time = start_timer!(|| "Evaluating z_M"); 183 | let z_a = matrix_vec_mul(&ipk.a, &input, &witness); 184 | let z_b = matrix_vec_mul(&ipk.b, &input, &witness); 185 | let z_c = matrix_vec_mul(&ipk.c, &input, &witness); 186 | end_timer!(eval_z_m_time); 187 | 188 | let (r_a, r_b, r_c) = if make_zk { 189 | let r_ref = r.as_ref().unwrap(); 190 | let zeros = vec![G::ScalarField::zero(); num_input_variables]; 191 | 192 | // Compute r_a, r_b, r_c. 193 | let eval_r_m_time = start_timer!(|| "Evaluating r_M"); 194 | let r_a = matrix_vec_mul(&ipk.a, &zeros, r_ref); 195 | let r_b = matrix_vec_mul(&ipk.b, &zeros, r_ref); 196 | let r_c = matrix_vec_mul(&ipk.c, &zeros, r_ref); 197 | end_timer!(eval_r_m_time); 198 | 199 | (Some(r_a), Some(r_b), Some(r_c)) 200 | } else { 201 | (None, None, None) 202 | }; 203 | 204 | // Step 4 of the scheme's prover, as detailed in BCLMS20. 205 | // Sample blinders for z_a, z_b, z_c. 206 | let (mut a_blinder, mut b_blinder, mut c_blinder) = (None, None, None); 207 | if make_zk { 208 | let rng = rng.as_mut().unwrap(); 209 | a_blinder = Some(G::ScalarField::rand(rng)); 210 | b_blinder = Some(G::ScalarField::rand(rng)); 211 | c_blinder = Some(G::ScalarField::rand(rng)); 212 | } 213 | 214 | let commit_time = start_timer!(|| "Committing to z_A, z_B, and z_C"); 215 | // Compute hiding commitments to z_a, z_b, z_c. 216 | let comm_a = PedersenCommitment::commit(&ipk.ck, &z_a, a_blinder); 217 | let comm_b = PedersenCommitment::commit(&ipk.ck, &z_b, b_blinder); 218 | let comm_c = PedersenCommitment::commit(&ipk.ck, &z_c, c_blinder); 219 | 220 | end_timer!(commit_time); 221 | 222 | let (mut r_a_blinder, mut r_b_blinder, mut r_c_blinder) = (None, None, None); 223 | let (mut blinder_1, mut blinder_2) = (None, None); 224 | let first_round_randomness = if make_zk { 225 | let rng = rng.as_mut().unwrap(); 226 | 227 | // Sample blinders for r_a, r_b, r_c. 
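// Background for the zero-knowledge branch: the random vector r sampled in Step 2 and
// the derived vectors r_M = M * (0 || r) computed above make the later opening
//   s = w + gamma * r
// hide the witness while preserving the linear relations, since
//   M * (x || s) = M * (x || w) + gamma * M * (0 || r) = z_M + gamma * r_M.
// Toy check with made-up numbers mod 17 (purely illustrative): for a single row
// M = [1, 2, 3], x = [1], w = [4, 5], r = [2, 3], gamma = 7:
//   z_M = 1 + 2*4 + 3*5 = 24 = 7,       r_M = 2*2 + 3*3 = 13,
//   s   = [4 + 7*2, 5 + 7*3] = [1, 9],  M * (x || s) = 1 + 2*1 + 3*9 = 30 = 13,
// and indeed z_M + gamma * r_M = 7 + 7*13 = 98 = 13 (mod 17).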
228 | r_a_blinder = Some(G::ScalarField::rand(rng)); 229 | r_b_blinder = Some(G::ScalarField::rand(rng)); 230 | r_c_blinder = Some(G::ScalarField::rand(rng)); 231 | 232 | // Commit to r_a, r_b, r_c. 233 | let commit_time = start_timer!(|| "Committing to r_A, r_B, r_C"); 234 | let comm_r_a = PedersenCommitment::commit(&ipk.ck, r_a.as_ref().unwrap(), r_a_blinder); 235 | let comm_r_b = PedersenCommitment::commit(&ipk.ck, r_b.as_ref().unwrap(), r_b_blinder); 236 | let comm_r_c = PedersenCommitment::commit(&ipk.ck, r_c.as_ref().unwrap(), r_c_blinder); 237 | end_timer!(commit_time); 238 | 239 | // Step 5 of the scheme's prover, as detailed in BCLMS20. 240 | // Commit to z_a ○ r_b + z_b ○ r_a. 241 | let cross_prod_time = start_timer!(|| "Computing cross product z_a ○ r_b + z_b ○ r_a"); 242 | let z_a_times_r_b = cfg_iter!(z_a).zip(r_b.as_ref().unwrap()); 243 | let z_b_times_r_a = cfg_iter!(z_b).zip(r_a.as_ref().unwrap()); 244 | let cross_product: Vec<_> = z_a_times_r_b 245 | .zip(z_b_times_r_a) 246 | .map(|((z_a, r_b), (z_b, r_a))| *z_a * r_b + *z_b * r_a) 247 | .collect(); 248 | end_timer!(cross_prod_time); 249 | blinder_1 = Some(G::ScalarField::rand(rng)); 250 | let commit_time = start_timer!(|| "Committing to cross product"); 251 | let comm_1 = PedersenCommitment::commit(&ipk.ck, &cross_product, blinder_1); 252 | end_timer!(commit_time); 253 | 254 | // Commit to r_a ○ r_b. 255 | let commit_time = start_timer!(|| "Committing to r_a ○ r_b"); 256 | let r_a_r_b_product: Vec<_> = cfg_iter!(r_a.as_ref().unwrap()) 257 | .zip(r_b.unwrap()) 258 | .map(|(r_a, r_b)| r_b * r_a) 259 | .collect(); 260 | blinder_2 = Some(G::ScalarField::rand(rng)); 261 | let comm_2 = PedersenCommitment::commit(&ipk.ck, &r_a_r_b_product, blinder_2); 262 | end_timer!(commit_time); 263 | 264 | Some(FirstRoundMessageRandomness { 265 | comm_r_a, 266 | comm_r_b, 267 | comm_r_c, 268 | comm_1, 269 | comm_2, 270 | }) 271 | } else { 272 | None 273 | }; 274 | 275 | // Step 6 of the scheme's prover, as detailed in BCLMS20. 276 | let first_msg = FirstRoundMessage { 277 | comm_a, 278 | comm_b, 279 | comm_c, 280 | randomness: first_round_randomness, 281 | }; 282 | 283 | // Step 7 of the scheme's prover, as detailed in BCLMS20. 284 | let gamma = Self::compute_challenge( 285 | &ipk.index_info.matrices_hash, 286 | &input, 287 | &first_msg, 288 | sponge.unwrap_or_else(|| S::new()), 289 | ); 290 | 291 | let mut blinded_witness = witness; 292 | let second_round_randomness = if make_zk { 293 | // Step 8 of the scheme's prover, as detailed in BCLMS20. 294 | ark_std::cfg_iter_mut!(blinded_witness) 295 | .zip(r.unwrap()) 296 | .for_each(|(s, r)| *s += gamma * r); 297 | 298 | // Step 9 of the scheme's prover, as detailed in BCLMS20. 299 | let sigma_a = a_blinder.unwrap() + gamma * r_a_blinder.unwrap(); 300 | let sigma_b = b_blinder.unwrap() + gamma * r_b_blinder.unwrap(); 301 | let sigma_c = c_blinder.unwrap() + gamma * r_c_blinder.unwrap(); 302 | 303 | // Step 10 of the scheme's prover, as detailed in BCLMS20. 304 | let sigma_o = c_blinder.unwrap() 305 | + gamma * blinder_1.unwrap() 306 | + gamma.square() * blinder_2.unwrap(); 307 | 308 | Some(SecondRoundMessageRandomness { 309 | sigma_a, 310 | sigma_b, 311 | sigma_c, 312 | sigma_o, 313 | }) 314 | } else { 315 | None 316 | }; 317 | 318 | // Step 11 of the scheme's prover, as detailed in BCLMS20. 319 | let second_msg = SecondRoundMessage { 320 | blinded_witness, 321 | randomness: second_round_randomness, 322 | }; 323 | 324 | // Step 12 of the scheme's prover, as detailed in BCLMS20. 
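// Background for the randomness assembled above: for a satisfying assignment,
// z_A ○ z_B = z_C, so after blinding with gamma the verifier's Hadamard check relies
// on the expansion
//   (z_A + gamma * r_A) ○ (z_B + gamma * r_B)
//     = z_C + gamma * (z_A ○ r_B + z_B ○ r_A) + gamma^2 * (r_A ○ r_B),
// which is why `comm_1` commits to the cross term, `comm_2` commits to r_A ○ r_B, and
// sigma_o combines their blinders with weights gamma and gamma^2. Toy check with
// made-up scalars (illustrative only): z_A = 2, z_B = 3, r_A = 5, r_B = 1, gamma = 4
// gives (2 + 20) * (3 + 4) = 154 and 6 + 4*(2*1 + 3*5) + 16*5 = 154.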
325 | let proof = Proof { 326 | first_msg, 327 | second_msg, 328 | }; 329 | 330 | end_timer!(init_time); 331 | Ok(proof) 332 | } 333 | 334 | /// Verifies that some R1CS relation holds. 335 | pub fn verify( 336 | ivk: &IndexVerifierKey, 337 | input: &[G::ScalarField], 338 | proof: &Proof, 339 | sponge: Option, 340 | ) -> bool { 341 | let init_time = start_timer!(|| "NARK::Verifier"); 342 | if proof.first_msg.randomness.is_some() != proof.second_msg.randomness.is_some() { 343 | return false; 344 | } 345 | 346 | // Step 2 of the scheme's verifier, as detailed in BCLMS20. 347 | let gamma = Self::compute_challenge( 348 | &ivk.index_info.matrices_hash, 349 | &input, 350 | &proof.first_msg, 351 | sponge.unwrap_or_else(|| S::new()), 352 | ); 353 | 354 | // Step 3 of the scheme's verifier, as detailed in BCLMS20. 355 | let mat_vec_mul_time = start_timer!(|| "Computing M * blinded_witness"); 356 | let a_times_blinded_witness = 357 | matrix_vec_mul(&ivk.a, &input, &proof.second_msg.blinded_witness); 358 | let b_times_blinded_witness = 359 | matrix_vec_mul(&ivk.b, &input, &proof.second_msg.blinded_witness); 360 | let c_times_blinded_witness = 361 | matrix_vec_mul(&ivk.c, &input, &proof.second_msg.blinded_witness); 362 | end_timer!(mat_vec_mul_time); 363 | 364 | // Step 4 of the scheme's verifier, as detailed in BCLMS20. 365 | let mut comm_a = proof.first_msg.comm_a.into_projective(); 366 | let mut comm_b = proof.first_msg.comm_b.into_projective(); 367 | let mut comm_c = proof.first_msg.comm_c.into_projective(); 368 | if let Some(first_msg_randomness) = proof.first_msg.randomness.as_ref() { 369 | comm_a += first_msg_randomness.comm_r_a.mul(gamma); 370 | comm_b += first_msg_randomness.comm_r_b.mul(gamma); 371 | comm_c += first_msg_randomness.comm_r_c.mul(gamma); 372 | } 373 | 374 | let commit_time = start_timer!(|| "Reconstructing c_A, c_B, c_C commitments"); 375 | let reconstructed_comm_a = PedersenCommitment::commit( 376 | &ivk.ck, 377 | &a_times_blinded_witness, 378 | proof.second_msg.randomness.as_ref().map(|r| r.sigma_a), 379 | ); 380 | let reconstructed_comm_b = PedersenCommitment::commit( 381 | &ivk.ck, 382 | &b_times_blinded_witness, 383 | proof.second_msg.randomness.as_ref().map(|r| r.sigma_b), 384 | ); 385 | let reconstructed_comm_c = PedersenCommitment::commit( 386 | &ivk.ck, 387 | &c_times_blinded_witness, 388 | proof.second_msg.randomness.as_ref().map(|r| r.sigma_c), 389 | ); 390 | 391 | let a_equal = comm_a == reconstructed_comm_a.into_projective(); 392 | let b_equal = comm_b == reconstructed_comm_b.into_projective(); 393 | let c_equal = comm_c == reconstructed_comm_c.into_projective(); 394 | drop(c_times_blinded_witness); 395 | end_timer!(commit_time); 396 | 397 | // Step 5 of the scheme's verifier, as detailed in BCLMS20. 
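// The comparisons in Step 4 above work because Pedersen commitments are additively
// homomorphic: writing (schematically) Commit(v, b) = <v, ck> + b * H,
//   Commit(z_M, blinder_M) + gamma * Commit(r_M, r_M_blinder)
//     = Commit(z_M + gamma * r_M, blinder_M + gamma * r_M_blinder)
//     = Commit(M * (x || s), sigma_M),
// which is exactly what the `reconstructed_comm_*` values recompute from the blinded
// witness. Step 5 below applies the same idea to the Hadamard product, using
// comm_c + gamma * comm_1 + gamma^2 * comm_2 as the target.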
398 | let had_prod_time = start_timer!(|| "Computing Hadamard product and commitment to it"); 399 | let had_prod: Vec<_> = cfg_into_iter!(a_times_blinded_witness) 400 | .zip(b_times_blinded_witness) 401 | .map(|(a, b)| a * b) 402 | .collect(); 403 | let reconstructed_had_prod_comm = PedersenCommitment::commit( 404 | &ivk.ck, 405 | &had_prod, 406 | proof.second_msg.randomness.as_ref().map(|r| r.sigma_o), 407 | ); 408 | end_timer!(had_prod_time); 409 | 410 | let mut had_prod_comm = proof.first_msg.comm_c.into_projective(); 411 | if let Some(first_msg_randomness) = proof.first_msg.randomness.as_ref() { 412 | had_prod_comm += first_msg_randomness.comm_1.mul(gamma); 413 | had_prod_comm += first_msg_randomness.comm_2.mul(gamma.square()); 414 | } 415 | let had_prod_equal = had_prod_comm == reconstructed_had_prod_comm.into_projective(); 416 | add_to_trace!(|| "Verifier result", || format!("A equal: {}, B equal: {}, C equal: {}, Hadamard Product equal: {}", a_equal, b_equal, c_equal, had_prod_equal)); 417 | end_timer!(init_time); 418 | a_equal & b_equal & c_equal & had_prod_equal 419 | } 420 | } 421 | 422 | pub(crate) fn hash_matrices( 423 | domain_separator: &[u8], 424 | a: &Matrix, 425 | b: &Matrix, 426 | c: &Matrix, 427 | ) -> [u8; 32] { 428 | let mut serialized_matrices = domain_separator.to_vec(); 429 | a.serialize(&mut serialized_matrices).unwrap(); 430 | b.serialize(&mut serialized_matrices).unwrap(); 431 | c.serialize(&mut serialized_matrices).unwrap(); 432 | 433 | let mut hasher = VarBlake2b::new(32).unwrap(); 434 | digest::Update::update(&mut hasher, &serialized_matrices); 435 | 436 | let mut matrices_hash = [0u8; 32]; 437 | hasher.finalize_variable(|res| matrices_hash.copy_from_slice(res)); 438 | 439 | matrices_hash 440 | } 441 | 442 | // Computes `matrix * (input || witness)`. 
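// The matrices use the sparse `ark_relations` representation: each row is a
// `Vec<(coeff, column_index)>`, and columns index into the concatenation
// `input || witness`. Illustrative example with made-up values: with
// input = [1, 5] and witness = [7], the row [(1, 0), (2, 2)] contributes
//   1 * input[0] + 2 * witness[2 - 2] = 1 + 14 = 15,
// which is what `inner_prod` below computes (skipping the multiplication when a
// coefficient is one).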
443 | pub(crate) fn matrix_vec_mul(matrix: &Matrix, input: &[F], witness: &[F]) -> Vec { 444 | ark_std::cfg_iter!(matrix) 445 | .map(|row| inner_prod(row, input, witness)) 446 | .collect() 447 | } 448 | 449 | // Computes the inner product of `row` and `input || witness` 450 | fn inner_prod(row: &[(F, usize)], input: &[F], witness: &[F]) -> F { 451 | let mut acc = F::zero(); 452 | for &(ref coeff, i) in row { 453 | let tmp = if i < input.len() { 454 | input[i] 455 | } else { 456 | witness[i - input.len()] 457 | }; 458 | 459 | acc += &(if coeff.is_one() { tmp } else { tmp * coeff }); 460 | } 461 | acc 462 | } 463 | 464 | #[cfg(test)] 465 | pub(crate) mod test { 466 | use super::*; 467 | use ark_ff::{PrimeField, UniformRand}; 468 | use ark_pallas::{Affine, Fq, Fr}; 469 | use ark_relations::{ 470 | lc, 471 | r1cs::{ConstraintSynthesizer, ConstraintSystemRef, SynthesisError}, 472 | }; 473 | use ark_sponge::poseidon::PoseidonSponge; 474 | const NUM_ITERS: usize = 10; 475 | 476 | #[derive(Copy, Clone)] 477 | struct DummyCircuit { 478 | pub a: Option, 479 | pub b: Option, 480 | pub num_variables: usize, 481 | pub num_constraints: usize, 482 | } 483 | 484 | impl ConstraintSynthesizer for DummyCircuit { 485 | fn generate_constraints(self, cs: ConstraintSystemRef) -> Result<(), SynthesisError> { 486 | let a = cs.new_witness_variable(|| self.a.ok_or(SynthesisError::AssignmentMissing))?; 487 | let b = cs.new_witness_variable(|| self.b.ok_or(SynthesisError::AssignmentMissing))?; 488 | let c = cs.new_input_variable(|| { 489 | let a = self.a.ok_or(SynthesisError::AssignmentMissing)?; 490 | let b = self.b.ok_or(SynthesisError::AssignmentMissing)?; 491 | 492 | Ok(a * b) 493 | })?; 494 | 495 | for _ in 0..(self.num_variables - 3) { 496 | cs.new_witness_variable(|| self.a.ok_or(SynthesisError::AssignmentMissing))?; 497 | } 498 | 499 | for _ in 0..self.num_constraints - 1 { 500 | cs.enforce_constraint(lc!() + a, lc!() + b, lc!() + c)?; 501 | } 502 | 503 | cs.enforce_constraint(lc!(), lc!(), lc!())?; 504 | 505 | Ok(()) 506 | } 507 | } 508 | 509 | #[test] 510 | fn test_simple_circuit() { 511 | let rng = &mut ark_std::test_rng(); 512 | let c = DummyCircuit { 513 | a: Some(Fr::rand(rng)), 514 | b: Some(Fr::rand(rng)), 515 | num_variables: 10, 516 | num_constraints: 100, 517 | }; 518 | 519 | let pcs = ConstraintSystem::new_ref(); 520 | pcs.set_optimization_goal(OptimizationGoal::Constraints); 521 | pcs.set_mode(ark_relations::r1cs::SynthesisMode::Prove { 522 | construct_matrices: false, 523 | }); 524 | c.generate_constraints(pcs.clone()).unwrap(); 525 | 526 | let r1cs_input = pcs.borrow().unwrap().instance_assignment.clone(); 527 | 528 | let pp = R1CSNark::>::setup(); 529 | let (ipk, ivk) = R1CSNark::>::index(&pp, c).unwrap(); 530 | 531 | let start = ark_std::time::Instant::now(); 532 | 533 | for i in 0..NUM_ITERS { 534 | let proof = R1CSNark::>::prove( 535 | &ipk, 536 | c.clone(), 537 | i % 2 == 1, 538 | Some(PoseidonSponge::::new()), 539 | Some(rng), 540 | ) 541 | .unwrap(); 542 | 543 | assert!(R1CSNark::>::verify( 544 | &ivk, 545 | &r1cs_input, 546 | &proof, 547 | Some(PoseidonSponge::::new()), 548 | )) 549 | } 550 | 551 | println!( 552 | "per-constraint proving time for {}: {} ns/constraint", 553 | stringify!($bench_pairing_engine), 554 | start.elapsed().as_nanos() / NUM_ITERS as u128 / 65536u128 555 | ); 556 | } 557 | } 558 | --------------------------------------------------------------------------------