├── .github └── workflows │ └── rust.yml ├── .gitignore ├── Cargo.toml ├── TODO.md ├── jellyfish ├── .dockerignore ├── .envrc ├── .github │ ├── dependabot.yml │ ├── pull_request_template.md │ └── workflows │ │ ├── build.yml │ │ └── periodic_checks.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE ├── README.md ├── bench.md ├── flake.lock ├── flake.nix ├── plonk │ ├── Cargo.toml │ ├── benches │ │ └── bench.rs │ ├── examples │ │ └── proof_of_exp.rs │ └── src │ │ ├── circuit │ │ ├── mod.rs │ │ ├── plonk_verifier │ │ │ ├── gadgets.rs │ │ │ ├── mod.rs │ │ │ ├── poly.rs │ │ │ └── structs.rs │ │ └── transcript.rs │ │ ├── constants.rs │ │ ├── errors.rs │ │ ├── lib.rs │ │ ├── proof_system │ │ ├── batch_arg.rs │ │ ├── mod.rs │ │ ├── prover.rs │ │ ├── snark.rs │ │ ├── structs.rs │ │ └── verifier.rs │ │ ├── testing_apis.rs │ │ └── transcript │ │ ├── mod.rs │ │ ├── rescue.rs │ │ ├── solidity.rs │ │ └── standard.rs ├── primitives │ ├── Cargo.toml │ ├── benches │ │ ├── merkle_path.rs │ │ └── pcs.rs │ └── src │ │ ├── aead.rs │ │ ├── circuit │ │ ├── commitment.rs │ │ ├── elgamal.rs │ │ ├── merkle_tree │ │ │ ├── mod.rs │ │ │ ├── rescue_merkle_tree.rs │ │ │ └── sparse_merkle_tree.rs │ │ ├── mod.rs │ │ ├── prf.rs │ │ ├── rescue │ │ │ ├── mod.rs │ │ │ ├── native.rs │ │ │ └── non_native.rs │ │ └── signature │ │ │ ├── mod.rs │ │ │ └── schnorr.rs │ │ ├── commitment.rs │ │ ├── constants.rs │ │ ├── crhf.rs │ │ ├── elgamal.rs │ │ ├── errors.rs │ │ ├── hash_to_group │ │ ├── mod.rs │ │ ├── short_weierstrass.rs │ │ └── twisted_edwards.rs │ │ ├── lib.rs │ │ ├── merkle_tree │ │ ├── append_only.rs │ │ ├── examples.rs │ │ ├── internal.rs │ │ ├── light_weight.rs │ │ ├── macros.rs │ │ ├── mod.rs │ │ ├── prelude.rs │ │ └── universal_merkle_tree.rs │ │ ├── pasta │ │ └── mod.rs │ │ ├── pcs │ │ ├── errors.rs │ │ ├── mod.rs │ │ ├── multilinear_kzg │ │ │ ├── batching.rs │ │ │ ├── mod.rs │ │ │ ├── srs.rs │ │ │ └── util.rs │ │ ├── prelude.rs │ │ ├── structs.rs │ │ ├── transcript.rs │ │ ├── univariate_ipa │ │ │ └── mod.rs │ │ └── univariate_kzg │ │ │ ├── mod.rs │ │ │ └── srs.rs │ │ ├── prf.rs │ │ ├── rescue │ │ ├── errors.rs │ │ ├── mod.rs │ │ ├── rescue_constants │ │ │ ├── bls12_377_base.rs │ │ │ ├── bls12_381_base.rs │ │ │ ├── bn254_base.rs │ │ │ ├── bw6_761_base.rs │ │ │ ├── ed_on_bls12_377_base.rs │ │ │ ├── ed_on_bls12_381_base.rs │ │ │ ├── ed_on_bn254_base.rs │ │ │ └── mod.rs │ │ └── sponge.rs │ │ ├── scalars_n_bases.rs │ │ ├── signatures │ │ ├── bls.rs │ │ ├── mod.rs │ │ └── schnorr.rs │ │ ├── utils.rs │ │ └── vrf │ │ ├── blsvrf.rs │ │ ├── ecvrf.rs │ │ └── mod.rs ├── relation │ ├── Cargo.toml │ └── src │ │ ├── constants.rs │ │ ├── constraint_system.rs │ │ ├── errors.rs │ │ ├── gadgets │ │ ├── arithmetic.rs │ │ ├── cmp.rs │ │ ├── ecc │ │ │ ├── conversion.rs │ │ │ ├── glv.rs │ │ │ ├── mod.rs │ │ │ └── msm.rs │ │ ├── logic.rs │ │ ├── mod.rs │ │ ├── range.rs │ │ ├── ultraplonk │ │ │ ├── lookup_table.rs │ │ │ ├── mod.rs │ │ │ ├── mod_arith.rs │ │ │ ├── non_native_gates.rs │ │ │ └── range.rs │ │ └── utils.rs │ │ ├── gates │ │ ├── arithmetic.rs │ │ ├── ecc.rs │ │ ├── logic.rs │ │ ├── lookup.rs │ │ └── mod.rs │ │ └── lib.rs ├── rustfmt.toml ├── scripts │ ├── check_no_std.sh │ ├── run_benchmarks.m4 │ ├── run_benchmarks.sh │ ├── run_tests.sh │ └── test_coverage.sh ├── shell.nix └── utilities │ ├── Cargo.toml │ └── src │ ├── conversion.rs │ ├── lib.rs │ ├── macros.rs │ ├── multi_pairing.rs │ ├── par_utils.rs │ └── serialize.rs └── src ├── errors.rs ├── folding_scheme.rs ├── ivc.rs ├── lib.rs ├── relaxed_plonk.rs ├── 
sangria.rs └── vector_commitment ├── mod.rs └── pedersen ├── arithmetic_definitions ├── commitment.rs └── mod.rs ├── mod.rs └── tests.rs /.github/workflows/rust.yml: -------------------------------------------------------------------------------- 1 | name: Rust 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | types: [opened, synchronize, reopened, ready_for_review] 9 | 10 | env: 11 | CARGO_TERM_COLOR: always 12 | # Disable incremental compilation. 13 | # 14 | # Incremental compilation is useful as part of an edit-build-test-edit cycle, 15 | # as it lets the compiler avoid recompiling code that hasn't changed. However, 16 | # on CI, we're not making small edits; we're almost always building the entire 17 | # project from scratch. Thus, incremental compilation on CI actually 18 | # introduces *additional* overhead to support making future builds 19 | # faster...but no future builds will ever occur in any given CI environment. 20 | # 21 | # See https://matklad.github.io/2021/09/04/fast-rust-builds.html#ci-workflow 22 | # for details. 23 | CARGO_INCREMENTAL: 0 24 | # Allow more retries for network requests in cargo (downloading crates) and 25 | # rustup (installing toolchains). This should help to reduce flaky CI failures 26 | # from transient network timeouts or other issues. 27 | CARGO_NET_RETRY: 10 28 | RUSTUP_MAX_RETRIES: 10 29 | # Don't emit giant backtraces in the CI logs. 30 | RUST_BACKTRACE: short 31 | 32 | jobs: 33 | test: 34 | name: Test Rust ${{matrix.toolchain}} on ${{matrix.os}} 35 | runs-on: ${{matrix.os}}-latest 36 | strategy: 37 | fail-fast: false 38 | matrix: 39 | toolchain: [stable, nightly] 40 | os: [ubuntu] 41 | directory: ['.', 'jellyfish'] 42 | defaults: 43 | run: 44 | working-directory: ${{ matrix.directory }} 45 | steps: 46 | - uses: actions/checkout@v3 47 | - uses: actions-rs/toolchain@v1 48 | with: 49 | toolchain: ${{matrix.toolchain}} 50 | - uses: taiki-e/install-action@nextest 51 | - name: cargo test 52 | run: | 53 | cargo nextest run --all-features 54 | - name: Doctests 55 | run: | 56 | cargo test --doc --all-features 57 | 58 | 59 | 60 | clippy: 61 | name: Clippy 62 | runs-on: ubuntu-latest 63 | steps: 64 | - uses: actions/checkout@v3 65 | - name: Install minimal nightly with clippy and rustfmt 66 | uses: actions-rs/toolchain@v1 67 | with: 68 | toolchain: stable 69 | components: clippy 70 | - name: Clippy 71 | uses: actions-rs/cargo@v1 72 | with: 73 | command: clippy 74 | args: --all -- -D clippy::all -D warnings 75 | 76 | rustfmt: 77 | name: rustfmt 78 | runs-on: ubuntu-latest 79 | steps: 80 | - uses: actions/checkout@v3 81 | - name: Install minimal nightly with clipy 82 | uses: actions-rs/toolchain@v1 83 | with: 84 | toolchain: stable 85 | components: rustfmt 86 | - name: rustfmt 87 | uses: actions-rs/cargo@v1 88 | with: 89 | command: fmt 90 | args: --all -- --check 91 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /Cargo.lock 3 | /.vscode 4 | .DS_store -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "sangria_impl" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | ark-crypto-primitives = "0.3.0" 10 | ark-ff = "0.3.0" 11 | 
ark-sponge = "0.3.0" 12 | ark-serialize = "0.3.0" 13 | ark-std = "0.3.0" 14 | thiserror = "1.0.38" 15 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | # TODOs 2 | 3 | ## Didactic Variant 4 | 5 | Folding scheme: 6 | - one finite field, generic 7 | - one additively homomorphic commitment scheme, Pedersen for generic field 8 | - one arithmetization of pure PLONK, Matrix of field elements with three columns and $n+s+1$ rows 9 | - one circuit-friendly hash function for fiat-shamir, Poseidon with parameters for the finite field 10 | 11 | IVC: 12 | - one cycle of curves, Pasta 13 | - one circuit friendly hash function, Poseidon with parameters for the scalar field of the primary curve 14 | - one circuit of the folding verifier 15 | 16 | Proof compression: 17 | - one snark for relaxed Plonk arithmetization, (modified) Halo2 -------------------------------------------------------------------------------- /jellyfish/.dockerignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | Cargo.lock 3 | Dockerfile* 4 | target 5 | -------------------------------------------------------------------------------- /jellyfish/.envrc: -------------------------------------------------------------------------------- 1 | use nix 2 | -------------------------------------------------------------------------------- /jellyfish/.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | 8 | - package-ecosystem: "cargo" 9 | directory: "/" 10 | schedule: 11 | interval: "daily" -------------------------------------------------------------------------------- /jellyfish/.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 4 | 5 | 10 | 11 | ## Description 12 | 13 | 16 | 17 | closes: #XXXX 18 | 19 | --- 20 | 21 | Before we can merge this PR, please make sure that all the following items have been 22 | checked off. If any of the checklist items are not applicable, please leave them but 23 | write a little note why. 24 | 25 | - [ ] Targeted PR against correct branch (main) 26 | - [ ] Linked to GitHub issue with discussion and accepted design OR have an explanation in the PR that describes this work. 
27 | - [ ] Wrote unit tests 28 | - [ ] Updated relevant documentation in the code 29 | - [ ] Added a relevant changelog entry to the `Pending` section in `CHANGELOG.md` 30 | - [ ] Re-reviewed `Files changed` in the GitHub PR explorer 31 | -------------------------------------------------------------------------------- /jellyfish/.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | schedule: 11 | - cron: "0 0 * * 1" 12 | workflow_dispatch: 13 | 14 | jobs: 15 | build: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Cancel Outdated Builds 19 | uses: styfle/cancel-workflow-action@0.11.0 20 | with: 21 | all_but_latest: true 22 | access_token: ${{ github.token }} 23 | 24 | - name: Checkout Repository 25 | uses: actions/checkout@v3 26 | 27 | - name: Install stable toolchain 28 | uses: actions-rs/toolchain@v1 29 | with: 30 | profile: default 31 | toolchain: stable 32 | override: true 33 | default: true 34 | components: rustfmt, clippy 35 | 36 | - name: Install nightly toolchain 37 | uses: actions-rs/toolchain@v1 38 | with: 39 | profile: minimal 40 | toolchain: nightly 41 | override: false 42 | default: false 43 | components: rustfmt 44 | 45 | - uses: Swatinem/rust-cache@v2 46 | name: Enable Rust Caching 47 | 48 | - name: Format Check 49 | run: cargo +nightly fmt -- --check 50 | 51 | - name: Clippy 52 | uses: actions-rs/clippy-check@v1 53 | with: 54 | token: ${{ github.token }} 55 | 56 | - name: Audit 57 | uses: actions-rs/audit-check@v1 58 | with: 59 | token: ${{ github.token }} 60 | 61 | - name: Check Bench 62 | run: cargo bench --no-run 63 | 64 | - name: Check Ignored Tests 65 | run: cargo test --no-run -- --ignored 66 | 67 | - name: Check no_std compilation 68 | run: cargo test --no-run --no-default-features 69 | 70 | - name: Test 71 | run: bash ./scripts/run_tests.sh 72 | 73 | - name: Example 74 | run: cargo run --release --example proof_of_exp 75 | 76 | - name: Generate Documentation 77 | run: | 78 | cargo doc --no-deps --lib --release 79 | cp -R target/doc public 80 | echo '' > public/index.html 81 | 82 | - name: Deploy 83 | uses: peaceiris/actions-gh-pages@v3 84 | if: ${{ github.ref == 'refs/heads/main' }} 85 | with: 86 | github_token: ${{ secrets.GITHUB_TOKEN }} 87 | publish_dir: ./public 88 | cname: jellyfish.docs.espressosys.com 89 | -------------------------------------------------------------------------------- /jellyfish/.github/workflows/periodic_checks.yml: -------------------------------------------------------------------------------- 1 | name: Periodic code checks 2 | 3 | on: 4 | schedule: 5 | - cron: "0 0 * * 0" # At 00:00 on Sunday. 
6 | workflow_dispatch: 7 | 8 | jobs: 9 | check: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout Repository 13 | uses: actions/checkout@v3 14 | 15 | - name: Install nightly toolchain 16 | uses: actions-rs/toolchain@v1 17 | with: 18 | profile: minimal 19 | toolchain: nightly 20 | override: true 21 | default: false 22 | 23 | - name: Run cargo-udeps 24 | uses: aig787/cargo-udeps-action@v1 25 | with: 26 | args: '--all-targets' 27 | -------------------------------------------------------------------------------- /jellyfish/.gitignore: -------------------------------------------------------------------------------- 1 | .*.sw* 2 | .DS_Store 3 | .idea 4 | /target 5 | cargo-system-config.toml 6 | Cargo.lock 7 | 8 | # Test coverage (grcov) 9 | default.profraw 10 | /.pre-commit-config.yaml 11 | -------------------------------------------------------------------------------- /jellyfish/Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "plonk", 4 | "primitives", 5 | "relation", 6 | "utilities", 7 | ] 8 | 9 | [workspace.package] 10 | version = "0.2.0" 11 | authors = ["Espresso Systems "] 12 | edition = "2021" 13 | license = "MIT" 14 | rust-version = "1.64.0" 15 | homepage = "https://github.com/EspressoSystems/jellyfish" 16 | documentation = "https://jellyfish.docs.espressosys.com" 17 | repository = "https://github.com/EspressoSystems/jellyfish" 18 | -------------------------------------------------------------------------------- /jellyfish/LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2022 Espresso Systems (espressosys.com) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /jellyfish/README.md: -------------------------------------------------------------------------------- 1 | # Jellyfish cryptographic library 2 | 3 | ![example workflow](https://github.com/EspressoSystems/jellyfish/actions/workflows/build.yml/badge.svg) 4 | ![Crates.io (version)](https://img.shields.io/crates/dv/jf-plonk/0.1.0) 5 | ![GitHub](https://img.shields.io/github/license/EspressoSystems/jellyfish) 6 | 7 | ## Disclaimer 8 | 9 | **DISCLAIMER:** This software is provided "as is" and its security has not been externally audited. Use at your own risk. 
10 | 11 | ## Chatroom 12 | 13 | For general discussions on Jellyfish PLONK, please join our [Discord channel](https://discord.gg/GJa4gznGfU). 14 | 15 | ## Development environment setup 16 | 17 | We recommend the following tools: 18 | 19 | - [`nix`](https://nixos.org/download.html) 20 | - [`direnv`](https://direnv.net/docs/installation.html) 21 | 22 | Run `direnv allow` at the repo root. You should see dependencies (including Rust) being installed. 23 | 24 | ## Build, run tests and examples 25 | 26 | Build: 27 | 28 | ``` 29 | cargo build 30 | ``` 31 | 32 | Run an example: 33 | 34 | ``` 35 | cargo run --release --example proof_of_exp 36 | ``` 37 | 38 | This is a simple example that proves and verifies knowledge of an exponent. 39 | It shows how one may compose a circuit, and then build a proof for the circuit. 40 | 41 | ### Tests 42 | 43 | ``` 44 | cargo test --release 45 | ``` 46 | 47 | Note that by default the _release_ mode does not check for integer overflow. 48 | In order to enforce this check, run: 49 | 50 | ``` 51 | ./scripts/run_tests.sh 52 | ``` 53 | 54 | #### Test coverage 55 | 56 | We use [grcov](https://github.com/mozilla/grcov) for test coverage: 57 | 58 | ``` 59 | ./scripts/test_coverage.sh 60 | ``` 61 | 62 | ### Generate and read the documentation 63 | 64 | #### Standard 65 | 66 | ``` 67 | cargo doc --open 68 | ``` 69 | 70 | ### Code formatting 71 | 72 | To format your code, run: 73 | 74 | ``` 75 | cargo fmt 76 | ``` 77 | 78 | ### Updating non-cargo dependencies 79 | 80 | Run `nix flake update`. If you would like to pin a different version, edit `flake.nix` 81 | beforehand. Commit the lock file when happy. 82 | 83 | To update only a single input, specify it as an argument, for example: 84 | 85 | nix flake update github:oxalica/rust-overlay 86 | 87 | ### Benchmarks 88 | 89 | #### Primitives 90 | 91 | Currently, a benchmark for verifying Merkle paths is implemented. 92 | The additional flags enable the assembly implementations of `square_in_place` and `mul_assign` within arkworks: 93 | 94 | ```bash 95 | RUSTFLAGS='-Ctarget-cpu=native -Ctarget-feature=+bmi2,+adx' cargo bench --bench=merkle_path 96 | ``` 97 | 98 | #### PLONK proof generation/verification 99 | 100 | To run the benchmarks: 101 | 102 | ``` 103 | RAYON_NUM_THREADS=N cargo bench 104 | ``` 105 | 106 | where N is the number of threads you want to use (N = 1 for single-threaded execution). 107 | 108 | A sample benchmark result is available under [`bench.md`](./bench.md). 109 | 110 | ## Git Hooks 111 | 112 | The pre-commit hooks are installed via the nix shell.
To run them on all files use 113 | 114 | ``` 115 | pre-commit run --all-files 116 | ``` 117 | -------------------------------------------------------------------------------- /jellyfish/bench.md: -------------------------------------------------------------------------------- 1 | # Plonk proof generation/verification 2 | 3 | 4 | 5 | ## Desktop 6 | - Processor: AMD 5900x 12 core 24 thread at 3.7 GHz 7 | - Memory: 16 GB 2667 MHz DDR4 8 | - OS: ubuntu 20.04 9 | - rustc 1.56.1 (59eed8a2a 2021-11-01) 10 | - `RAYON_NUM_THREADS=24 cargo bench` 11 | 12 | ### TurboPlonk 13 | | | Proving | Verifying | Batch Verifying | 14 | |:---|---:|---:|---:| 15 | | | ns/constraints | single proof | 1k proofs | 16 | | BLS12-381 | 29591 | 2.083 ms | 7.445 ms | 17 | | BN-254 | 23069 | 1.459 ms | 6.540 ms | 18 | | BW6-761 | 120446 | 10.885 ms | 19.615 ms | 19 | 20 | 21 | ### UltraPlonk 22 | | | Proving | Verifying | Batch Verifying | 23 | |:---|---:|---:|---:| 24 | | | ns/constraints | single proof | 1k proofs | 25 | | BLS12-381 | 41747 | 2.314 ms | 8.381 ms | 26 | | BN-254 | 33701 | 1.459 ms | 7.430 ms | 27 | | BW6-761 | 162476 | 9.413 ms | 21.505 ms | 28 | 29 | 30 | 31 | ## Laptop 32 | - MacBoo Pro (16-inch, 2019) 33 | - Processor: 2.3 GHz 8-Core Intel Core i9 34 | - Memory: 16 GB 2667 MHz DDR4 35 | - `RAYON_NUM_THREADS=N cargo bench` 36 | 37 | ### TurboPlonk 38 | | | Proving | Verifying | Batch Verifying | 39 | |:---|---:|---:|---:| 40 | | | ns/constraints | single proof | 1k proofs | 41 | | BLS12-381 | 59317 | 3.207 ms | 17.683 ms | 42 | | BN-254 | 44857 | 2.364 ms | 14.803 ms | 43 | | BW6-761 | 271828 | 12.504 ms | 37.909 ms | 44 | 45 | 46 | ### UltraPlonk 47 | | | Proving | Verifying | Batch Verifying | 48 | |:---|---:|---:|---:| 49 | | | ns/constraints | single proof | 1k proofs | 50 | | BLS12-381 | 89593 | 3.549 ms | 20.784 ms | 51 | | BN-254 | 70383 | 2.390 ms | 17.173 ms | 52 | | BW6-761 | 373141 | 13.656 ms | 44.023 ms | 53 | 54 | -------------------------------------------------------------------------------- /jellyfish/flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-compat": { 4 | "flake": false, 5 | "locked": { 6 | "lastModified": 1650374568, 7 | "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=", 8 | "owner": "edolstra", 9 | "repo": "flake-compat", 10 | "rev": "b4a34015c698c7793d592d66adbab377907a2be8", 11 | "type": "github" 12 | }, 13 | "original": { 14 | "owner": "edolstra", 15 | "repo": "flake-compat", 16 | "type": "github" 17 | } 18 | }, 19 | "flake-utils": { 20 | "locked": { 21 | "lastModified": 1659877975, 22 | "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", 23 | "owner": "numtide", 24 | "repo": "flake-utils", 25 | "rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", 26 | "type": "github" 27 | }, 28 | "original": { 29 | "owner": "numtide", 30 | "repo": "flake-utils", 31 | "type": "github" 32 | } 33 | }, 34 | "flake-utils_2": { 35 | "locked": { 36 | "lastModified": 1644229661, 37 | "narHash": "sha256-1YdnJAsNy69bpcjuoKdOYQX0YxZBiCYZo4Twxerqv7k=", 38 | "owner": "numtide", 39 | "repo": "flake-utils", 40 | "rev": "3cecb5b042f7f209c56ffd8371b2711a290ec797", 41 | "type": "github" 42 | }, 43 | "original": { 44 | "owner": "numtide", 45 | "repo": "flake-utils", 46 | "type": "github" 47 | } 48 | }, 49 | "flake-utils_3": { 50 | "locked": { 51 | "lastModified": 1659877975, 52 | "narHash": "sha256-zllb8aq3YO3h8B/U0/J1WBgAL8EX5yWf5pMj3G0NAmc=", 53 | "owner": "numtide", 54 | "repo": "flake-utils", 55 | 
"rev": "c0e246b9b83f637f4681389ecabcb2681b4f3af0", 56 | "type": "github" 57 | }, 58 | "original": { 59 | "owner": "numtide", 60 | "repo": "flake-utils", 61 | "type": "github" 62 | } 63 | }, 64 | "nixpkgs": { 65 | "locked": { 66 | "lastModified": 1666109165, 67 | "narHash": "sha256-BMLyNVkr0oONuq3lKlFCRVuYqF75CO68Z8EoCh81Zdk=", 68 | "owner": "nixos", 69 | "repo": "nixpkgs", 70 | "rev": "32096899af23d49010bd8cf6a91695888d9d9e73", 71 | "type": "github" 72 | }, 73 | "original": { 74 | "owner": "nixos", 75 | "ref": "nixos-unstable", 76 | "repo": "nixpkgs", 77 | "type": "github" 78 | } 79 | }, 80 | "nixpkgs_2": { 81 | "locked": { 82 | "lastModified": 1665296151, 83 | "narHash": "sha256-uOB0oxqxN9K7XGF1hcnY+PQnlQJ+3bP2vCn/+Ru/bbc=", 84 | "owner": "NixOS", 85 | "repo": "nixpkgs", 86 | "rev": "14ccaaedd95a488dd7ae142757884d8e125b3363", 87 | "type": "github" 88 | }, 89 | "original": { 90 | "owner": "NixOS", 91 | "ref": "nixpkgs-unstable", 92 | "repo": "nixpkgs", 93 | "type": "github" 94 | } 95 | }, 96 | "pre-commit-hooks": { 97 | "inputs": { 98 | "flake-utils": "flake-utils_2", 99 | "nixpkgs": [ 100 | "nixpkgs" 101 | ] 102 | }, 103 | "locked": { 104 | "lastModified": 1666160137, 105 | "narHash": "sha256-8bQu+6poMzUyS2n3C1v3hkO6ZhRzj8Pf3CDCNckqQE4=", 106 | "owner": "cachix", 107 | "repo": "pre-commit-hooks.nix", 108 | "rev": "e6c8efee1c108bb27522b9fd25b1cd0eb3288681", 109 | "type": "github" 110 | }, 111 | "original": { 112 | "owner": "cachix", 113 | "repo": "pre-commit-hooks.nix", 114 | "type": "github" 115 | } 116 | }, 117 | "root": { 118 | "inputs": { 119 | "flake-compat": "flake-compat", 120 | "flake-utils": "flake-utils", 121 | "nixpkgs": "nixpkgs", 122 | "pre-commit-hooks": "pre-commit-hooks", 123 | "rust-overlay": "rust-overlay" 124 | } 125 | }, 126 | "rust-overlay": { 127 | "inputs": { 128 | "flake-utils": "flake-utils_3", 129 | "nixpkgs": "nixpkgs_2" 130 | }, 131 | "locked": { 132 | "lastModified": 1666148516, 133 | "narHash": "sha256-pFgSJzUFsnCTulIzhn3HHImaZpqlMxAvXTrhg0qlMOE=", 134 | "owner": "oxalica", 135 | "repo": "rust-overlay", 136 | "rev": "3e41700ab6f585b9569112ee7516c74f8d072989", 137 | "type": "github" 138 | }, 139 | "original": { 140 | "owner": "oxalica", 141 | "repo": "rust-overlay", 142 | "type": "github" 143 | } 144 | } 145 | }, 146 | "root": "root", 147 | "version": 7 148 | } 149 | -------------------------------------------------------------------------------- /jellyfish/flake.nix: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | # This file is part of the Jellyfish library. 3 | 4 | # You should have received a copy of the MIT License 5 | # along with the Jellyfish library. If not, see . 6 | 7 | { 8 | description = "Jellyfish dev env"; 9 | 10 | inputs.nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; 11 | inputs.flake-utils.url = "github:numtide/flake-utils"; # for dedup 12 | 13 | # allow shell.nix alongside flake.nix 14 | inputs.flake-compat.url = "github:edolstra/flake-compat"; 15 | inputs.flake-compat.flake = false; 16 | 17 | inputs.rust-overlay.url = "github:oxalica/rust-overlay"; 18 | inputs.pre-commit-hooks.url = "github:cachix/pre-commit-hooks.nix"; 19 | inputs.pre-commit-hooks.inputs.nixpkgs.follows = "nixpkgs"; 20 | 21 | outputs = { self, nixpkgs, flake-utils, flake-compat, rust-overlay, pre-commit-hooks, ... 
}: 22 | flake-utils.lib.eachDefaultSystem (system: 23 | let 24 | overlays = [ (import rust-overlay) ]; 25 | pkgs = import nixpkgs { inherit system overlays; }; 26 | nightlyToolchain = pkgs.rust-bin.selectLatestNightlyWith 27 | (toolchain: toolchain.minimal.override { extensions = [ "rustfmt" ]; }); 28 | 29 | stableToolchain = pkgs.rust-bin.stable.latest.minimal.override { 30 | extensions = [ "clippy" "llvm-tools-preview" "rust-src" ]; 31 | }; 32 | in with pkgs; 33 | { 34 | check = { 35 | pre-commit-check = pre-commit-hooks.lib.${system}.run { 36 | src = ./.; 37 | hooks = { 38 | check-format = { 39 | enable = true; 40 | files = "\\.rs$"; 41 | entry = "cargo fmt -- --check"; 42 | }; 43 | doctest = { 44 | enable = true; 45 | entry = "cargo test --doc"; 46 | files = "\\.rs$"; 47 | pass_filenames = false; 48 | }; 49 | cargo-clippy = { 50 | enable = true; 51 | description = "Lint Rust code."; 52 | entry = "cargo-clippy --workspace -- -D warnings"; 53 | files = "\\.rs$"; 54 | pass_filenames = false; 55 | }; 56 | cargo-sort = { 57 | enable = true; 58 | description = "Ensure Cargo.toml are sorted"; 59 | entry = "cargo sort -w"; 60 | pass_filenames = false; 61 | }; 62 | }; 63 | }; 64 | }; 65 | devShell = mkShell { 66 | buildInputs = [ 67 | argbash 68 | openssl 69 | pkgconfig 70 | git 71 | 72 | stableToolchain 73 | nightlyToolchain 74 | cargo-sort 75 | 76 | ] ++ lib.optionals stdenv.isDarwin [ darwin.apple_sdk.frameworks.Security ]; 77 | 78 | shellHook = '' 79 | export RUST_BACKTRACE=full 80 | export PATH="$PATH:$(pwd)/target/debug:$(pwd)/target/release" 81 | 82 | # Ensure `cargo fmt` uses `rustfmt` from nightly. 83 | export RUSTFMT="${nightlyToolchain}/bin/rustfmt" 84 | '' 85 | # install pre-commit hooks 86 | + self.check.${system}.pre-commit-check.shellHook; 87 | }; 88 | } 89 | ); 90 | } 91 | -------------------------------------------------------------------------------- /jellyfish/plonk/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jf-plonk" 3 | description = "UltraPlonk implementation" 4 | # can change back to version.workspace = true after the following issue is fixed: 5 | # https://github.com/DevinR528/cargo-sort/issues/47 6 | version = { workspace = true } 7 | authors = { workspace = true } 8 | edition = { workspace = true } 9 | license = { workspace = true } 10 | rust-version = { workspace = true } 11 | 12 | [dependencies] 13 | ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 14 | ark-bls12-381 = "0.3.0" 15 | ark-bn254 = "0.3.0" 16 | ark-bw6-761 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 17 | ark-ec = "0.3.0" 18 | ark-ff = { version = "0.3.0", features = [ "asm" ] } 19 | ark-poly = "0.3.0" 20 | ark-serialize = "0.3.0" 21 | ark-std = { version = "0.3.0", default-features = false } 22 | derivative = { version = "2", features = ["use_core"] } 23 | displaydoc = { version = "0.2.3", default-features = false } 24 | downcast-rs = { version = "1.2.0", default-features = false } 25 | dyn-clone = "^1.0" 26 | espresso-systems-common = { git = "https://github.com/espressosystems/espresso-systems-common", tag = "0.4.0" } 27 | hashbrown = "0.13.2" 28 | itertools = { version = "0.10.1", default-features = false } 29 | jf-primitives = { path = "../primitives", default-features = false } 30 | jf-relation = { path = "../relation", default-features = false } 31 | jf-utils = { path = "../utilities" } 32 | merlin = { 
version = "3.0.0", default-features = false } 33 | num-bigint = { version = "0.4", default-features = false } 34 | rand_chacha = { version = "0.3.1" } 35 | rayon = { version = "1.5.0", optional = true } 36 | serde = { version = "1.0", default-features = false, features = ["derive"] } 37 | sha3 = "^0.10" 38 | tagged-base64 = { git = "https://github.com/espressosystems/tagged-base64", tag = "0.2.4" } 39 | 40 | [dev-dependencies] 41 | ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 42 | ark-ed-on-bls12-381 = "0.3.0" 43 | ark-ed-on-bn254 = "0.3.0" 44 | hex = "^0.4.3" 45 | 46 | # Benchmarks 47 | [[bench]] 48 | name = "plonk-benches" 49 | path = "benches/bench.rs" 50 | harness = false 51 | 52 | [features] 53 | default = ["parallel"] 54 | std = ["ark-std/std", "ark-serialize/std", "ark-ff/std", "ark-ec/std", "ark-poly/std"] 55 | test_apis = [] # exposing apis for testing purpose 56 | parallel = ["ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", 57 | "jf-utils/parallel", "jf-relation/parallel", "jf-primitives/parallel", 58 | "rayon" ] 59 | -------------------------------------------------------------------------------- /jellyfish/plonk/examples/proof_of_exp.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This file contains an example showing how to build a proof of knowledge 8 | //! of the exponent over a native field. 9 | //! 10 | //! - secret input `x`; 11 | //! - public generator `G`; 12 | //! - public group element `X := xG` 13 | 14 | use ark_bls12_381::Bls12_381; 15 | use ark_ec::{ 16 | twisted_edwards_extended::GroupAffine as TEAffine, AffineCurve, ModelParameters, PairingEngine, 17 | ProjectiveCurve, TEModelParameters, 18 | }; 19 | use ark_ed_on_bls12_381::{EdwardsAffine, EdwardsParameters, Fr}; 20 | use ark_ff::PrimeField; 21 | use ark_std::{rand::SeedableRng, UniformRand}; 22 | use jf_plonk::{ 23 | errors::PlonkError, 24 | proof_system::{PlonkKzgSnark, UniversalSNARK}, 25 | transcript::StandardTranscript, 26 | }; 27 | use jf_primitives::pcs::prelude::UnivariateKzgPCS; 28 | use jf_relation::{gadgets::ecc::Point, Arithmetization, Circuit, PlonkCircuit}; 29 | use jf_utils::fr_to_fq; 30 | use rand_chacha::ChaCha20Rng; 31 | 32 | // The following example proves knowledge of exponent. 33 | #[allow(non_snake_case)] 34 | fn main() -> Result<(), PlonkError> { 35 | // set up the inputs and parameters 36 | let mut rng = ChaCha20Rng::from_seed([0u8; 32]); 37 | let x = Fr::rand(&mut rng); 38 | let G = EdwardsAffine::prime_subgroup_generator(); 39 | let X = G.mul(x).into_affine(); 40 | 41 | // Our first step is to build a circuit for the following statements. 42 | // - secret input `x`; 43 | // - public generator `G`; 44 | // - public group element `X := xG` 45 | // This circuit does not need to have real inputs. 46 | // We can simply use a dummy data set. 47 | let circuit = proof_of_exponent_circuit::(x, X)?; 48 | 49 | // Knowing the circuit size, we are able to simulate the universal 50 | // setup and obtain the structured reference string (SRS). 51 | // 52 | // The required SRS size can be obtained from the circuit. 
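// (The exact SRS size is backend-specific, so we query the finalized circuit rather
// than hard-coding a value; the setup below then stays correct if the circuit changes.)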
53 | let srs_size = circuit.srs_size()?; 54 | let srs = PlonkKzgSnark::>::universal_setup( 55 | srs_size, &mut rng, 56 | )?; 57 | 58 | // Then, we generate the proving key and verification key from the SRS and 59 | // circuit. 60 | let (pk, vk) = 61 | PlonkKzgSnark::>::preprocess(&srs, &circuit)?; 62 | 63 | // Next, we generate the proof. 64 | // The proof generation will need an internal transcript for Fiat-Shamir 65 | // transformation. For this example we use a `StandardTranscript`. 66 | let proof = 67 | PlonkKzgSnark::>::prove::<_, _, StandardTranscript>( 68 | &mut rng, &circuit, &pk, None, 69 | )?; 70 | 71 | // Last step, verify the proof against the public inputs. 72 | let public_inputs = circuit.public_input().unwrap(); 73 | // Extra messages can be bound to the proof by appending them to its transcript; not used 74 | // here. 75 | let extra_transcript_init_msg = None; 76 | assert!(PlonkKzgSnark::>::verify::( 77 | &vk, 78 | &public_inputs, 79 | &proof, 80 | extra_transcript_init_msg, 81 | ) 82 | .is_ok()); 83 | 84 | Ok(()) 85 | } 86 | 87 | // This function builds the PoE circuit. 88 | // 89 | // We write the code with generics so that it can be adapted to 90 | // multiple curves. 91 | // Specifically, the PoE is associated with 92 | // - an embedded curve with param `EmbedCurve` that defines the twisted-edwards 93 | // parameters of the curve 94 | // - a pairing engine 95 | // - the native field F for the proof system 96 | #[allow(non_snake_case)] 97 | fn proof_of_exponent_circuit( 98 | x: EmbedCurve::ScalarField, 99 | X: TEAffine, 100 | ) -> Result, PlonkError> 101 | where 102 | EmbedCurve: TEModelParameters, 103 | ::BaseField: PrimeField, 104 | PairingCurve: PairingEngine, 105 | { 106 | // Let's check that the inputs are indeed correct before we build a circuit. 107 | let G = TEAffine::::prime_subgroup_generator(); 108 | assert_eq!(X, G.mul(x), "the inputs are incorrect: X != xG"); 109 | 110 | // Step 1: 111 | // We instantiate a turbo plonk circuit. 112 | // 113 | // Here we only need turbo plonk since we are not using plookups. 114 | let mut circuit = PlonkCircuit::::new_turbo_plonk(); 115 | 116 | // Step 2: 117 | // Now we create variables for each input to the circuit. 118 | 119 | // The first variable is x, which is a field element over EmbedCurve::ScalarField. 120 | // We will need to lift it to EmbedCurve::BaseField. 121 | let x_fq = fr_to_fq::<_, EmbedCurve>(&x); 122 | let x_var = circuit.create_variable(x_fq)?; 123 | 124 | // The next variable is a public constant: generator `G`. 125 | // We need to convert the point to Jellyfish's own `Point` struct. 126 | let G_jf: Point = G.into(); 127 | let G_var = circuit.create_constant_point_variable(G_jf)?; 128 | 129 | // The last variable is a public variable `X`. 130 | let X_jf: Point = X.into(); 131 | let X_var = circuit.create_public_point_variable(X_jf)?; 132 | 133 | // Step 3: 134 | // Connect the wires. 135 | let X_var_computed = circuit.variable_base_scalar_mul::(x_var, &G_var)?; 136 | circuit.enforce_point_equal(&X_var_computed, &X_var)?; 137 | 138 | // Sanity check: the circuit must be satisfied. 139 | assert!(circuit 140 | .check_circuit_satisfiability(&[X_jf.get_x(), X_jf.get_y()]) 141 | .is_ok()); 142 | 143 | // And we are done!
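// Finalizing pads the circuit to its final size and freezes the gate layout for
// arithmetization; no further gates or variables should be added after this call.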
144 | circuit.finalize_for_arithmetization()?; 145 | 146 | Ok(circuit) 147 | } 148 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/circuit/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | /// Circuit implementations 8 | pub mod plonk_verifier; 9 | pub mod transcript; 10 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/constants.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Crate-wide constants. 8 | 9 | /// Proof-system-related constants. 10 | /// 11 | /// Label for the extra data field to be appended to the transcript during 12 | /// initialization 13 | pub(crate) const EXTRA_TRANSCRIPT_MSG_LABEL: &[u8] = b"extra info"; 14 | 15 | /// Compute the ratio between the quotient polynomial domain size and 16 | /// the vanishing polynomial domain size 17 | #[inline] 18 | pub(crate) fn domain_size_ratio(n: usize, num_wire_types: usize) -> usize { 19 | (num_wire_types * (n + 1) + 2) / n + 1 20 | } 21 | 22 | /// Keccak-256 has a 64-byte state size to accommodate two hash digests. 23 | pub const KECCAK256_STATE_SIZE: usize = 64; 24 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Error module. 8 | 9 | use ark_std::{format, string::String}; 10 | use displaydoc::Display; 11 | use jf_primitives::pcs::errors::PCSError; 12 | use jf_relation::errors::CircuitError; 13 | 14 | /// An `enum` specifying the possible failure modes of the Plonk.
15 | #[derive(Display, Debug)] 16 | pub enum PlonkError { 17 | /// The index is too large for the universal public parameters 18 | IndexTooLarge, 19 | /// Failed to create domain 20 | DomainCreationError, 21 | /// Failed to get array value by index 22 | IndexError, 23 | /// Divided by zero field element 24 | DivisionError, 25 | /// An error in the Plonk SNARK logic: {0} 26 | SnarkError(SnarkError), 27 | /// An error in the underlying polynomial commitment: {0} 28 | PCSError(PCSError), 29 | /// An error in the Plonk circuit: {0} 30 | CircuitError(CircuitError), 31 | /// An error during IO: {0} 32 | IoError(ark_std::io::Error), 33 | /// An error during (de)serialization 34 | SerializationError(ark_serialize::SerializationError), 35 | /// Plonk proof verification failed due to wrong proof 36 | WrongProof, 37 | /// Rescue Error 38 | PrimitiveError(jf_primitives::errors::PrimitivesError), 39 | /// Invalid parameters 40 | InvalidParameters(String), 41 | /// Non-native field overflow 42 | NonNativeFieldOverflow, 43 | /// Iterator out of range 44 | IteratorOutOfRange, 45 | /// Public inputs for partial verification circuit do not match 46 | PublicInputsDoNotMatch, 47 | } 48 | 49 | impl ark_std::error::Error for PlonkError {} 50 | 51 | impl From for PlonkError { 52 | fn from(e: PCSError) -> Self { 53 | Self::PCSError(e) 54 | } 55 | } 56 | 57 | impl From for PlonkError { 58 | fn from(e: ark_std::io::Error) -> Self { 59 | Self::IoError(e) 60 | } 61 | } 62 | 63 | impl From for PlonkError { 64 | fn from(e: ark_serialize::SerializationError) -> Self { 65 | Self::SerializationError(e) 66 | } 67 | } 68 | 69 | impl From for PlonkError { 70 | fn from(e: jf_primitives::errors::PrimitivesError) -> Self { 71 | Self::PrimitiveError(e) 72 | } 73 | } 74 | 75 | /// An `enum` specifying the possible failure modes of the underlying SNARK. 76 | #[derive(Display, Debug)] 77 | pub enum SnarkError { 78 | #[rustfmt::skip] 79 | /// Suspect: circuit is not satisfied. The quotient polynomial has wrong degree: {0}, expected: {1}. 80 | WrongQuotientPolyDegree(usize, usize), 81 | /// Invalid parameters: {0} 82 | ParameterError(String), 83 | /// The SNARK does not support lookup 84 | SnarkLookupUnsupported, 85 | } 86 | 87 | #[cfg(feature = "std")] 88 | impl std::error::Error for SnarkError {} 89 | 90 | impl From for PlonkError { 91 | fn from(e: SnarkError) -> Self { 92 | Self::SnarkError(e) 93 | } 94 | } 95 | 96 | impl From for PlonkError { 97 | fn from(e: CircuitError) -> Self { 98 | Self::CircuitError(e) 99 | } 100 | } 101 | 102 | impl From for CircuitError { 103 | // this happens during invocation of the Plonk proof system API inside the Verifier 104 | // gadget 105 | fn from(e: PlonkError) -> Self { 106 | Self::ParameterError(format!("Plonk proof system err: {e:?}")) 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! A Rust Implementation of the Plonk ZKP System and Extensions.
8 | 9 | #![cfg_attr(not(feature = "std"), no_std)] 10 | #![warn(missing_docs)] 11 | #![allow(clippy::derived_hash_with_manual_eq)] 12 | #[cfg(test)] 13 | extern crate std; 14 | 15 | #[macro_use] 16 | extern crate derivative; 17 | 18 | /// Customized circuit 19 | pub mod circuit; 20 | pub mod constants; 21 | pub mod errors; 22 | pub mod proof_system; 23 | pub mod transcript; 24 | 25 | pub use jf_relation::PlonkType; 26 | 27 | #[cfg(feature = "test_apis")] 28 | pub mod testing_apis; 29 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/proof_system/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Interfaces for Plonk-based proof systems 8 | use ark_std::{ 9 | error::Error, 10 | fmt::Debug, 11 | rand::{CryptoRng, RngCore}, 12 | vec::Vec, 13 | }; 14 | use jf_primitives::pcs::CommitmentGroup; 15 | use jf_relation::Arithmetization; 16 | pub mod batch_arg; 17 | pub(crate) mod prover; 18 | pub(crate) mod snark; 19 | pub mod structs; 20 | pub(crate) mod verifier; 21 | use crate::transcript::PlonkTranscript; 22 | pub use snark::PlonkKzgSnark; 23 | 24 | // TODO: (alex) should we name it `PlonkishSNARK` instead? since we use 25 | // `PlonkTranscript` on prove and verify. 26 | /// An interface for SNARKs with universal setup. 27 | pub trait UniversalSNARK { 28 | /// The SNARK proof computed by the prover. 29 | type Proof: Clone; 30 | 31 | /// The parameters required by the prover to compute a proof for a specific 32 | /// circuit. 33 | type ProvingKey: Clone; 34 | 35 | /// The parameters required by the verifier to validate a proof for a 36 | /// specific circuit. 37 | type VerifyingKey: Clone; 38 | 39 | /// Universal Structured Reference String from `universal_setup`, used for 40 | /// all subsequent circuit-specific preprocessing 41 | type UniversalSRS: Clone + Debug; 42 | 43 | /// SNARK related error 44 | type Error: 'static + Error; 45 | 46 | /// Generate the universal SRS for the argument system. 47 | /// This setup is for trusted party to run, and mostly only used for 48 | /// testing purpose. In practice, a MPC flavor of the setup will be carried 49 | /// out to have higher assurance on the "toxic waste"/trapdoor being thrown 50 | /// away to ensure soundness of the argument system. 51 | fn universal_setup( 52 | max_degree: usize, 53 | rng: &mut R, 54 | ) -> Result; 55 | 56 | /// Circuit-specific preprocessing to compute the proving/verifying keys. 57 | fn preprocess>( 58 | srs: &Self::UniversalSRS, 59 | circuit: &C, 60 | ) -> Result<(Self::ProvingKey, Self::VerifyingKey), Self::Error>; 61 | 62 | /// Compute a SNARK proof of a circuit `circuit`, using the corresponding 63 | /// proving key `prove_key`. The witness used to 64 | /// generate the proof can be obtained from `circuit`. 65 | /// 66 | /// `extra_transcript_init_msg` is the optional message to be 67 | /// appended to the transcript during its initialization before obtaining 68 | /// any challenges. This field allows application-specific data bound to the 69 | /// resulting proof without any check on the data. It does not incur any 70 | /// additional cost in proof size or prove time. 
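/// A rough sketch of the intended end-to-end flow, mirroring the `proof_of_exp`
/// example in this repository (generic parameters elided; `circuit` and `rng` are
/// placeholders):
///
/// ```ignore
/// let srs = PlonkKzgSnark::universal_setup(circuit.srs_size()?, &mut rng)?;
/// let (pk, vk) = PlonkKzgSnark::preprocess(&srs, &circuit)?;
/// let proof =
///     PlonkKzgSnark::prove::<_, _, StandardTranscript>(&mut rng, &circuit, &pk, None)?;
/// PlonkKzgSnark::verify::<StandardTranscript>(&vk, &circuit.public_input()?, &proof, None)?;
/// ```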
71 | fn prove( 72 | rng: &mut R, 73 | circuit: &C, 74 | prove_key: &Self::ProvingKey, 75 | extra_transcript_init_msg: Option>, 76 | ) -> Result 77 | where 78 | C: Arithmetization, 79 | R: CryptoRng + RngCore, 80 | T: PlonkTranscript; 81 | 82 | /// Verify a SNARK proof `proof` of the circuit `circuit`, with respect to 83 | /// the public input `pub_input`. 84 | /// 85 | /// `extra_transcript_init_msg`: refer to documentation of `prove` 86 | fn verify>( 87 | verify_key: &Self::VerifyingKey, 88 | public_input: &[E::Fr], 89 | proof: &Self::Proof, 90 | extra_transcript_init_msg: Option>, 91 | ) -> Result<(), Self::Error>; 92 | } 93 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/transcript/rescue.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This module is a defines rescue transcript. 8 | use super::PlonkTranscript; 9 | use crate::{ 10 | errors::PlonkError, 11 | proof_system::structs::{PlookupEvaluations, ProofEvaluations, VerifyingKey}, 12 | }; 13 | use ark_ec::{short_weierstrass_jacobian::GroupAffine, SWModelParameters as SWParam}; 14 | use ark_std::vec::Vec; 15 | use jf_primitives::{ 16 | crhf::{VariableLengthRescueCRHF, CRHF}, 17 | pcs::{prelude::Commitment, CommitmentGroup, PolynomialCommitmentScheme}, 18 | rescue::{RescueParameter, STATE_SIZE}, 19 | }; 20 | use jf_relation::gadgets::ecc::{Point, SWToTEConParam}; 21 | use jf_utils::{bytes_to_field_elements, field_switching, fq_to_fr_with_mask}; 22 | 23 | /// Transcript with rescue hash function. 24 | /// 25 | /// It is currently implemented simply as 26 | /// - an append only vector of field elements 27 | /// - a state that is initialized with 0 28 | /// 29 | /// We keep appending new elements to the transcript vector, 30 | /// and when a challenge is to be generated, 31 | /// we reset the state with the fresh challenge. 32 | /// 33 | /// 1. state: \[F: STATE_SIZE\] = hash(state|transcript) 34 | /// 2. challenge = state\[0\] 35 | /// 3. transcript = vec!\[challenge\] 36 | pub struct RescueTranscript 37 | where 38 | F: RescueParameter, 39 | { 40 | transcript: Vec, 41 | state: [F; STATE_SIZE], 42 | } 43 | 44 | impl PlonkTranscript for RescueTranscript 45 | where 46 | F: RescueParameter + SWToTEConParam, 47 | { 48 | /// Create a new plonk transcript. `_label` is omitted for efficiency. 49 | fn new(_label: &'static [u8]) -> Self { 50 | RescueTranscript { 51 | transcript: Vec::new(), 52 | state: [F::zero(); STATE_SIZE], 53 | } 54 | } 55 | 56 | fn append_vk_and_pub_input>( 57 | &mut self, 58 | vk: &VerifyingKey, 59 | pub_input: &[E::Fr], 60 | ) -> Result<(), PlonkError> 61 | where 62 | E: CommitmentGroup>, 63 | P: SWParam, 64 | { 65 | // to enable a more efficient verifier circuit, we remove 66 | // the following messages (c.f. 
merlin transcript) 67 | // - field_size_in_bits 68 | // - domain size 69 | // - number of inputs 70 | // - wire subsets separators 71 | 72 | // selector commitments 73 | for com in vk.selector_comms.iter() { 74 | // convert the SW form commitments into TE form 75 | let te_point: Point = (&com.0).into(); 76 | self.transcript.push(te_point.get_x()); 77 | self.transcript.push(te_point.get_y()); 78 | } 79 | // sigma commitments 80 | for com in vk.sigma_comms.iter() { 81 | // convert the SW form commitments into TE form 82 | let te_point: Point = (&com.0).into(); 83 | self.transcript.push(te_point.get_x()); 84 | self.transcript.push(te_point.get_y()); 85 | } 86 | // public input 87 | for e in pub_input { 88 | self.transcript.push(field_switching(e)) 89 | } 90 | 91 | Ok(()) 92 | } 93 | 94 | /// Append the message to the transcript. `_label` is omitted for 95 | /// efficiency. 96 | fn append_message(&mut self, _label: &'static [u8], msg: &[u8]) -> Result<(), PlonkError> { 97 | // We remove the labels for better efficiency 98 | 99 | let mut f = bytes_to_field_elements(&msg); 100 | self.transcript.append(&mut f); 101 | Ok(()) 102 | } 103 | 104 | /// Append a single commitment to the transcript. `_label` is omitted for 105 | /// efficiency. 106 | fn append_commitment( 107 | &mut self, 108 | _label: &'static [u8], 109 | comm: &Commitment, 110 | ) -> Result<(), PlonkError> 111 | where 112 | E: CommitmentGroup>, 113 | P: SWParam, 114 | { 115 | // convert the SW form commitments into TE form 116 | let te_point: Point = (&comm.0).into(); 117 | // push the x and y coordinate of comm (in twisted 118 | // edwards form) to the transcript 119 | 120 | self.transcript.push(te_point.get_x()); 121 | self.transcript.push(te_point.get_y()); 122 | Ok(()) 123 | } 124 | 125 | /// Append a challenge to the transcript. `_label` is omitted for 126 | /// efficiency. 127 | fn append_challenge( 128 | &mut self, 129 | _label: &'static [u8], 130 | challenge: &E::Fr, 131 | ) -> Result<(), PlonkError> 132 | where 133 | E: CommitmentGroup, 134 | { 135 | self.transcript.push(field_switching(challenge)); 136 | Ok(()) 137 | } 138 | 139 | fn append_proof_evaluations( 140 | &mut self, 141 | evals: &ProofEvaluations, 142 | ) -> Result<(), PlonkError> { 143 | for e in &evals.wires_evals { 144 | self.transcript.push(field_switching(e)) 145 | } 146 | for e in &evals.wire_sigma_evals { 147 | self.transcript.push(field_switching(e)) 148 | } 149 | self.transcript.push(field_switching(&evals.perm_next_eval)); 150 | Ok(()) 151 | } 152 | 153 | fn append_plookup_evaluations( 154 | &mut self, 155 | evals: &PlookupEvaluations, 156 | ) -> Result<(), PlonkError> { 157 | for eval in evals.evals_vec().iter() { 158 | self.transcript.push(field_switching(eval)); 159 | } 160 | for next_eval in evals.next_evals_vec().iter() { 161 | self.transcript.push(field_switching(next_eval)); 162 | } 163 | Ok(()) 164 | } 165 | 166 | /// Generate the challenge for the current transcript, 167 | /// and then append it to the transcript. `_label` is omitted for 168 | /// efficiency. 169 | fn get_and_append_challenge(&mut self, _label: &'static [u8]) -> Result 170 | where 171 | E: CommitmentGroup, 172 | { 173 | // 1. state: [F: STATE_SIZE] = hash(state|transcript) 174 | // 2. challenge = state[0] in Fr 175 | // 3. 
transcript = Vec::new() 176 | 177 | let input = [self.state.as_ref(), self.transcript.as_ref()].concat(); 178 | let tmp: [F; STATE_SIZE] = VariableLengthRescueCRHF::evaluate(&input)?; 179 | let challenge = fq_to_fr_with_mask::(&tmp[0]); 180 | self.state.copy_from_slice(&tmp); 181 | self.transcript = Vec::new(); 182 | self.transcript.push(field_switching(&challenge)); 183 | 184 | Ok(challenge) 185 | } 186 | } 187 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/transcript/solidity.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This module implements solidity transcript. 8 | use super::PlonkTranscript; 9 | use crate::{constants::KECCAK256_STATE_SIZE, errors::PlonkError}; 10 | use ark_ff::PrimeField; 11 | use ark_std::vec::Vec; 12 | use jf_primitives::pcs::CommitmentGroup; 13 | use sha3::{Digest, Keccak256}; 14 | 15 | /// Transcript with `keccak256` hash function. 16 | /// 17 | /// It is almost identical to `RescueTranscript` except using Solidity's 18 | /// `keccak256` for Solidity-friendly protocols. 19 | /// 20 | /// It is currently implemented simply as 21 | /// - an append only vector of field elements 22 | /// - a state that is initialized with 0 23 | /// 24 | /// We keep appending new elements to the transcript vector, 25 | /// and when a challenge is to be generated, 26 | /// we reset the state with the fresh challenge. 27 | /// 28 | /// 1. state: \[F: STATE_SIZE\] = hash(state|transcript) 29 | /// 2. challenge = state\[0\] 30 | /// 3. transcript = vec!\[challenge\] 31 | pub struct SolidityTranscript { 32 | transcript: Vec, 33 | state: [u8; KECCAK256_STATE_SIZE], // 64 bytes state size 34 | } 35 | 36 | impl PlonkTranscript for SolidityTranscript { 37 | /// Create a new plonk transcript. `label` is omitted for efficiency. 38 | fn new(_label: &'static [u8]) -> Self { 39 | SolidityTranscript { 40 | transcript: Vec::new(), 41 | state: [0u8; KECCAK256_STATE_SIZE], 42 | } 43 | } 44 | 45 | /// Append the message to the transcript. `_label` is omitted for 46 | /// efficiency. 47 | fn append_message(&mut self, _label: &'static [u8], msg: &[u8]) -> Result<(), PlonkError> { 48 | // We remove the labels for better efficiency 49 | self.transcript.extend_from_slice(msg); 50 | Ok(()) 51 | } 52 | 53 | /// Generate the challenge for the current transcript, 54 | /// and then append it to the transcript. `_label` is omitted for 55 | /// efficiency. 56 | fn get_and_append_challenge(&mut self, _label: &'static [u8]) -> Result 57 | where 58 | E: CommitmentGroup, 59 | { 60 | // 1. state = keccak256(state|transcript|0) || keccak256(state|transcript|1) 61 | let input0 = [self.state.as_ref(), self.transcript.as_ref(), &[0u8]].concat(); 62 | let input1 = [self.state.as_ref(), self.transcript.as_ref(), &[1u8]].concat(); 63 | 64 | let mut hasher = Keccak256::new(); 65 | hasher.update(&input0); 66 | let buf0 = hasher.finalize(); 67 | 68 | let mut hasher = Keccak256::new(); 69 | hasher.update(&input1); 70 | let buf1 = hasher.finalize(); 71 | 72 | self.state.copy_from_slice(&[buf0, buf1].concat()); 73 | 74 | // 2. challenge: sample field from random bytes. 
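// Reducing 48 of the 64 state bytes (384 bits) modulo the field order leaves ample
// headroom over the roughly 256-bit scalar fields targeted here, so the bias
// introduced by the modular reduction is negligible.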
75 | let challenge = E::Fr::from_le_bytes_mod_order(&self.state[..48]); 76 | Ok(challenge) 77 | } 78 | } 79 | 80 | #[test] 81 | fn test_solidity_keccak() { 82 | use hex::FromHex; 83 | use sha3::{Digest, Keccak256}; 84 | let message = "the quick brown fox jumps over the lazy dog".as_bytes(); 85 | 86 | let mut hasher = Keccak256::new(); 87 | hasher.update(message); 88 | let output = hasher.finalize(); 89 | 90 | // test example result yanked from smart contract execution 91 | assert_eq!( 92 | output[..], 93 | <[u8; 32]>::from_hex("865bf05cca7ba26fb8051e8366c6d19e21cadeebe3ee6bfa462b5c72275414ec") 94 | .unwrap() 95 | ); 96 | } 97 | -------------------------------------------------------------------------------- /jellyfish/plonk/src/transcript/standard.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This module is a wrapper of the Merlin transcript. 8 | use super::PlonkTranscript; 9 | use crate::errors::PlonkError; 10 | use ark_ff::PrimeField; 11 | use jf_primitives::pcs::CommitmentGroup; 12 | use jf_utils::to_bytes; 13 | use merlin::Transcript; 14 | 15 | /// A wrapper of `merlin::Transcript`. 16 | pub struct StandardTranscript(Transcript); 17 | 18 | impl PlonkTranscript for StandardTranscript { 19 | /// create a new plonk transcript 20 | fn new(label: &'static [u8]) -> Self { 21 | Self(Transcript::new(label)) 22 | } 23 | 24 | // append the message to the transcript 25 | fn append_message(&mut self, label: &'static [u8], msg: &[u8]) -> Result<(), PlonkError> { 26 | self.0.append_message(label, msg); 27 | 28 | Ok(()) 29 | } 30 | 31 | // generate the challenge for the current transcript 32 | // and append it to the transcript 33 | fn get_and_append_challenge(&mut self, label: &'static [u8]) -> Result 34 | where 35 | E: CommitmentGroup, 36 | { 37 | let mut buf = [0u8; 64]; 38 | self.0.challenge_bytes(label, &mut buf); 39 | let challenge = E::Fr::from_le_bytes_mod_order(&buf); 40 | self.0.append_message(label, &to_bytes!(&challenge)?); 41 | Ok(challenge) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /jellyfish/primitives/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jf-primitives" 3 | description = "Cryptographic primitives" 4 | version = { workspace = true } 5 | authors = { workspace = true } 6 | edition = { workspace = true } 7 | license = { workspace = true } 8 | rust-version = { workspace = true } 9 | 10 | [dependencies] 11 | ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 12 | ark-bls12-381 = "0.3.0" 13 | ark-bn254 = "0.3.0" 14 | ark-bw6-761 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 15 | ark-ec = "0.3.0" 16 | ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 17 | ark-ed-on-bls12-381 = "0.3.0" 18 | ark-ed-on-bn254 = "0.3.0" 19 | ark-ff = "0.3.0" 20 | ark-pallas = "0.3.0" 21 | ark-poly = "0.3.0" 22 | ark-poly-commit = "0.3.0" 23 | ark-serialize = "0.3.0" 24 | ark-sponge = "0.3.0" 25 | ark-std = { version = "0.3.0", default-features = false } 26 | blake2 = { version = "0.9", default-features = false } 
27 | ark-vesta = "0.3.0" 28 | blst = "0.3.10" 29 | crypto_box = "0.8.1" 30 | derivative = { version = "2", features = ["use_core"] } 31 | digest = { version = "0.10.1", default-features = false, features = ["alloc"] } 32 | displaydoc = { version = "0.2.3", default-features = false } 33 | espresso-systems-common = { git = "https://github.com/espressosystems/espresso-systems-common", tag = "0.4.0" } 34 | generic-array = { version = "^0.14", default-features = false } 35 | hashbrown = "0.13.2" 36 | itertools = { version = "0.10.1", default-features = false, features = [ "use_alloc" ] } 37 | jf-relation = { path = "../relation", default-features = false } 38 | jf-utils = { path = "../utilities" } 39 | merlin = { version = "3.0.0", default-features = false } 40 | num-bigint = { version = "0.4.3", default-features = false } 41 | num-traits = { version = "0.2.15", default-features = false } 42 | rand_chacha = { version = "0.3.1", default-features = false } 43 | rayon = { version = "1.5.0", optional = true } 44 | serde = { version = "1.0", default-features = false, features = ["derive"] } 45 | sha2 = { version = "0.10.1", default-features = false } 46 | sha3 = { version = "0.10.5", default-features = false } 47 | tagged-base64 = { git = "https://github.com/espressosystems/tagged-base64", tag = "0.2.4" } 48 | typenum = { version = "1.15.0", default-features = false } 49 | zeroize = { version = "1.5", default-features = false } 50 | 51 | [dev-dependencies] 52 | ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 53 | ark-bls12-381 = "0.3.0" 54 | ark-bn254 = "0.3.0" 55 | ark-bw6-761 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 56 | ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 57 | ark-ed-on-bls12-381-bandersnatch = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 58 | ark-ed-on-bn254 = "0.3.0" 59 | bincode = "1.3" 60 | criterion = "0.4.0" 61 | hashbrown = "0.13.1" 62 | rand_core = { version = "^0.6.0", features = ["getrandom"] } 63 | 64 | [[bench]] 65 | name = "merkle_path" 66 | harness = false 67 | 68 | [[bench]] 69 | name = "pcs" 70 | harness = false 71 | 72 | [features] 73 | default = ["parallel"] 74 | std = [] 75 | print-trace = ["ark-std/print-trace"] 76 | parallel = ["ark-ff/parallel", "ark-ec/parallel", "jf-utils/parallel", 77 | "jf-relation/parallel", "rayon" ] 78 | -------------------------------------------------------------------------------- /jellyfish/primitives/benches/merkle_path.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 
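// (Editor's note.) This benchmark builds a two-leaf, height-20 Rescue Merkle tree over
// `Fq` of `ark_ed_on_bls12_381` and measures verification of a single membership proof
// for leaf index 0; the proof itself is produced once, outside the timed closure.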
6 | 7 | #![deny(warnings)] 8 | #[macro_use] 9 | extern crate criterion; 10 | use ark_ed_on_bls12_381::Fq as Fq381; 11 | use ark_std::rand::Rng; 12 | use criterion::Criterion; 13 | use jf_primitives::merkle_tree::{prelude::RescueMerkleTree, MerkleTreeScheme}; 14 | use std::time::Duration; 15 | 16 | const BENCH_NAME: &str = "merkle_path_height_20"; 17 | 18 | fn twenty_hashes(c: &mut Criterion) { 19 | let mut benchmark_group = c.benchmark_group(BENCH_NAME); 20 | benchmark_group.sample_size(10); 21 | benchmark_group.measurement_time(Duration::new(10, 0)); 22 | 23 | let mut rng = ark_std::test_rng(); 24 | 25 | let leaf: Fq381 = rng.gen(); 26 | 27 | let mt = RescueMerkleTree::::from_elems(20, &[leaf, leaf]).unwrap(); 28 | let (_, proof) = mt.lookup(0).expect_ok().unwrap(); 29 | 30 | let num_inputs = 0; 31 | benchmark_group.bench_with_input(BENCH_NAME, &num_inputs, move |b, &_num_inputs| { 32 | b.iter(|| mt.verify(0, &proof).unwrap()) 33 | }); 34 | benchmark_group.finish(); 35 | } 36 | 37 | fn bench(c: &mut Criterion) { 38 | twenty_hashes(c); 39 | } 40 | 41 | criterion_group!(benches, bench); 42 | 43 | criterion_main!(benches); 44 | -------------------------------------------------------------------------------- /jellyfish/primitives/benches/pcs.rs: -------------------------------------------------------------------------------- 1 | use ark_bls12_381::{Bls12_381, Fr}; 2 | use ark_ff::UniformRand; 3 | use ark_poly::{DenseMultilinearExtension, MultilinearExtension}; 4 | use ark_std::{rc::Rc, test_rng}; 5 | use jf_primitives::pcs::{ 6 | prelude::{MultilinearKzgPCS, PCSError, PolynomialCommitmentScheme}, 7 | StructuredReferenceString, 8 | }; 9 | use std::time::Instant; 10 | 11 | fn main() -> Result<(), PCSError> { 12 | bench_pcs() 13 | } 14 | 15 | fn bench_pcs() -> Result<(), PCSError> { 16 | let mut rng = test_rng(); 17 | 18 | // normal polynomials 19 | let uni_params = MultilinearKzgPCS::::gen_srs_for_testing(&mut rng, 18)?; 20 | 21 | for nv in 4..19 { 22 | let repetition = if nv < 10 { 23 | 100 24 | } else if nv < 20 { 25 | 50 26 | } else { 27 | 10 28 | }; 29 | 30 | let poly = Rc::new(DenseMultilinearExtension::rand(nv, &mut rng)); 31 | let (ml_ck, ml_vk) = uni_params.multilinear_srs.trim(nv)?; 32 | let (uni_ck, uni_vk) = uni_params.univariate_srs.trim(nv)?; 33 | let ck = (ml_ck, uni_ck); 34 | let vk = (ml_vk, uni_vk); 35 | 36 | let point: Vec<_> = (0..nv).map(|_| Fr::rand(&mut rng)).collect(); 37 | 38 | // commit 39 | let com = { 40 | let start = Instant::now(); 41 | for _ in 0..repetition { 42 | let _commit = MultilinearKzgPCS::commit(&ck, &poly)?; 43 | } 44 | 45 | println!( 46 | "KZG commit for {} variables: {} ns", 47 | nv, 48 | start.elapsed().as_nanos() / repetition as u128 49 | ); 50 | 51 | MultilinearKzgPCS::commit(&ck, &poly)? 52 | }; 53 | 54 | // open 55 | let (proof, value) = { 56 | let start = Instant::now(); 57 | for _ in 0..repetition { 58 | let _open = MultilinearKzgPCS::open(&ck, &poly, &point)?; 59 | } 60 | 61 | println!( 62 | "KZG open for {} variables: {} ns", 63 | nv, 64 | start.elapsed().as_nanos() / repetition as u128 65 | ); 66 | MultilinearKzgPCS::open(&ck, &poly, &point)? 
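// (Editor's note.) `open` returns the evaluation proof together with the claimed
// value `poly(point)`; the pair is the value of this block and is exactly what
// `verify` consumes in the next step.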
67 | }; 68 | 69 | // verify 70 | { 71 | let start = Instant::now(); 72 | for _ in 0..repetition { 73 | assert!(MultilinearKzgPCS::verify( 74 | &vk, &com, &point, &value, &proof 75 | )?); 76 | } 77 | println!( 78 | "KZG verify for {} variables: {} ns", 79 | nv, 80 | start.elapsed().as_nanos() / repetition as u128 81 | ); 82 | } 83 | 84 | println!("===================================="); 85 | } 86 | 87 | Ok(()) 88 | } 89 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/circuit/commitment.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Circuit implementation of the commitment scheme. 8 | 9 | use crate::{ 10 | rescue::{RescueParameter, CRHF_RATE}, 11 | utils::pad_with, 12 | }; 13 | use ark_std::vec; 14 | use jf_relation::{errors::CircuitError, Circuit, PlonkCircuit, Variable}; 15 | 16 | use super::rescue::RescueNativeGadget; 17 | 18 | /// Circuit implementation of the commitment scheme. 19 | pub trait CommitmentGadget { 20 | // Commitment scheme 21 | /// Commitment function. 22 | /// * `input` - input variables, 23 | /// * `blinding` - blinding variable 24 | /// * `returns` a variable that refers to the commitment value 25 | /// The underlying the commitment instance is bound to a specific length. 26 | /// Hence input length must match it. 27 | fn commit(&mut self, input: &[Variable], blinding: Variable) -> Result; 28 | } 29 | 30 | impl CommitmentGadget for PlonkCircuit 31 | where 32 | F: RescueParameter, 33 | { 34 | fn commit(&mut self, input: &[Variable], blinding: Variable) -> Result { 35 | let mut msg = vec![blinding]; 36 | msg.extend_from_slice(input); 37 | pad_with(&mut msg, CRHF_RATE, self.zero()); 38 | Ok(RescueNativeGadget::::rescue_sponge_no_padding(self, &msg, 1)?[0]) 39 | } 40 | } 41 | 42 | #[cfg(test)] 43 | mod tests { 44 | use crate::{ 45 | circuit::commitment::CommitmentGadget, 46 | commitment::{CommitmentScheme, FixedLengthRescueCommitment}, 47 | }; 48 | use ark_bls12_377::Fq as Fq377; 49 | use ark_ed_on_bls12_377::Fq as FqEd377; 50 | use ark_ed_on_bls12_381::Fq as FqEd381; 51 | use ark_ed_on_bls12_381_bandersnatch::Fq as FqEd381b; 52 | use ark_ed_on_bn254::Fq as FqEd254; 53 | use ark_ff::UniformRand; 54 | use ark_std::vec::Vec; 55 | use itertools::Itertools; 56 | use jf_relation::{Circuit, PlonkCircuit, Variable}; 57 | 58 | const TEST_INPUT_LEN: usize = 10; 59 | const TEST_INPUT_LEN_PLUS_ONE: usize = 10 + 1; 60 | 61 | macro_rules! 
test_commit_circuit { 62 | ($base_field:tt) => { 63 | let mut circuit: PlonkCircuit<$base_field> = PlonkCircuit::new_turbo_plonk(); 64 | let mut prng = ark_std::test_rng(); 65 | 66 | let blinding = $base_field::rand(&mut prng); 67 | let blinding_var = circuit.create_variable(blinding).unwrap(); 68 | 69 | let mut data = [$base_field::from(0u64); TEST_INPUT_LEN]; 70 | for i in 0..TEST_INPUT_LEN { 71 | data[i] = $base_field::rand(&mut prng); 72 | } 73 | let data_vars: Vec = data 74 | .iter() 75 | .map(|&x| circuit.create_variable(x).unwrap()) 76 | .collect_vec(); 77 | 78 | let expected_commitment = FixedLengthRescueCommitment::< 79 | $base_field, 80 | TEST_INPUT_LEN, 81 | TEST_INPUT_LEN_PLUS_ONE, 82 | >::commit(&data, Some(&blinding)) 83 | .unwrap(); 84 | 85 | let commitment_var = circuit.commit(&data_vars, blinding_var).unwrap(); 86 | 87 | // Check commitment output consistency 88 | assert_eq!( 89 | expected_commitment, 90 | circuit.witness(commitment_var).unwrap() 91 | ); 92 | 93 | // Check constraints 94 | assert!(circuit.check_circuit_satisfiability(&[]).is_ok()); 95 | *circuit.witness_mut(commitment_var) = $base_field::from(1_u32); 96 | assert!(circuit.check_circuit_satisfiability(&[]).is_err()); 97 | }; 98 | } 99 | #[test] 100 | fn test_commit_circuit() { 101 | test_commit_circuit!(FqEd254); 102 | test_commit_circuit!(FqEd377); 103 | test_commit_circuit!(FqEd381); 104 | test_commit_circuit!(FqEd381b); 105 | test_commit_circuit!(Fq377); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/circuit/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Circuit implementation of various crypto primitives. 8 | pub mod commitment; 9 | pub mod elgamal; 10 | pub mod merkle_tree; 11 | pub mod prf; 12 | pub mod rescue; 13 | pub mod signature; 14 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/circuit/prf.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Circuit implementation of a PRF. 8 | 9 | use crate::rescue::RescueParameter; 10 | use jf_relation::{errors::CircuitError, PlonkCircuit, Variable}; 11 | 12 | use super::rescue::RescueNativeGadget; 13 | 14 | /// Circuit implementation of a PRF. 
15 | pub trait PRFGadget { 16 | /// PRF many to one 17 | /// * `key` - key variable 18 | /// * `input` - input variables, 19 | /// * `returns` variables that refers to the output 20 | fn eval_prf(&mut self, key: Variable, input: &[Variable]) -> Result; 21 | } 22 | 23 | impl PRFGadget for PlonkCircuit 24 | where 25 | F: RescueParameter, 26 | { 27 | fn eval_prf(&mut self, key: Variable, input: &[Variable]) -> Result { 28 | RescueNativeGadget::::rescue_full_state_keyed_sponge_with_zero_padding(self, key, input) 29 | } 30 | } 31 | 32 | #[cfg(test)] 33 | mod tests { 34 | use super::PRFGadget; 35 | use crate::prf::{RescuePRF, PRF}; 36 | use ark_bls12_377::Fq as Fq377; 37 | use ark_ed_on_bls12_377::Fq as FqEd377; 38 | use ark_ed_on_bls12_381::Fq as FqEd381; 39 | use ark_ed_on_bls12_381_bandersnatch::Fq as FqEd381b; 40 | use ark_ed_on_bn254::Fq as FqEd254; 41 | use ark_ff::UniformRand; 42 | use ark_std::vec::Vec; 43 | use jf_relation::{Circuit, PlonkCircuit, Variable}; 44 | 45 | macro_rules! test_prf_circuit { 46 | ($base_field:tt) => { 47 | let mut circuit: PlonkCircuit<$base_field> = PlonkCircuit::new_turbo_plonk(); 48 | let mut prng = ark_std::test_rng(); 49 | let rand_scalar = $base_field::rand(&mut prng); 50 | let key_var = circuit.create_variable(rand_scalar).unwrap(); 51 | let input_len = 10; 52 | let mut data = [$base_field::from(0u8); 10]; 53 | for i in 0..input_len { 54 | data[i] = $base_field::rand(&mut prng); 55 | } 56 | let data_vars: Vec = data 57 | .iter() 58 | .map(|&x| circuit.create_variable(x).unwrap()) 59 | .collect(); 60 | 61 | let expected_prf_output = 62 | RescuePRF::<$base_field, 10, 1>::evaluate(&rand_scalar, &data).unwrap(); 63 | let prf_var = circuit.eval_prf(key_var, &data_vars).unwrap(); 64 | 65 | // Check prf output consistency 66 | assert_eq!(expected_prf_output[0], circuit.witness(prf_var).unwrap()); 67 | 68 | // Check constraints 69 | assert!(circuit.check_circuit_satisfiability(&[]).is_ok()); 70 | *circuit.witness_mut(prf_var) = $base_field::from(1_u32); 71 | assert!(circuit.check_circuit_satisfiability(&[]).is_err()); 72 | }; 73 | } 74 | 75 | #[test] 76 | fn test_prf_circuit() { 77 | test_prf_circuit!(FqEd254); 78 | test_prf_circuit!(FqEd377); 79 | test_prf_circuit!(FqEd381); 80 | test_prf_circuit!(FqEd381b); 81 | test_prf_circuit!(Fq377); 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/circuit/signature/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Circuit implementation of a signature schemes. 8 | //! Currently this module only implements Schnorr signature scheme over EC. 9 | 10 | pub mod schnorr; 11 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/commitment.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implements a rescue hash based commitment scheme. 
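//
// (Editor's sketch, mirroring the test at the bottom of this file; `F` stands for any
// field implementing `RescueParameter`.) Typical usage of the scheme defined below,
// committing to a 3-element message under a blinding factor:
//
//     let c = FixedLengthRescueCommitment::<F, 3, 4>::commit(&input, Some(&blind))?;
//     FixedLengthRescueCommitment::<F, 3, 4>::verify(&input, Some(&blind), &c)?;
//
// Internally, `commit` prepends the blinding factor to the input and hashes the
// resulting 4-element message with the fixed-length Rescue CRHF.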
8 | 9 | use ark_std::marker::PhantomData; 10 | 11 | use crate::{ 12 | crhf::{FixedLengthRescueCRHF, CRHF}, 13 | errors::PrimitivesError, 14 | rescue::RescueParameter, 15 | }; 16 | use ark_std::{ 17 | borrow::Borrow, 18 | fmt::Debug, 19 | hash::Hash, 20 | string::{String, ToString}, 21 | UniformRand, 22 | }; 23 | 24 | /// A trait for cryptographic commitment scheme 25 | pub trait CommitmentScheme { 26 | /// Input to the commitment 27 | type Input; 28 | /// The type of output commitment value 29 | type Output: Clone + Debug + PartialEq + Eq + Hash; 30 | /// The type of the hiding/blinding factor 31 | type Randomness: Clone + Debug + PartialEq + Eq + UniformRand; 32 | 33 | /// Commit algorithm that takes `input` and blinding randomness `r` 34 | /// (optional for hiding commitment schemes), outputs a commitment. 35 | fn commit>( 36 | input: T, 37 | r: Option<&Self::Randomness>, 38 | ) -> Result; 39 | 40 | /// Verify algorithm that output `Ok` if accepted, or `Err` if rejected. 41 | fn verify>( 42 | input: T, 43 | r: Option<&Self::Randomness>, 44 | comm: &Self::Output, 45 | ) -> Result<(), PrimitivesError>; 46 | } 47 | 48 | #[derive(Debug, Default, Clone, PartialEq, Eq)] 49 | /// Rescue-based Commitment instance for fixed-length input 50 | /// 51 | /// ## Note 52 | /// the current ugly existence of `INPUT_LEN_PLUS_ONE` is due to unstable 53 | /// feature of using const generic in expression (namely can't use `INPUT_LEN + 54 | /// 1` in code). 55 | // FIXME: (alex) when `feature(generic_const_exprs)` is stable, we should remove 56 | // the third generic param. See more: https://github.com/rust-lang/rust/issues/76560 57 | pub struct FixedLengthRescueCommitment< 58 | F: RescueParameter, 59 | const INPUT_LEN: usize, 60 | const INPUT_LEN_PLUS_ONE: usize, 61 | >(PhantomData); 62 | 63 | impl CommitmentScheme 64 | for FixedLengthRescueCommitment 65 | { 66 | type Input = [F; INPUT_LEN]; 67 | type Output = F; 68 | type Randomness = F; 69 | 70 | fn commit>( 71 | input: T, 72 | r: Option<&Self::Randomness>, 73 | ) -> Result { 74 | let mut msg = [F::zero(); INPUT_LEN_PLUS_ONE]; 75 | msg[0] = *r.ok_or_else(|| { 76 | PrimitivesError::ParameterError("Expecting a blinding factor".to_string()) 77 | })?; 78 | msg[1..INPUT_LEN_PLUS_ONE].copy_from_slice(&input.borrow()[..(INPUT_LEN)]); 79 | 80 | Ok(FixedLengthRescueCRHF::::evaluate(&msg)?[0]) 81 | } 82 | 83 | fn verify>( 84 | input: T, 85 | r: Option<&Self::Randomness>, 86 | comm: &Self::Output, 87 | ) -> Result<(), PrimitivesError> { 88 | if ::commit(input, r)? == *comm { 89 | Ok(()) 90 | } else { 91 | Err(PrimitivesError::VerificationError(String::from( 92 | "Commitment verification failed", 93 | ))) 94 | } 95 | } 96 | } 97 | 98 | #[cfg(test)] 99 | mod test { 100 | use crate::{ 101 | commitment::{CommitmentScheme, FixedLengthRescueCommitment}, 102 | rescue::{sponge::RescueCRHF, CRHF_RATE}, 103 | }; 104 | use ark_bls12_377::Fq as Fq377; 105 | use ark_ed_on_bls12_377::Fq as FqEd377; 106 | use ark_ed_on_bls12_381::Fq as FqEd381; 107 | use ark_ed_on_bls12_381_bandersnatch::Fq as FqEd381b; 108 | use ark_ed_on_bn254::Fq as FqEd254; 109 | use ark_ff::UniformRand; 110 | use ark_std::vec; 111 | 112 | macro_rules! 
test_commit { 113 | ($tr:tt) => { 114 | let mut prng = ark_std::test_rng(); 115 | 116 | let input = [$tr::from(1u64), $tr::from(2u64), $tr::from(3u64)]; 117 | let blind = $tr::rand(&mut prng); 118 | 119 | let c = FixedLengthRescueCommitment::<$tr, 3, 4>::commit(&input, Some(&blind)).unwrap(); 120 | assert!( 121 | FixedLengthRescueCommitment::<$tr, 3, 4>::verify(&input, Some(&blind), &c).is_ok() 122 | ); 123 | // test for correctness 124 | let mut msg = vec![blind]; 125 | msg.extend_from_slice(&input); 126 | if (input.len() + 1) % CRHF_RATE == 0 { 127 | assert_eq!(c, RescueCRHF::sponge_no_padding(&msg, 1).unwrap()[0]) 128 | } else { 129 | assert_eq!(c, RescueCRHF::sponge_with_zero_padding(&msg, 1)[0]) 130 | } 131 | 132 | // smaller input size 133 | let bad_input = [input[0], input[1]]; 134 | assert!( 135 | FixedLengthRescueCommitment::<$tr, 2, 3>::verify(&bad_input, Some(&blind), &c) 136 | .is_err() 137 | ); 138 | // bad blinding factor 139 | let bad_blind = blind + $tr::from(1u8); 140 | assert!( 141 | FixedLengthRescueCommitment::<$tr, 3, 4>::verify(&input, Some(&bad_blind), &c) 142 | .is_err() 143 | ); 144 | // bad input 145 | let bad_input = [$tr::from(2u64), $tr::from(1u64), $tr::from(3u64)]; 146 | assert!( 147 | FixedLengthRescueCommitment::<$tr, 3, 4>::verify(&bad_input, Some(&blind), &c) 148 | .is_err() 149 | ); 150 | }; 151 | } 152 | 153 | #[test] 154 | fn test_commit() { 155 | test_commit!(FqEd254); 156 | test_commit!(FqEd377); 157 | test_commit!(FqEd381); 158 | test_commit!(FqEd381b); 159 | test_commit!(Fq377); 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/constants.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Constants for curve specific parameters. 8 | 9 | /// ciphersuite identifier for schnorr signature 10 | pub const CS_ID_SCHNORR: &str = "SCHNORR_WITH_RESCUE_HASH_v01"; 11 | 12 | /// ciphersuite identifier for BLS signature, see: 13 | /// 14 | pub const CS_ID_BLS_MIN_SIG: &str = "BLS_SIG_BLS12381G1_XMD:SHA-256_SSWU_RO_NUL_"; 15 | 16 | /// Size in bytes of a secret key in our BLS signature scheme. 17 | pub const BLS_SIG_SK_SIZE: usize = 32; 18 | /// Size in bytes of a signature in our BLS signature scheme. 19 | pub const BLS_SIG_SIGNATURE_SIZE: usize = 96; 20 | /// Size in bytes of a compressed signature in our BLS signature scheme. 21 | pub const BLS_SIG_COMPRESSED_SIGNATURE_SIZE: usize = 48; 22 | /// Size in bytes of a verification key in our BLS signature scheme. 23 | pub const BLS_SIG_PK_SIZE: usize = 192; 24 | /// Size in bytes of a compressed verification key in our BLS signature scheme. 25 | pub const BLS_SIG_COMPRESSED_PK_SIZE: usize = 96; 26 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/crhf.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Collision-resistant Hash Functions (CRHF) definitions and implementations. 
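//
// (Editor's sketch; `F` is any `RescueParameter` field.) The fixed-length variant
// below hashes a const-generic input array to a const-generic output array, e.g.
// three field elements down to one:
//
//     let out: [F; 1] = FixedLengthRescueCRHF::<F, 3, 1>::evaluate(&[a, b, c])?;
//
// When `INPUT_LEN` is not a multiple of the rescue rate (3), the input is zero-padded;
// the variable-length variant instead uses bit padding ("1" then zeros), so the two
// generally produce different digests on the same input.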
8 | 9 | use ark_std::{ 10 | borrow::Borrow, fmt::Debug, hash::Hash, marker::PhantomData, string::ToString, vec::Vec, 11 | }; 12 | 13 | use crate::{ 14 | errors::PrimitivesError, 15 | rescue::{sponge::RescueCRHF, RescueParameter, CRHF_RATE}, 16 | }; 17 | 18 | /// A trait for CRHF 19 | /// (based on ark-primitives' definition, but self-declared for minimal 20 | /// dependency and easier future upgradability.) 21 | pub trait CRHF { 22 | /// Input to the CRHF 23 | type Input; 24 | /// Output of the CRHF 25 | // FIXME: (alex) wait until arkwork 0.4.0 to add the following: 26 | // + Default + CanonicalSerialize + CanonicalDeserialize; 27 | // right now, const-generic are not supported yet. 28 | type Output: Clone + PartialEq + Eq + Hash + Debug; 29 | 30 | /// evaluate inputs and return hash output 31 | fn evaluate>(input: T) -> Result; 32 | } 33 | 34 | #[derive(Debug, Clone)] 35 | /// A rescue-sponge-based CRHF with fixed-input size (if not multiple of 3 will 36 | /// get auto-padded) and variable-output size 37 | pub struct FixedLengthRescueCRHF< 38 | F: RescueParameter, 39 | const INPUT_LEN: usize, 40 | const OUTPUT_LEN: usize, 41 | >(PhantomData); 42 | 43 | impl CRHF 44 | for FixedLengthRescueCRHF 45 | { 46 | type Input = [F; INPUT_LEN]; 47 | type Output = [F; OUTPUT_LEN]; 48 | 49 | /// ## Padding 50 | /// if `input` length is not a multiple of `CRHF_RATE`, then it will be 51 | /// padded. By default, we use "zero padding"-style where as many "0" as 52 | /// required are added. 53 | fn evaluate>(input: T) -> Result { 54 | let mut output = [F::zero(); OUTPUT_LEN]; 55 | 56 | let res = match INPUT_LEN % CRHF_RATE { 57 | 0 => RescueCRHF::::sponge_no_padding(input.borrow(), OUTPUT_LEN)?, 58 | _ => RescueCRHF::::sponge_with_zero_padding(input.borrow(), OUTPUT_LEN), 59 | }; 60 | if res.len() != OUTPUT_LEN { 61 | return Err(PrimitivesError::InternalError( 62 | "Unexpected rescue sponge return length".to_string(), 63 | )); 64 | } 65 | 66 | output.copy_from_slice(&res[..]); 67 | Ok(output) 68 | } 69 | } 70 | 71 | #[derive(Debug, Clone)] 72 | /// A rescue-sponge-based CRHF with variable-input and variable-output size 73 | pub struct VariableLengthRescueCRHF(PhantomData); 74 | 75 | impl CRHF for VariableLengthRescueCRHF { 76 | type Input = Vec; 77 | type Output = [F; OUTPUT_LEN]; 78 | 79 | /// ## Padding 80 | /// if `input` length is not a multiple of `CRHF_RATE`, then it will be 81 | /// padded. By default, we use "bit padding"-style where "1" is always 82 | /// appended, then as many "0" as required are added for the overall 83 | /// length to be a multiple of `CRHF_RATE`. 84 | fn evaluate>(input: T) -> Result { 85 | let mut output = [F::zero(); OUTPUT_LEN]; 86 | let res = RescueCRHF::::sponge_with_bit_padding(input.borrow(), OUTPUT_LEN); 87 | if res.len() != OUTPUT_LEN { 88 | return Err(PrimitivesError::InternalError( 89 | "Unexpected rescue sponge return length".to_string(), 90 | )); 91 | } 92 | output.copy_from_slice(&res[..]); 93 | Ok(output) 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Error types. 
8 | 9 | use crate::rescue::errors::RescueError; 10 | use ark_serialize::SerializationError; 11 | use ark_std::{ 12 | format, 13 | string::{String, ToString}, 14 | }; 15 | use blst::BLST_ERROR; 16 | use displaydoc::Display; 17 | 18 | /// A `enum` specifying the possible failure modes of the primitives. 19 | #[derive(Debug, Display)] 20 | pub enum PrimitivesError { 21 | /// Unsuccessful verification for proof or signature, {0} 22 | VerificationError(String), 23 | /// Bad parameter in function call, {0} 24 | ParameterError(String), 25 | #[rustfmt::skip] 26 | /// ‼ ️Internal error! Please report to Crypto Team immediately!\nMessage: {0} 27 | InternalError(String), 28 | /// Deserialization failed: {0} 29 | DeserializationError(SerializationError), 30 | /// Decryption failed: {0} 31 | FailedDecryption(String), 32 | /// Rescue Error: {0} 33 | RescueError(RescueError), 34 | /// Inconsistent Structure error, {0} 35 | InconsistentStructureError(String), 36 | } 37 | 38 | impl From for PrimitivesError { 39 | fn from(e: RescueError) -> Self { 40 | Self::RescueError(e) 41 | } 42 | } 43 | 44 | impl From for PrimitivesError { 45 | fn from(e: SerializationError) -> Self { 46 | Self::DeserializationError(e) 47 | } 48 | } 49 | 50 | impl From for PrimitivesError { 51 | fn from(e: BLST_ERROR) -> Self { 52 | match e { 53 | BLST_ERROR::BLST_SUCCESS => { 54 | Self::InternalError("Expecting an error, but got a sucess.".to_string()) 55 | }, 56 | BLST_ERROR::BLST_VERIFY_FAIL => Self::VerificationError(format!("{e:?}")), 57 | _ => Self::ParameterError(format!("{e:?}")), 58 | } 59 | } 60 | } 61 | 62 | impl ark_std::error::Error for PrimitivesError {} 63 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/hash_to_group/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Module for hash to various elliptic curve groups 8 | 9 | mod short_weierstrass; 10 | mod twisted_edwards; 11 | 12 | pub use short_weierstrass::SWHashToGroup; 13 | pub use twisted_edwards::TEHashToGroup; 14 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/hash_to_group/short_weierstrass.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Hash to Elliptic Curve implementation of 8 | 9 | use crate::errors::PrimitivesError; 10 | use ark_ec::{ 11 | short_weierstrass_jacobian::{GroupAffine, GroupProjective}, 12 | AffineCurve, SWModelParameters, 13 | }; 14 | use ark_std::{ 15 | rand::{Rng, SeedableRng}, 16 | UniformRand, 17 | }; 18 | use digest::Digest; 19 | use rand_chacha::ChaCha20Rng; 20 | use sha2::Sha256; 21 | 22 | /// Trait definition and default implementation for hash to group functions for 23 | /// Short Weierstrass Curves. 24 | pub trait SWHashToGroup: SWModelParameters + Sized { 25 | /// Hash to Group point, using sha2-512 function 26 | /// hashing to G1 point of `C: ProjectiveCurve`. 27 | // Default implementation implements a naive solution via rejection sampling. 
28 | // Slow, and non-constant time. 29 | // 30 | // For specific curves we may want to overload it with a more efficient 31 | // algorithm, such as IETF BLS draft. 32 | fn hash_to_group>( 33 | data: B, 34 | cs_id: B, 35 | ) -> Result, PrimitivesError> { 36 | let mut hasher = Sha256::new(); 37 | hasher.update([cs_id.as_ref(), data.as_ref()].concat()); 38 | let mut seed = [0u8; 32]; 39 | seed.copy_from_slice(hasher.finalize().as_ref()); 40 | let mut rng = ChaCha20Rng::from_seed(seed); 41 | loop { 42 | let x = Self::BaseField::rand(&mut rng); 43 | // a boolean flag to decide if y is positive or not 44 | let y_flag = rng.gen(); 45 | if let Some(p) = GroupAffine::::get_point_from_x(x, y_flag) { 46 | return Ok(p.mul_by_cofactor_to_projective()); 47 | } 48 | } 49 | } 50 | } 51 | 52 | impl SWHashToGroup for ark_bls12_381::g1::Parameters { 53 | // TODO: 54 | // overload hash to group with the method in 55 | // 56 | } 57 | 58 | impl SWHashToGroup for ark_bls12_377::g1::Parameters { 59 | // TODO: 60 | // overload hash to group with the method in 61 | // 62 | } 63 | 64 | #[cfg(test)] 65 | mod test { 66 | use super::*; 67 | use ark_std::vec; 68 | 69 | #[test] 70 | fn test_hash_to_group() { 71 | test_hash_to_group_helper::(); 72 | test_hash_to_group_helper::(); 73 | } 74 | 75 | fn test_hash_to_group_helper() { 76 | let data = vec![1u8, 2, 3, 4, 5]; 77 | let _g1 = 78 |
<P as SWHashToGroup>
::hash_to_group::<&[u8]>(data.as_ref(), "bls signature".as_ref()) 79 | .unwrap(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/hash_to_group/twisted_edwards.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Hash to Elliptic Curve implementation of 8 | 9 | use crate::errors::PrimitivesError; 10 | use ark_ec::{ 11 | twisted_edwards_extended::{GroupAffine, GroupProjective}, 12 | AffineCurve, TEModelParameters, 13 | }; 14 | use ark_std::{ 15 | rand::{Rng, SeedableRng}, 16 | UniformRand, 17 | }; 18 | use digest::Digest; 19 | use rand_chacha::ChaCha20Rng; 20 | use sha2::Sha256; 21 | 22 | /// Trait definition and default implementation for hash to group functions for 23 | /// Twisted Edwards Curves. 24 | pub trait TEHashToGroup: TEModelParameters + Sized { 25 | /// Hash to Group point, using sha2-512 function 26 | /// hashing to G1 point of `C: ProjectiveCurve`. 27 | // Default implementation implements a naive solution via rejection sampling. 28 | // Slow, and non-constant time. 29 | // 30 | // For specific curves we may want to overload it with a more efficient 31 | // algorithm, such as IETF BLS draft. 32 | fn hash_to_group>( 33 | data: B, 34 | cs_id: B, 35 | ) -> Result, PrimitivesError> { 36 | let mut hasher = Sha256::new(); 37 | hasher.update([cs_id.as_ref(), data.as_ref()].concat()); 38 | let mut seed = [0u8; 32]; 39 | seed.copy_from_slice(hasher.finalize().as_ref()); 40 | let mut rng = ChaCha20Rng::from_seed(seed); 41 | loop { 42 | let x = Self::BaseField::rand(&mut rng); 43 | // a boolean flag to decide if y is positive or not 44 | let y_flag = rng.gen(); 45 | if let Some(p) = GroupAffine::::get_point_from_x(x, y_flag) { 46 | return Ok(p.mul_by_cofactor_to_projective()); 47 | } 48 | } 49 | } 50 | } 51 | 52 | impl TEHashToGroup for ark_ed_on_bls12_377::EdwardsParameters { 53 | // TODO: 54 | // overload hash to group with the method in 55 | // 56 | } 57 | 58 | impl TEHashToGroup for ark_ed_on_bls12_381::EdwardsParameters { 59 | // TODO: 60 | // overload hash to group with the method in 61 | // 62 | } 63 | 64 | #[cfg(test)] 65 | mod test { 66 | use super::*; 67 | use ark_std::vec; 68 | 69 | #[test] 70 | fn test_hash_to_group() { 71 | test_hash_to_group_helper::(); 72 | test_hash_to_group_helper::(); 73 | } 74 | 75 | fn test_hash_to_group_helper() { 76 | let data = vec![1u8, 2, 3, 4, 5]; 77 | let _g1 = 78 |
<P as TEHashToGroup>
::hash_to_group::<&[u8]>(data.as_ref(), "Jubjub curves".as_ref()) 79 | .unwrap(); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Crate implements various cryptography primitives, as 8 | //! well as the plonk circuit implementation of those primitives. 9 | 10 | #![cfg_attr(not(feature = "std"), no_std)] 11 | #![deny(warnings)] 12 | #![deny(missing_docs)] 13 | #[cfg(test)] 14 | extern crate std; 15 | 16 | #[macro_use] 17 | extern crate derivative; 18 | 19 | pub mod aead; 20 | pub mod circuit; 21 | pub mod commitment; 22 | pub mod constants; 23 | pub mod crhf; 24 | pub mod elgamal; 25 | pub mod errors; 26 | pub mod hash_to_group; 27 | pub mod merkle_tree; 28 | pub mod pasta; 29 | pub mod pcs; 30 | pub mod prf; 31 | pub mod rescue; 32 | pub mod scalars_n_bases; 33 | pub mod signatures; 34 | pub mod vrf; 35 | 36 | pub(crate) mod utils; 37 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/merkle_tree/examples.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Provides sample instantiations of merkle tree. 8 | //! E.g. Sparse merkle tree with BigUInt index. 9 | 10 | use super::{append_only::MerkleTree, prelude::RescueHash, DigestAlgorithm, Element, Index}; 11 | use crate::rescue::{sponge::RescueCRHF, RescueParameter}; 12 | use ark_ff::Field; 13 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; 14 | use sha3::{Digest, Sha3_256}; 15 | use typenum::U3; 16 | 17 | /// Element type for interval merkle tree 18 | #[derive(PartialEq, Eq, Copy, Clone)] 19 | pub struct Interval(pub F, pub F); 20 | // impl Element for Interval {} 21 | 22 | impl DigestAlgorithm, u64, F> for RescueHash { 23 | fn digest(data: &[F]) -> F { 24 | RescueCRHF::::sponge_no_padding(data, 1).unwrap()[0] 25 | } 26 | 27 | fn digest_leaf(pos: &u64, elem: &Interval) -> F { 28 | let data = [F::from(*pos), elem.0, elem.1]; 29 | RescueCRHF::::sponge_no_padding(&data, 1).unwrap()[0] 30 | } 31 | } 32 | 33 | /// Interval merkle tree instantiation for interval merkle tree using Rescue 34 | /// hash function. 
35 | pub type IntervalMerkleTree = MerkleTree, RescueHash, u64, U3, F>; 36 | 37 | /// Update the array length here 38 | #[derive(Default, Eq, PartialEq, Clone, Copy, Debug, Ord, PartialOrd, Hash)] 39 | pub struct Sha3Node([u8; 32]); 40 | 41 | impl AsRef<[u8]> for Sha3Node { 42 | fn as_ref(&self) -> &[u8] { 43 | &self.0 44 | } 45 | } 46 | 47 | impl CanonicalSerialize for Sha3Node { 48 | fn serialize(&self, mut writer: W) -> Result<(), SerializationError> { 49 | writer.write_all(&self.0)?; 50 | Ok(()) 51 | } 52 | 53 | fn serialized_size(&self) -> usize { 54 | 32 55 | } 56 | } 57 | impl CanonicalDeserialize for Sha3Node { 58 | fn deserialize(mut reader: R) -> Result { 59 | let mut ret = [0u8; 32]; 60 | reader.read_exact(&mut ret)?; 61 | Ok(Sha3Node(ret)) 62 | } 63 | } 64 | 65 | /// Wrapper for SHA3_512 hash function 66 | pub struct Sha3Digest(); 67 | 68 | impl DigestAlgorithm for Sha3Digest { 69 | fn digest(data: &[Sha3Node]) -> Sha3Node { 70 | let mut hasher = Sha3_256::new(); 71 | for value in data { 72 | hasher.update(value); 73 | } 74 | Sha3Node(hasher.finalize().into()) 75 | } 76 | 77 | fn digest_leaf(_pos: &I, _elem: &E) -> Sha3Node { 78 | // Serialize and hash 79 | todo!() 80 | } 81 | } 82 | 83 | /// Merkle tree using SHA3 hash 84 | pub type SHA3MerkleTree = MerkleTree; 85 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/merkle_tree/prelude.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Prelude 8 | 9 | pub use crate::{ 10 | impl_to_traversal_path_biguint, impl_to_traversal_path_primitives, 11 | merkle_tree::{ 12 | append_only::MerkleTree, universal_merkle_tree::UniversalMerkleTree, 13 | AppendableMerkleTreeScheme, DigestAlgorithm, Element, ForgetableMerkleTreeScheme, 14 | ForgetableUniversalMerkleTreeScheme, Index, LookupResult, MerkleCommitment, 15 | MerkleTreeScheme, NodeValue, ToTraversalPath, UniversalMerkleTreeScheme, 16 | }, 17 | }; 18 | 19 | use crate::rescue::{sponge::RescueCRHF, RescueParameter}; 20 | use ark_std::marker::PhantomData; 21 | use num_bigint::BigUint; 22 | use typenum::U3; 23 | 24 | use super::light_weight::LightWeightMerkleTree; 25 | 26 | /// Wrapper for rescue hash function 27 | #[derive(Clone, Copy, Debug, PartialEq, Eq)] 28 | pub struct RescueHash { 29 | phantom_f: PhantomData, 30 | } 31 | 32 | impl DigestAlgorithm for RescueHash { 33 | fn digest(data: &[F]) -> F { 34 | RescueCRHF::::sponge_no_padding(data, 1).unwrap()[0] 35 | } 36 | 37 | fn digest_leaf(pos: &u64, elem: &F) -> F { 38 | let data = [F::zero(), F::from(*pos), *elem]; 39 | RescueCRHF::::sponge_no_padding(&data, 1).unwrap()[0] 40 | } 41 | } 42 | 43 | /// A standard merkle tree using RATE-3 rescue hash function 44 | pub type RescueMerkleTree = MerkleTree, u64, U3, F>; 45 | 46 | /// A standard light merkle tree using RATE-3 rescue hash function 47 | pub type RescueLightWeightMerkleTree = LightWeightMerkleTree, u64, U3, F>; 48 | 49 | impl DigestAlgorithm for RescueHash { 50 | fn digest(data: &[F]) -> F { 51 | RescueCRHF::::sponge_no_padding(data, 1).unwrap()[0] 52 | } 53 | 54 | fn digest_leaf(pos: &BigUint, elem: &F) -> F { 55 | let data = [F::zero(), F::from(pos.clone()), *elem]; 56 | RescueCRHF::::sponge_no_padding(&data, 1).unwrap()[0] 57 | } 58 | } 59 | 60 | impl 
DigestAlgorithm for RescueHash { 61 | fn digest(data: &[F]) -> F { 62 | RescueCRHF::::sponge_no_padding(data, 1).unwrap()[0] 63 | } 64 | 65 | fn digest_leaf(pos: &F, elem: &F) -> F { 66 | let data = [F::zero(), *pos, *elem]; 67 | RescueCRHF::::sponge_no_padding(&data, 1).unwrap()[0] 68 | } 69 | } 70 | 71 | /// Example instantiation of a SparseMerkleTree indexed by I 72 | pub type RescueSparseMerkleTree = UniversalMerkleTree, I, U3, F>; 73 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pasta/mod.rs: -------------------------------------------------------------------------------- 1 | //! Trait implementations for the Pasta curves. 2 | 3 | use ark_pallas::Affine as PallasAffine; 4 | use ark_pallas::Projective as PallasProjective; 5 | use ark_pallas::{Fq, Fr}; 6 | use ark_vesta::Affine as VestaAffine; 7 | use ark_vesta::Projective as VestaProjective; 8 | 9 | // this is analogous to the ark_pallas::PallasParameters struct 10 | /// A struct to hang the `CommitmentGroup` trait for Pallas on. 11 | #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] 12 | pub struct PallasGroup; 13 | 14 | impl crate::pcs::CommitmentGroup for PallasGroup { 15 | type Fr = Fr; 16 | type G1Affine = PallasAffine; 17 | type G1Projective = PallasProjective; 18 | type Fq = Fq; 19 | } 20 | 21 | // this is analogous to the ark_pallas::VestaParameters struct 22 | /// A struct to hang the `CommitmentGroup` trait for Vesta on. 23 | #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] 24 | pub struct VestaGroup; 25 | 26 | impl crate::pcs::CommitmentGroup for VestaGroup { 27 | type Fr = Fq; 28 | type G1Affine = VestaAffine; 29 | type G1Projective = VestaProjective; 30 | type Fq = Fr; 31 | } 32 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pcs/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Error module. 8 | 9 | use super::transcript::TranscriptError; 10 | use ark_serialize::SerializationError; 11 | use ark_std::string::String; 12 | use displaydoc::Display; 13 | 14 | /// A `enum` specifying the possible failure modes of the PCS. 
15 | #[derive(Display, Debug)] 16 | pub enum PCSError { 17 | /// Invalid Prover: {0} 18 | InvalidProver(String), 19 | /// Invalid Verifier: {0} 20 | InvalidVerifier(String), 21 | /// Invalid Proof: {0} 22 | InvalidProof(String), 23 | /// Invalid parameters: {0} 24 | InvalidParameters(String), 25 | /// An error during (de)serialization: {0} 26 | SerializationError(SerializationError), 27 | /// Transcript error {0} 28 | TranscriptError(TranscriptError), 29 | /// Error from the internal ark_poly_commit primitive 30 | ArkPolyCommitError(ark_poly_commit::Error), 31 | } 32 | 33 | impl From for PCSError { 34 | fn from(e: ark_serialize::SerializationError) -> Self { 35 | Self::SerializationError(e) 36 | } 37 | } 38 | 39 | impl From for PCSError { 40 | fn from(e: TranscriptError) -> Self { 41 | Self::TranscriptError(e) 42 | } 43 | } 44 | 45 | impl From for PCSError { 46 | fn from(e: ark_poly_commit::Error) -> Self { 47 | Self::ArkPolyCommitError(e) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pcs/prelude.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Prelude 8 | pub use crate::pcs::{ 9 | errors::PCSError, 10 | multilinear_kzg::{ 11 | srs::{MultilinearProverParam, MultilinearUniversalParams, MultilinearVerifierParam}, 12 | util::{compute_qx_degree, get_batched_nv, merge_polynomials}, 13 | MultilinearKzgBatchProof, MultilinearKzgPCS, MultilinearKzgProof, 14 | }, 15 | structs::Commitment, 16 | univariate_kzg::{ 17 | srs::{UnivariateProverParam, UnivariateUniversalParams, UnivariateVerifierParam}, 18 | UnivariateKzgBatchProof, UnivariateKzgPCS, UnivariateKzgProof, 19 | }, 20 | PolynomialCommitmentScheme, StructuredReferenceString, UVPCS, 21 | }; 22 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pcs/structs.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; 8 | 9 | use super::CommitmentGroup; 10 | 11 | #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] 12 | #[derivative( 13 | Default(bound = ""), 14 | Hash(bound = ""), 15 | Clone(bound = ""), 16 | Copy(bound = ""), 17 | Debug(bound = ""), 18 | PartialEq(bound = ""), 19 | Eq(bound = "") 20 | )] 21 | /// A commitment is an Affine point. 22 | pub struct Commitment( 23 | /// the actual commitment is an affine point. 24 | pub E::G1Affine, 25 | ); 26 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pcs/transcript.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Module for PolyIOP transcript. 
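//
// (Editor's sketch of the crate-internal API defined below.) A prover or verifier
// would drive the transcript roughly as follows, with `F: PrimeField` and `comm` any
// `CanonicalSerialize` value:
//
//     let mut transcript = IOPTranscript::<F>::new(b"my protocol");
//     transcript.append_serializable_element(b"commitment", &comm)?;
//     let alpha: F = transcript.get_and_append_challenge(b"alpha")?;
//
// Note that challenge generation on a still-empty transcript is rejected with
// `TranscriptError::InvalidTranscript`.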
8 | 9 | mod errors { 10 | use ark_std::string::String; 11 | use displaydoc::Display; 12 | 13 | /// A `enum` specifying the possible failure modes of the Transcript. 14 | #[derive(Display, Debug)] 15 | pub enum TranscriptError { 16 | /// Invalid Transcript: {0} 17 | InvalidTranscript(String), 18 | /// An error during (de)serialization: {0} 19 | SerializationError(ark_serialize::SerializationError), 20 | } 21 | 22 | impl From for TranscriptError { 23 | fn from(e: ark_serialize::SerializationError) -> Self { 24 | Self::SerializationError(e) 25 | } 26 | } 27 | } 28 | 29 | pub(crate) use errors::TranscriptError; 30 | 31 | use ark_ff::PrimeField; 32 | use ark_serialize::CanonicalSerialize; 33 | use ark_std::{marker::PhantomData, string::ToString}; 34 | use jf_utils::to_bytes; 35 | use merlin::Transcript; 36 | 37 | /// An IOP transcript consists of a Merlin transcript and a flag `is_empty` to 38 | /// indicate that if the transcript is empty. 39 | /// 40 | /// It is associated with a prime field `F` for which challenges are generated 41 | /// over. 42 | /// 43 | /// The `is_empty` flag is useful in the case where a protocol is initiated by 44 | /// the verifier, in which case the prover should start its phase by receiving a 45 | /// `non-empty` transcript. 46 | #[derive(Clone)] 47 | pub(crate) struct IOPTranscript { 48 | transcript: Transcript, 49 | is_empty: bool, 50 | #[doc(hidden)] 51 | phantom: PhantomData, 52 | } 53 | 54 | // TODO: merge this with jf_plonk::transcript 55 | impl IOPTranscript { 56 | /// Create a new IOP transcript. 57 | pub fn new(label: &'static [u8]) -> Self { 58 | Self { 59 | transcript: Transcript::new(label), 60 | is_empty: true, 61 | phantom: PhantomData::default(), 62 | } 63 | } 64 | 65 | /// Append the message to the transcript. 66 | pub(crate) fn append_message( 67 | &mut self, 68 | label: &'static [u8], 69 | msg: &[u8], 70 | ) -> Result<(), TranscriptError> { 71 | self.transcript.append_message(label, msg); 72 | self.is_empty = false; 73 | Ok(()) 74 | } 75 | 76 | /// Append the message to the transcript. 77 | pub(crate) fn append_serializable_element( 78 | &mut self, 79 | label: &'static [u8], 80 | group_elem: &S, 81 | ) -> Result<(), TranscriptError> { 82 | self.append_message(label, &to_bytes!(group_elem)?) 83 | } 84 | 85 | /// Generate the challenge from the current transcript 86 | /// and append it to the transcript. 87 | /// 88 | /// The output field element is statistical uniform as long 89 | /// as the field has a size less than 2^384. 90 | pub(crate) fn get_and_append_challenge( 91 | &mut self, 92 | label: &'static [u8], 93 | ) -> Result { 94 | // we need to reject when transcript is empty 95 | if self.is_empty { 96 | return Err(TranscriptError::InvalidTranscript( 97 | "transcript is empty".to_string(), 98 | )); 99 | } 100 | 101 | let mut buf = [0u8; 64]; 102 | self.transcript.challenge_bytes(label, &mut buf); 103 | let challenge = F::from_le_bytes_mod_order(&buf); 104 | self.append_serializable_element(label, &challenge)?; 105 | Ok(challenge) 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/pcs/univariate_kzg/srs.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! 
Implementing Structured Reference Strings for univariate polynomial KZG 8 | 9 | use crate::pcs::{PCSError, StructuredReferenceString, WithMaxDegree}; 10 | use ark_ec::{msm::FixedBaseMSM, AffineCurve, PairingEngine, ProjectiveCurve}; 11 | use ark_ff::PrimeField; 12 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read, SerializationError, Write}; 13 | use ark_std::{ 14 | end_timer, 15 | rand::{CryptoRng, RngCore}, 16 | start_timer, vec, 17 | vec::Vec, 18 | One, UniformRand, 19 | }; 20 | 21 | /// `UniversalParams` are the universal parameters for the KZG10 scheme. 22 | // Adapted from 23 | // https://github.com/arkworks-rs/poly-commit/blob/master/src/kzg10/data_structures.rs#L20 24 | #[derive(Debug, Clone, Eq, PartialEq, CanonicalSerialize, CanonicalDeserialize, Default)] 25 | pub struct UnivariateUniversalParams { 26 | /// Group elements of the form `{ \beta^i G }`, where `i` ranges from 0 to 27 | /// `degree`. 28 | pub powers_of_g: Vec, 29 | /// The generator of G2. 30 | pub h: E::G2Affine, 31 | /// \beta times the above generator of G2. 32 | pub beta_h: E::G2Affine, 33 | } 34 | 35 | impl UnivariateUniversalParams { 36 | /// Returns the maximum supported degree 37 | pub fn max_degree(&self) -> usize { 38 | self.powers_of_g.len() 39 | } 40 | } 41 | 42 | impl WithMaxDegree for UnivariateUniversalParams { 43 | fn max_degree(&self) -> usize { 44 | self.max_degree() 45 | } 46 | } 47 | 48 | /// `UnivariateProverParam` is used to generate a proof 49 | #[derive(CanonicalSerialize, CanonicalDeserialize, Clone, Debug, Eq, PartialEq, Default)] 50 | pub struct UnivariateProverParam { 51 | /// Parameters 52 | pub powers_of_g: Vec, 53 | } 54 | 55 | /// `UnivariateVerifierParam` is used to check evaluation proofs for a given 56 | /// commitment. 57 | #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] 58 | #[derivative( 59 | Default(bound = ""), 60 | Clone(bound = ""), 61 | Copy(bound = ""), 62 | Debug(bound = ""), 63 | PartialEq(bound = ""), 64 | Eq(bound = "") 65 | )] 66 | pub struct UnivariateVerifierParam { 67 | /// The generator of G1. 68 | pub g: E::G1Affine, 69 | /// The generator of G2. 70 | pub h: E::G2Affine, 71 | /// \beta times the above generator of G2. 72 | pub beta_h: E::G2Affine, 73 | } 74 | 75 | impl StructuredReferenceString for UnivariateUniversalParams { 76 | type ProverParam = UnivariateProverParam; 77 | type VerifierParam = UnivariateVerifierParam; 78 | 79 | /// Extract the prover parameters from the public parameters. 80 | fn extract_prover_param(&self, supported_size: usize) -> Self::ProverParam { 81 | let powers_of_g = self.powers_of_g[..=supported_size].to_vec(); 82 | 83 | Self::ProverParam { powers_of_g } 84 | } 85 | 86 | /// Extract the verifier parameters from the public parameters. 87 | fn extract_verifier_param(&self, _supported_size: usize) -> Self::VerifierParam { 88 | Self::VerifierParam { 89 | g: self.powers_of_g[0], 90 | h: self.h, 91 | beta_h: self.beta_h, 92 | } 93 | } 94 | 95 | /// Trim the universal parameters to specialize the public parameters 96 | /// for univariate polynomials to the given `supported_size`, and 97 | /// returns committer key and verifier key. 
`supported_size` should 98 | /// be in range `1..params.len()` 99 | fn trim( 100 | &self, 101 | supported_size: usize, 102 | ) -> Result<(Self::ProverParam, Self::VerifierParam), PCSError> { 103 | let powers_of_g = self.powers_of_g[..=supported_size].to_vec(); 104 | 105 | let pk = Self::ProverParam { powers_of_g }; 106 | let vk = Self::VerifierParam { 107 | g: self.powers_of_g[0], 108 | h: self.h, 109 | beta_h: self.beta_h, 110 | }; 111 | Ok((pk, vk)) 112 | } 113 | 114 | /// Build SRS for testing. 115 | /// WARNING: THIS FUNCTION IS FOR TESTING PURPOSE ONLY. 116 | /// THE OUTPUT SRS SHOULD NOT BE USED IN PRODUCTION. 117 | fn gen_srs_for_testing( 118 | rng: &mut R, 119 | max_degree: usize, 120 | ) -> Result { 121 | let setup_time = start_timer!(|| format!("KZG10::Setup with degree {}", max_degree)); 122 | let beta = E::Fr::rand(rng); 123 | let g = E::G1Projective::rand(rng); 124 | let h = E::G2Projective::rand(rng); 125 | 126 | let mut powers_of_beta = vec![E::Fr::one()]; 127 | 128 | let mut cur = beta; 129 | for _ in 0..max_degree { 130 | powers_of_beta.push(cur); 131 | cur *= β 132 | } 133 | 134 | let window_size = FixedBaseMSM::get_mul_window_size(max_degree + 1); 135 | 136 | let scalar_bits = E::Fr::size_in_bits(); 137 | let g_time = start_timer!(|| "Generating powers of G"); 138 | // TODO: parallelization 139 | let g_table = FixedBaseMSM::get_window_table(scalar_bits, window_size, g); 140 | let powers_of_g = FixedBaseMSM::multi_scalar_mul::( 141 | scalar_bits, 142 | window_size, 143 | &g_table, 144 | &powers_of_beta, 145 | ); 146 | end_timer!(g_time); 147 | 148 | let powers_of_g = E::G1Projective::batch_normalization_into_affine(&powers_of_g); 149 | 150 | let h = h.into_affine(); 151 | let beta_h = h.mul(beta).into_affine(); 152 | 153 | let pp = Self { 154 | powers_of_g, 155 | h, 156 | beta_h, 157 | }; 158 | end_timer!(setup_time); 159 | Ok(pp) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/prf.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This module implements a pseudo random function that is derived from 8 | //! the rescue hash function. 
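//
// (Editor's sketch, following the test at the bottom of this file.) The PRF is keyed
// by a single field element and maps a fixed-length input array to a fixed-length
// output array via the full-state-keyed Rescue sponge:
//
//     let out: [F; 15] = RescuePRF::<F, 1, 15>::evaluate(&seed, &input)?;
//
// which agrees with calling
// `RescuePRFCore::full_state_keyed_sponge_with_zero_padding(&seed, &input, 15)` directly.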
9 | 10 | use crate::{ 11 | errors::PrimitivesError, 12 | rescue::{sponge::RescuePRFCore, RescueParameter}, 13 | }; 14 | use ark_std::{ 15 | borrow::Borrow, 16 | fmt::Debug, 17 | marker::PhantomData, 18 | rand::{CryptoRng, RngCore}, 19 | UniformRand, 20 | }; 21 | 22 | /// Trait for Pseudo-random Functions 23 | pub trait PRF { 24 | // TODO: (alex) add `CanonicalDeserialize` to `Input`, `CanonicalSerialize` to 25 | // `Output`, both to `Seed`, when we move to arkworks 0.4.0 26 | /// Input to the PRF 27 | type Input: Clone; 28 | /// Output of the PRF 29 | type Output: Clone + Debug + PartialEq + Eq; 30 | /// The random seed/key that index a specific function from the PRF 31 | /// ensembles 32 | type Seed: Clone + Debug + Default + UniformRand; 33 | 34 | /// Compute PRF output with a user-provided randomly generated `seed` 35 | fn evaluate, I: Borrow>( 36 | seed: S, 37 | input: I, 38 | ) -> Result; 39 | 40 | /// same as [`Self::evaluate`] except that we generate a fresh random seed 41 | /// for the evaluation 42 | fn evaluate_with_rand_seed>( 43 | rng: &mut R, 44 | input: T, 45 | ) -> Result<(Self::Seed, Self::Output), PrimitivesError> { 46 | let seed = Self::Seed::rand(rng); 47 | let output = Self::evaluate(&seed, input)?; 48 | Ok((seed, output)) 49 | } 50 | } 51 | 52 | #[derive(Debug, Clone)] 53 | /// A rescue-based PRF that leverages on Full State Keyed (FSK) sponge 54 | /// construction 55 | pub struct RescuePRF( 56 | PhantomData, 57 | ); 58 | 59 | impl PRF 60 | for RescuePRF 61 | { 62 | type Input = [F; INPUT_LEN]; 63 | type Output = [F; OUTPUT_LEN]; 64 | type Seed = F; 65 | 66 | fn evaluate, I: Borrow>( 67 | seed: S, 68 | input: I, 69 | ) -> Result { 70 | let mut output = [F::zero(); OUTPUT_LEN]; 71 | output.clone_from_slice(&RescuePRFCore::full_state_keyed_sponge_with_zero_padding( 72 | seed.borrow(), 73 | input.borrow(), 74 | OUTPUT_LEN, 75 | )?); 76 | Ok(output) 77 | } 78 | } 79 | #[cfg(test)] 80 | mod tests { 81 | use crate::{ 82 | prf::{RescuePRF, PRF}, 83 | rescue::sponge::RescuePRFCore, 84 | }; 85 | use ark_bls12_377::Fq as Fq377; 86 | use ark_ed_on_bls12_377::Fq as FqEd377; 87 | use ark_ed_on_bls12_381::Fq as FqEd381; 88 | use ark_ed_on_bn254::Fq as FqEd254; 89 | use ark_std::UniformRand; 90 | macro_rules! test_prf { 91 | ($tr:tt) => { 92 | let mut rng = ark_std::test_rng(); 93 | let seed = $tr::rand(&mut rng); 94 | let input = [$tr::from(1u8)]; 95 | 96 | assert!(RescuePRF::<$tr, 1, 15>::evaluate(&seed, &input).is_ok()); 97 | // check correctness 98 | assert_eq!( 99 | RescuePRF::<$tr, 1, 15>::evaluate(&seed, &input) 100 | .unwrap() 101 | .to_vec(), 102 | RescuePRFCore::full_state_keyed_sponge_with_zero_padding(&seed, &input, 15) 103 | .unwrap() 104 | ); 105 | }; 106 | } 107 | 108 | #[test] 109 | pub fn test_prf() { 110 | test_prf!(FqEd254); 111 | test_prf!(FqEd377); 112 | test_prf!(FqEd381); 113 | test_prf!(Fq377); 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/rescue/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Error types. 
8 | 9 | // using `displaydoc` instead of `thiserror`, see 10 | // https://github.com/dtolnay/thiserror/pull/64#issuecomment-735805334 11 | // `thiserror` does not support #![no_std] 12 | 13 | use ark_std::string::String; 14 | use displaydoc::Display; 15 | 16 | /// Various error modes. 17 | #[derive(Debug, Display, Eq, PartialEq)] 18 | pub enum RescueError { 19 | /// Bad parameter in function call, {0} 20 | ParameterError(String), 21 | } 22 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/rescue/rescue_constants/bls12_381_base.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! !!!Warning!!! 8 | //! The parameters in this file are mock parameters for testing purpose. 9 | //! They are not correct and shall not be used for anything else 10 | 11 | use crate::rescue::{RescueParameter, ROUNDS, STATE_SIZE}; 12 | use ark_bls12_381::Fq; 13 | 14 | /// This is a dummy implementation of Rescue parameters 15 | /// to satisfy trait bound for Fq. 16 | /// This code should not be used for any other purpose. 17 | impl RescueParameter for Fq { 18 | const A: u64 = 5; 19 | const A_INV: &'static [u64] = &[0, 0, 0, 0, 0, 0]; 20 | 21 | const MDS_LE: [[&'static [u8]; STATE_SIZE]; STATE_SIZE] = 22 | [[&[0u8; 32]; STATE_SIZE]; STATE_SIZE]; 23 | 24 | const INIT_VEC_LE: [&'static [u8]; STATE_SIZE] = [&[0u8; 32]; STATE_SIZE]; 25 | 26 | const KEY_INJECTION_LE: [[&'static [u8]; 4]; 2 * ROUNDS] = [[&[0u8; 32]; 4]; 2 * ROUNDS]; 27 | 28 | const PERMUTATION_ROUND_KEYS: [[&'static [u8]; 4]; 25] = [[&[0u8; 32]; 4]; 25]; 29 | } 30 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/rescue/rescue_constants/bn254_base.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! !!!Warning!!! 8 | //! The parameters in this file are mock parameters for testing purpose. 9 | //! They are not correct and shall not be used for anything else 10 | 11 | use crate::rescue::{RescueParameter, ROUNDS, STATE_SIZE}; 12 | use ark_bn254::Fq; 13 | 14 | /// This is a dummy implementation of Rescue parameters 15 | /// to satisfy trait bound for Fq. 16 | /// This code should not be used for any other purpose. 
17 | impl RescueParameter for Fq { 18 | const A: u64 = 5; 19 | const A_INV: &'static [u64] = &[0, 0, 0, 0]; 20 | 21 | const MDS_LE: [[&'static [u8]; STATE_SIZE]; STATE_SIZE] = 22 | [[&[0u8; 32]; STATE_SIZE]; STATE_SIZE]; 23 | 24 | const INIT_VEC_LE: [&'static [u8]; STATE_SIZE] = [&[0u8; 32]; STATE_SIZE]; 25 | 26 | const KEY_INJECTION_LE: [[&'static [u8]; 4]; 2 * ROUNDS] = [[&[0u8; 32]; 4]; 2 * ROUNDS]; 27 | 28 | const PERMUTATION_ROUND_KEYS: [[&'static [u8]; 4]; 25] = [[&[0u8; 32]; 4]; 25]; 29 | } 30 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/rescue/rescue_constants/bw6_761_base.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! !!!Warning!!! 8 | //! The parameters in this file are mock parameters for testing purpose. 9 | //! They are not correct and shall not be used for anything else 10 | 11 | use crate::rescue::{RescueParameter, ROUNDS, STATE_SIZE}; 12 | use ark_bw6_761::Fq; 13 | 14 | /// This is a dummy implementation of Rescue parameters 15 | /// to satisfy trait bound for Fq. 16 | /// This code should not be used for any other purpose. 17 | impl RescueParameter for Fq { 18 | const A: u64 = 5; 19 | 20 | const A_INV: &'static [u64] = &[0, 0, 0, 0, 0, 0]; 21 | 22 | const MDS_LE: [[&'static [u8]; STATE_SIZE]; STATE_SIZE] = 23 | [[&[0u8; 32]; STATE_SIZE]; STATE_SIZE]; 24 | 25 | const INIT_VEC_LE: [&'static [u8]; STATE_SIZE] = [&[0u8; 32]; STATE_SIZE]; 26 | 27 | const KEY_INJECTION_LE: [[&'static [u8]; 4]; 2 * ROUNDS] = [[&[0u8; 32]; 4]; 2 * ROUNDS]; 28 | 29 | const PERMUTATION_ROUND_KEYS: [[&'static [u8]; 4]; 25] = [[&[0u8; 32]; 4]; 25]; 30 | } 31 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/rescue/rescue_constants/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | mod bls12_377_base; 8 | mod bls12_381_base; 9 | mod bn254_base; 10 | mod bw6_761_base; 11 | mod ed_on_bls12_377_base; 12 | mod ed_on_bls12_381_base; 13 | mod ed_on_bn254_base; 14 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/scalars_n_bases.rs: -------------------------------------------------------------------------------- 1 | //! A vector representation of bases and corresponding scalars 2 | use crate::pcs::CommitmentGroup; 3 | use ark_ec::msm::VariableBaseMSM; 4 | use ark_ff::vec::Vec; 5 | use ark_ff::PrimeField; 6 | use ark_std::Zero; 7 | use hashbrown::HashMap; 8 | 9 | /// The vector representation of bases and corresponding scalars. 10 | #[derive(Debug, Clone)] 11 | pub struct ScalarsAndBases { 12 | /// The scalars and bases collection 13 | pub base_scalar_map: HashMap, 14 | } 15 | 16 | impl ScalarsAndBases { 17 | /// Create an empty collection of scalars and bases. 18 | pub fn new() -> Self { 19 | Self { 20 | base_scalar_map: HashMap::new(), 21 | } 22 | } 23 | /// Insert a base point and the corresponding scalar. 
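    // A small usage sketch for this collection (see `push`, `merge` and
    // `multi_scalar_mul` below), assuming `Bn254` implements `CommitmentGroup`;
    // the scalars and the single base used here are arbitrary example values:
    //
    //     use ark_bn254::{Bn254, Fr, G1Affine};
    //     use ark_ec::AffineCurve;
    //
    //     let g = G1Affine::prime_subgroup_generator();
    //     let mut acc = ScalarsAndBases::<Bn254>::new();
    //     // Pushes to the same base are accumulated into a single scalar.
    //     acc.push(Fr::from(2u64), g);
    //     acc.push(Fr::from(3u64), g);
    //     // One variable-base MSM evaluates the whole linear combination (here 5 * g).
    //     let sum = acc.multi_scalar_mul();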
24 | pub fn push(&mut self, scalar: E::Fr, base: E::G1Affine) { 25 | let entry_scalar = self.base_scalar_map.entry(base).or_insert_with(E::Fr::zero); 26 | *entry_scalar += scalar; 27 | } 28 | 29 | /// Add a list of scalars and bases into self, where each scalar is 30 | /// multiplied by a constant c. 31 | pub fn merge(&mut self, c: E::Fr, scalars_and_bases: &Self) { 32 | for (base, scalar) in &scalars_and_bases.base_scalar_map { 33 | self.push(c * scalar, *base); 34 | } 35 | } 36 | /// Compute the multi-scalar multiplication. 37 | pub fn multi_scalar_mul(&self) -> E::G1Projective { 38 | let (bases, scalars): (Vec<_>, Vec<_>) = self 39 | .base_scalar_map 40 | .iter() 41 | .map(|(base, scalar)| (*base, scalar.into_repr())) 42 | .unzip(); 43 | VariableBaseMSM::multi_scalar_mul(&bases, &scalars) 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/signatures/mod.rs: -------------------------------------------------------------------------------- 1 | //! Module for signature primitives. 2 | 3 | use crate::errors::PrimitivesError; 4 | use ark_std::rand::{CryptoRng, RngCore}; 5 | 6 | pub mod bls; 7 | pub mod schnorr; 8 | pub use bls::BLSSignatureScheme; 9 | use core::fmt::Debug; 10 | pub use schnorr::SchnorrSignatureScheme; 11 | use serde::{Deserialize, Serialize}; 12 | use zeroize::Zeroize; 13 | /// Trait definition for a signature scheme. 14 | // A signature scheme is associated with a hash function H that is 15 | // to be used for challenge generation. 16 | // FIXME: update H bound once hash-api is merged. 17 | pub trait SignatureScheme { 18 | /// Ciphersuite Identifier 19 | const CS_ID: &'static str; 20 | 21 | /// Signing key. 22 | type SigningKey: Debug 23 | + Clone 24 | + Send 25 | + Sync 26 | + Zeroize 27 | + for<'a> Deserialize<'a> 28 | + Serialize 29 | + PartialEq 30 | + Eq; 31 | 32 | /// Verification key 33 | type VerificationKey: Debug 34 | + Clone 35 | + Send 36 | + Sync 37 | + for<'a> Deserialize<'a> 38 | + Serialize 39 | + PartialEq 40 | + Eq; 41 | 42 | /// Public Parameter 43 | type PublicParameter; 44 | 45 | /// Signature 46 | type Signature: Debug 47 | + Clone 48 | + Send 49 | + Sync 50 | + for<'a> Deserialize<'a> 51 | + Serialize 52 | + PartialEq 53 | + Eq; 54 | 55 | /// A message is &\[MessageUnit\] 56 | type MessageUnit: Debug + Clone + Send + Sync; 57 | 58 | /// generate public parameters from RNG. 59 | /// If the RNG is not presented, use the default group generator. 60 | // FIXME: the API looks a bit strange when the default generator is used. 61 | // For example: 62 | // `S::param_gen::(None)` 63 | // wheere `StdRng` is redundent. 64 | fn param_gen( 65 | prng: Option<&mut R>, 66 | ) -> Result; 67 | 68 | /// Sample a pair of keys. 69 | fn key_gen( 70 | pp: &Self::PublicParameter, 71 | prng: &mut R, 72 | ) -> Result<(Self::SigningKey, Self::VerificationKey), PrimitivesError>; 73 | 74 | /// Sign a message with the signing key 75 | fn sign>( 76 | pp: &Self::PublicParameter, 77 | sk: &Self::SigningKey, 78 | msg: M, 79 | prng: &mut R, 80 | ) -> Result; 81 | 82 | /// Verify a signature. 83 | fn verify>( 84 | pp: &Self::PublicParameter, 85 | vk: &Self::VerificationKey, 86 | msg: M, 87 | sig: &Self::Signature, 88 | ) -> Result<(), PrimitivesError>; 89 | } 90 | 91 | /// Trait for aggregatable signatures. 
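// A compact sketch of the `SignatureScheme` flow defined above, mirroring the
// `sign_and_verify` helper in the test module below. BLS is used as the concrete
// scheme and the byte-string message is an arbitrary example; any implementor of
// the trait is driven the same way:
//
//     use ark_std::test_rng;
//
//     let rng = &mut test_rng();
//     let pp = BLSSignatureScheme::param_gen(Some(rng)).unwrap();
//     let (sk, vk) = BLSSignatureScheme::key_gen(&pp, rng).unwrap();
//     let msg = b"an example message".to_vec();
//     let sig = BLSSignatureScheme::sign(&pp, &sk, &msg, rng).unwrap();
//     assert!(BLSSignatureScheme::verify(&pp, &vk, &msg, &sig).is_ok());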
92 | pub trait AggregateableSignatureSchemes: SignatureScheme { 93 | // TODO: APIs for aggregateable signatures 94 | } 95 | 96 | #[cfg(test)] 97 | mod tests { 98 | use super::*; 99 | use ark_std::{rand::prelude::StdRng, test_rng}; 100 | 101 | pub(crate) fn sign_and_verify(message: &[S::MessageUnit]) { 102 | let rng = &mut test_rng(); 103 | let parameters = S::param_gen(Some(rng)).unwrap(); 104 | let (sk, pk) = S::key_gen(¶meters, rng).unwrap(); 105 | let sig = S::sign(¶meters, &sk, message, rng).unwrap(); 106 | assert!(S::verify(¶meters, &pk, message, &sig).is_ok()); 107 | 108 | let parameters = S::param_gen::(None).unwrap(); 109 | let (sk, pk) = S::key_gen(¶meters, rng).unwrap(); 110 | let sig = S::sign(¶meters, &sk, message, rng).unwrap(); 111 | assert!(S::verify(¶meters, &pk, message, &sig).is_ok()); 112 | } 113 | 114 | pub(crate) fn failed_verification( 115 | message: &[S::MessageUnit], 116 | bad_message: &[S::MessageUnit], 117 | ) { 118 | let rng = &mut test_rng(); 119 | let parameters = S::param_gen(Some(rng)).unwrap(); 120 | let (sk, pk) = S::key_gen(¶meters, rng).unwrap(); 121 | let sig = S::sign(¶meters, &sk, message, rng).unwrap(); 122 | assert!(S::verify(¶meters, &pk, bad_message, &sig).is_err()); 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | use crate::{elgamal, signatures::schnorr}; 8 | use ark_ec::{models::TEModelParameters, ProjectiveCurve, TEModelParameters as Parameters}; 9 | use ark_ff::{FpParameters, PrimeField}; 10 | use ark_std::vec::Vec; 11 | use jf_relation::Variable; 12 | 13 | impl From<&schnorr::VerKey

<P>> for (F, F) 14 | where 15 | F: PrimeField, 16 | P: Parameters<BaseField = F>, 17 | { 18 | fn from(vk: &schnorr::VerKey<P>) -> Self { 19 | let point = vk.0.into_affine(); 20 | (point.x, point.y) 21 | } 22 | } 23 | 24 | impl<P> From<&elgamal::EncKey<P>> for (P::BaseField, P::BaseField) 25 | where 26 | P: Parameters, 27 | { 28 | fn from(pk: &elgamal::EncKey<P>

) -> Self { 29 | let point = pk.key.into_affine(); 30 | (point.x, point.y) 31 | } 32 | } 33 | 34 | #[inline] 35 | pub(crate) fn pad_with(vec: &mut Vec, multiple: usize, var: Variable) { 36 | let len = vec.len(); 37 | let new_len = if len % multiple == 0 { 38 | len 39 | } else { 40 | len + multiple - len % multiple 41 | }; 42 | vec.resize(new_len, var); 43 | } 44 | 45 | #[inline] 46 | pub(crate) fn field_byte_len() -> usize { 47 | ((F::Params::MODULUS_BITS + 7) / 8) as usize 48 | } 49 | 50 | #[inline] 51 | pub(crate) fn field_bit_len() -> usize { 52 | F::Params::MODULUS_BITS as usize 53 | } 54 | 55 | #[inline] 56 | pub(crate) fn challenge_bit_len() -> usize { 57 | // Our challenge is of size 248 bits 58 | // This is enough for a soundness error of 2^-128 59 | (field_byte_len::() - 1) << 3 60 | } 61 | 62 | #[inline] 63 | pub(crate) fn curve_cofactor() -> u64 { 64 | P::COFACTOR[0] 65 | } 66 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/vrf/blsvrf.rs: -------------------------------------------------------------------------------- 1 | //! BLS signature based VRF 2 | use super::Vrf; 3 | use crate::{ 4 | errors::PrimitivesError, 5 | signatures::{ 6 | bls::{BLSSignKey, BLSSignature, BLSVerKey}, 7 | BLSSignatureScheme, SignatureScheme, 8 | }, 9 | }; 10 | use ark_std::{ 11 | boxed::Box, 12 | rand::{CryptoRng, RngCore}, 13 | vec::Vec, 14 | }; 15 | use digest::{Digest, DynDigest}; 16 | use sha2::{Sha256, Sha512}; 17 | 18 | /// Supported Cipher Suites for BLS VRF. 19 | #[allow(non_camel_case_types)] 20 | #[derive(Debug)] 21 | pub enum BLSVRFCipherSuite { 22 | /// using blst library and VRF output from SHA256 hashing 23 | VRF_BLS_12_381_SHA256, 24 | /// using blst library and VRF output from SHA512 hashing 25 | VRF_BLS_12_381_SHA512, 26 | } 27 | 28 | /// BLS VRF scheme. 29 | /// Optimized for signature size, i.e.: PK in G2 and sig in G1 30 | pub struct BLSVRFScheme { 31 | hasher: Box, 32 | } 33 | 34 | impl BLSVRFScheme { 35 | /// Creates a new BLS VRF instance with the given ciphersuite. 36 | pub fn new(cs_id: BLSVRFCipherSuite) -> Self { 37 | match cs_id { 38 | BLSVRFCipherSuite::VRF_BLS_12_381_SHA256 => Self { 39 | hasher: Box::new(Sha256::new()), 40 | }, 41 | BLSVRFCipherSuite::VRF_BLS_12_381_SHA512 => Self { 42 | hasher: Box::new(Sha512::new()), 43 | }, 44 | } 45 | } 46 | } 47 | 48 | impl Vrf for BLSVRFScheme { 49 | /// Public Parameter. 50 | /// For BLS signatures, we want to use default 51 | /// prime subgroup generators. So here we don't need 52 | /// to specify which PP it is. 53 | type PublicParameter = (); 54 | 55 | /// VRF public key. 56 | type PublicKey = BLSVerKey; 57 | 58 | /// VRF secret key. 59 | type SecretKey = BLSSignKey; 60 | 61 | /// VRF signature. 62 | type Proof = BLSSignature; 63 | 64 | /// The input of VRF proof. 65 | type Input = Vec; 66 | 67 | /// The output of VRF evaluation. 68 | type Output = Vec; 69 | 70 | /// generate public parameters from RNG. 71 | fn param_gen( 72 | &self, 73 | _prng: Option<&mut R>, 74 | ) -> Result { 75 | Ok(()) 76 | } 77 | 78 | /// Creates a pair of VRF public and private keys. 79 | fn key_gen( 80 | &self, 81 | pp: &Self::PublicParameter, 82 | prng: &mut R, 83 | ) -> Result<(Self::SecretKey, Self::PublicKey), PrimitivesError> { 84 | ::key_gen(pp, prng) 85 | } 86 | 87 | /// Creates the VRF proof associated with a VRF secret key. 
88 | fn prove( 89 | &self, 90 | pp: &Self::PublicParameter, 91 | secret_key: &Self::SecretKey, 92 | input: &Self::Input, 93 | prng: &mut R, 94 | ) -> Result { 95 | ::sign(pp, secret_key, input, prng) 96 | } 97 | 98 | /// Computes the VRF output associated with a VRF proof. 99 | fn proof_to_hash( 100 | &mut self, 101 | _pp: &Self::PublicParameter, 102 | proof: &Self::Proof, 103 | ) -> Result { 104 | let proof_serialized = proof.serialize(); 105 | let mut hasher = (*self.hasher).box_clone(); 106 | hasher.update(&proof_serialized); 107 | let output = hasher.finalize(); 108 | Ok(output.to_vec()) 109 | } 110 | 111 | /// Verifies a VRF proof. 112 | fn verify( 113 | &mut self, 114 | pp: &Self::PublicParameter, 115 | proof: &Self::Proof, 116 | public_key: &Self::PublicKey, 117 | input: &Self::Input, 118 | ) -> Result<(bool, Option), PrimitivesError> { 119 | if ::verify(pp, public_key, input, proof).is_ok() { 120 | Ok((true, Some(Self::proof_to_hash(self, pp, proof).unwrap()))) 121 | } else { 122 | Ok((false, None)) 123 | } 124 | } 125 | } 126 | 127 | #[cfg(test)] 128 | mod test { 129 | use super::*; 130 | use ark_std::{rand::Rng, test_rng}; 131 | 132 | pub(crate) fn sign_and_verify( 133 | vrf: &mut BLSVRFScheme, 134 | message: &::Input, 135 | bad_message: &::Input, 136 | ) { 137 | let rng = &mut test_rng(); 138 | 139 | let parameters = vrf.param_gen(Some(rng)).unwrap(); 140 | let (sk, pk) = vrf.key_gen(¶meters, rng).unwrap(); 141 | let vrf_proof = vrf.prove(¶meters, &sk, message, rng).unwrap(); 142 | let vrf_output = vrf.proof_to_hash(¶meters, &vrf_proof).unwrap(); 143 | let (is_correct, output) = vrf.verify(¶meters, &vrf_proof, &pk, message).unwrap(); 144 | assert!(is_correct); 145 | // need to use the result 146 | assert!(output.is_some()); 147 | 148 | // check that proof_to_hash(proof) == evaluate(sk, message) 149 | let out = vrf.evaluate(¶meters, &sk, &message, rng).unwrap(); 150 | assert_eq!(out, vrf_output); 151 | 152 | // check the VRF output vs. hashing the proof directly 153 | let mut hasher = H::new(); 154 | hasher.update(vrf_proof.serialize()); 155 | let direct_hash_output = hasher.finalize().to_vec(); 156 | assert_eq!(direct_hash_output, vrf_output); 157 | 158 | // now test for bad message. User can choose to ignore the output if they really 159 | // want to. 160 | let (is_correct, _) = vrf 161 | .verify(¶meters, &vrf_proof, &pk, bad_message) 162 | .unwrap(); 163 | assert!(!is_correct); 164 | } 165 | 166 | #[test] 167 | fn test_bls_vrf() { 168 | let rng = &mut test_rng(); 169 | for _ in 0..10 { 170 | let message = rng.gen::<[u8; 32]>().to_vec(); 171 | // bad message is truncated 172 | let message_bad = message.clone()[..31].to_vec(); 173 | let mut blsvrf256 = BLSVRFScheme::new(BLSVRFCipherSuite::VRF_BLS_12_381_SHA256); 174 | 175 | sign_and_verify::(&mut blsvrf256, &message, &message_bad); 176 | 177 | let mut blsvrf512 = BLSVRFScheme::new(BLSVRFCipherSuite::VRF_BLS_12_381_SHA512); 178 | sign_and_verify::(&mut blsvrf512, &message, &message_bad); 179 | } 180 | } 181 | } 182 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/vrf/ecvrf.rs: -------------------------------------------------------------------------------- 1 | //! Place holder for ECVRF 2 | -------------------------------------------------------------------------------- /jellyfish/primitives/src/vrf/mod.rs: -------------------------------------------------------------------------------- 1 | //! Module for verifiable random functions. 
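// A minimal end-to-end sketch of the `Vrf` trait defined in this module, using
// the BLS-based instantiation from `blsvrf`; it follows the `sign_and_verify`
// test in `blsvrf.rs`. The 32-byte input is an arbitrary example value.
#[cfg(test)]
#[allow(dead_code)]
fn vrf_usage_sketch() {
    use crate::vrf::blsvrf::{BLSVRFCipherSuite, BLSVRFScheme};
    use ark_std::test_rng;

    let rng = &mut test_rng();
    let mut vrf = BLSVRFScheme::new(BLSVRFCipherSuite::VRF_BLS_12_381_SHA256);

    let pp = vrf.param_gen(Some(rng)).unwrap();
    let (sk, pk) = vrf.key_gen(&pp, rng).unwrap();

    let input = [7u8; 32].to_vec();
    // Prove, then derive the pseudorandom output from the proof.
    let proof = vrf.prove(&pp, &sk, &input, rng).unwrap();
    let output = vrf.proof_to_hash(&pp, &proof).unwrap();

    // Anyone holding the public key can check the proof and recompute the output.
    let (valid, verified_output) = vrf.verify(&pp, &proof, &pk, &input).unwrap();
    assert!(valid);
    assert_eq!(verified_output, Some(output));
}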
2 | 3 | use crate::errors::PrimitivesError; 4 | use ark_std::rand::{CryptoRng, RngCore}; 5 | pub mod blsvrf; 6 | pub mod ecvrf; 7 | use core::fmt::Debug; 8 | use serde::{Deserialize, Serialize}; 9 | use zeroize::Zeroize; 10 | 11 | /// A trait for VRF proof, evaluation and verification. 12 | pub trait Vrf { 13 | /// Public parameters 14 | type PublicParameter; 15 | 16 | /// VRF public key. 17 | type PublicKey: Debug 18 | + Clone 19 | + Send 20 | + Sync 21 | + for<'a> Deserialize<'a> 22 | + Serialize 23 | + PartialEq 24 | + Eq; 25 | 26 | /// VRF secret key. 27 | type SecretKey: Debug 28 | + Clone 29 | + Send 30 | + Sync 31 | + Zeroize 32 | + for<'a> Deserialize<'a> 33 | + Serialize 34 | + PartialEq 35 | + Eq; 36 | 37 | /// VRF signature. 38 | type Proof: Debug + Clone + Send + Sync + for<'a> Deserialize<'a> + Serialize + PartialEq + Eq; 39 | 40 | /// The input of VRF proof. 41 | type Input: Debug + Clone + Send + Sync + for<'a> Deserialize<'a> + Serialize + PartialEq + Eq; 42 | 43 | /// The output of VRF evaluation. 44 | type Output: Debug + Clone + Send + Sync + for<'a> Deserialize<'a> + Serialize + PartialEq + Eq; 45 | 46 | /// generate public parameters from RNG. 47 | /// If the RNG is not presented, use the default group generator. 48 | // FIXME: the API looks a bit strange when the default generator is used. 49 | // For example: 50 | // `S::param_gen::(None)` 51 | // wheere `StdRng` is redundent. 52 | fn param_gen( 53 | &self, 54 | prng: Option<&mut R>, 55 | ) -> Result; 56 | 57 | /// Creates a pair of VRF public and private keys. 58 | fn key_gen( 59 | &self, 60 | pp: &Self::PublicParameter, 61 | prng: &mut R, 62 | ) -> Result<(Self::SecretKey, Self::PublicKey), PrimitivesError>; 63 | 64 | /// Creates the VRF proof associated with a VRF secret key. 65 | fn prove( 66 | &self, 67 | pp: &Self::PublicParameter, 68 | secret_key: &Self::SecretKey, 69 | input: &Self::Input, 70 | prng: &mut R, 71 | ) -> Result; 72 | 73 | /// Computes the VRF output associated with a VRF proof. 74 | fn proof_to_hash( 75 | &mut self, 76 | pp: &Self::PublicParameter, 77 | proof: &Self::Proof, 78 | ) -> Result; 79 | 80 | /// Computes the VRF output given a public input and a VRF secret key. 81 | fn evaluate( 82 | &mut self, 83 | pp: &Self::PublicParameter, 84 | secret_key: &Self::SecretKey, 85 | input: &Self::Input, 86 | prng: &mut R, 87 | ) -> Result { 88 | let proof = self.prove(pp, secret_key, input, prng)?; 89 | self.proof_to_hash(pp, &proof) 90 | } 91 | 92 | /// Verifies a VRF proof. 
93 | #[must_use = "Output must be used"] 94 | fn verify( 95 | &mut self, 96 | pp: &Self::PublicParameter, 97 | proof: &Self::Proof, 98 | public_key: &Self::PublicKey, 99 | input: &Self::Input, 100 | ) -> Result<(bool, Option), PrimitivesError>; 101 | } 102 | -------------------------------------------------------------------------------- /jellyfish/relation/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jf-relation" 3 | description = "Jellyfish constraint system for PLONK" 4 | version = { workspace = true } 5 | authors = { workspace = true } 6 | edition = { workspace = true } 7 | license = { workspace = true } 8 | rust-version = { workspace = true } 9 | 10 | [dependencies] 11 | ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 12 | ark-bls12-381 = "0.3.0" 13 | ark-bn254 = "0.3.0" 14 | ark-bw6-761 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 15 | ark-ec = "0.3.0" 16 | ark-ff = { version = "0.3.0", features = [ "asm" ] } 17 | ark-poly = "0.3.0" 18 | ark-serialize = "0.3.0" 19 | ark-std = { version = "0.3.0", default-features = false } 20 | derivative = { version = "2", features = ["use_core"] } 21 | displaydoc = { version = "0.2.3", default-features = false } 22 | downcast-rs = { version = "1.2.0", default-features = false } 23 | dyn-clone = "^1.0" 24 | hashbrown = "0.13.2" 25 | jf-utils = { path = "../utilities" } 26 | num-bigint = { version = "0.4", default-features = false } 27 | rand_chacha = { version = "0.3.1" } 28 | rayon = { version = "1.5.0", optional = true } 29 | 30 | [dev-dependencies] 31 | ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 32 | ark-ed-on-bls12-381 = "0.3.0" 33 | ark-ed-on-bls12-381-bandersnatch = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 34 | ark-ed-on-bn254 = "0.3.0" 35 | itertools = { version = "0.10.1", default-features = false } 36 | 37 | [features] 38 | default = ["parallel"] 39 | std = ["ark-std/std", "ark-serialize/std", "ark-ff/std", 40 | "ark-ec/std", "ark-poly/std"] 41 | parallel = ["ark-ff/parallel", "ark-ec/parallel", "ark-poly/parallel", 42 | "jf-utils/parallel", "rayon" ] 43 | -------------------------------------------------------------------------------- /jellyfish/relation/src/constants.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Crate wide constants. 8 | 9 | use ark_ff::{FftParameters, PrimeField}; 10 | use ark_std::{rand::SeedableRng, vec, vec::Vec}; 11 | use rand_chacha::ChaChaRng; 12 | 13 | // ========================== 14 | // Circuit-related constants. 15 | // ========================== 16 | 17 | /// The number of input wires. 18 | pub const GATE_WIDTH: usize = 4; 19 | /// The number of multiplication selectors. 20 | pub const N_MUL_SELECTORS: usize = 2; 21 | /// The number of TurboPlonk selectors. 22 | pub const N_TURBO_PLONK_SELECTORS: usize = 13; 23 | 24 | /// Compute constants K0, K1, ..., K_{`num_wire_types`-1} so that cosets {Ki * 25 | /// H} are disjoint, each coset |Ki * H| = `coset_size`. 
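// An illustrative call (the concrete field, wire count and domain size are
// example values; the turbofish assumes the function's `F: PrimeField` type
// parameter): for the 5 TurboPlonk wire types and a circuit evaluation domain
// H of size 2^10, the function returns [K0, ..., K4] with K0 = 1 such that the
// cosets Ki * H are pairwise disjoint:
//
//     use ark_bn254::Fr;
//     let reps: Vec<Fr> = compute_coset_representatives::<Fr>(5, Some(1 << 10));
//     assert_eq!(reps.len(), 5);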
26 | /// `coset_size` is optional, when provided, will accelerate constants 27 | /// searching. 28 | #[inline] 29 | #[allow(non_snake_case)] 30 | pub fn compute_coset_representatives( 31 | num_wire_types: usize, 32 | coset_size: Option, 33 | ) -> Vec { 34 | // check if two cosets `aH == bH` where `a, b` are cosets representations 35 | fn is_equal_coset(pow_a_N: F, pow_b_N: F) -> bool { 36 | // check (a^-1 * b)^`N` = 1 37 | pow_a_N 38 | .inverse() 39 | .expect("Unreachable: all elements in a prime field should have inverse") 40 | * pow_b_N 41 | == F::one() 42 | } 43 | 44 | // check if a new k is valid: i.e. doesn't represent the same coset as any 45 | // previous values `prev`. 46 | fn is_valid_k(pow_k_N: F, pow_prev_N: &[F]) -> bool { 47 | !pow_prev_N 48 | .iter() 49 | .any(|&pow_k_prev_N| is_equal_coset(pow_k_N, pow_k_prev_N)) 50 | } 51 | 52 | // storing cached `Ki -> Ki^coset_size` values. 53 | let mut pow_k_N_vec = vec![]; 54 | let mut k_vec = vec![]; 55 | let mut rng = ChaChaRng::from_seed([0u8; 32]); // empty bytes as seed 56 | 57 | // the exponent N for checking membership of domain H 58 | let N = match coset_size { 59 | Some(size) => size, 60 | None => { 61 | // let `2^s * t` be the size of the multiplicative group defined by the field 62 | // `F`, for some odd integer `t`, `s` is the 2-adicity of `F*`. 63 | // `2^s` is a guaranteed to be multiple of |H|. 64 | 2usize.pow(::TWO_ADICITY) 65 | }, 66 | }; 67 | for i in 0..num_wire_types { 68 | if i == 0 { 69 | // set first K0 = 1, namely the H itself 70 | k_vec.push(F::one()); 71 | pow_k_N_vec.push(F::one()); 72 | } else { 73 | let mut next = F::rand(&mut rng); 74 | let mut pow_next_N = next.pow([N as u64]); 75 | while !is_valid_k(pow_next_N, &pow_k_N_vec) { 76 | next = F::rand(&mut rng); 77 | pow_next_N = next.pow([N as u64]); 78 | } 79 | k_vec.push(next); 80 | pow_k_N_vec.push(pow_next_N); 81 | } 82 | } 83 | k_vec 84 | } 85 | -------------------------------------------------------------------------------- /jellyfish/relation/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Error module. 8 | 9 | use ark_std::string::String; 10 | use displaydoc::Display; 11 | 12 | /// A `enum` specifying the possible failure modes of the circuit. 13 | #[derive(Display, Debug)] 14 | pub enum CircuitError { 15 | /// Failed to create domain 16 | DomainCreationError, 17 | /// Variable index {0} is larger than the bound {1}. 18 | VarIndexOutOfBound(usize, usize), 19 | /// Public input length {0} doesn't match num_inputs = {1}. 
20 | PubInputLenMismatch(usize, usize), 21 | /// The {0}-th gate failed: {1} 22 | GateCheckFailure(usize, String), 23 | /// Invalid parameters: {0} 24 | ParameterError(String), 25 | /// The circuit is not finalized before doing arithmetization 26 | UnfinalizedCircuit, 27 | /// Attempt to modify the finalized circuit 28 | ModifyFinalizedCircuit, 29 | /// The circuit has wrong Plonk type 30 | WrongPlonkType, 31 | /// The circuit does not support lookup 32 | LookupUnsupported, 33 | /// Failed to get array value by index 34 | IndexError, 35 | /// Algebra over field failed: {0} 36 | FieldAlgebraError(String), 37 | #[rustfmt::skip] 38 | /// Unexpected field for elliptic curve operation, currently only support Bn254, BLS12-381/377 scalar field 39 | UnsupportedCurve, 40 | #[rustfmt::skip] 41 | /// ‼ ️Internal error! Please report to Crypto Team immediately!\n\Message: {0} 42 | InternalError(String), 43 | /// Feature not supported: {0} 44 | NotSupported(String), 45 | } 46 | 47 | #[cfg(feature = "std")] 48 | impl std::error::Error for CircuitError {} 49 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gadgets/ecc/conversion.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! this file implements the conversion logic for elliptic curve point between 8 | //! - short Weierstrass form 9 | //! - twisted Edwards form 10 | //! 11 | //! Note that the APIs below create no circuits. 12 | //! An entity should either know both the SW and TE form of a 13 | //! point; or know none of the two. There is no need to generate 14 | //! a circuit for arguing secret knowledge of one form while 15 | //! the other form is public. In practice a prover will convert all of the 16 | //! points to the TE form and work on the TE form inside the circuits. 17 | 18 | use super::Point; 19 | use ark_ec::{short_weierstrass_jacobian::GroupAffine as SWAffine, SWModelParameters as SWParam}; 20 | use ark_ff::{BigInteger256, BigInteger384, BigInteger768, PrimeField}; 21 | 22 | impl From<&SWAffine

<P>> for Point<F> 23 | where 24 | F: PrimeField + SWToTEConParam, 25 | P: SWParam<BaseField = F>, 26 | { 27 | fn from(p: &SWAffine<P>
) -> Self { 28 | // this function is only correct for BLS12-377 29 | // (other curves does not impl an SW form) 30 | 31 | // if p is an infinity point 32 | // return infinity point 33 | if p.infinity { 34 | return Self(F::zero(), F::one()); 35 | } 36 | 37 | // we need to firstly convert this point into 38 | // TE form, and then build the point 39 | 40 | // safe unwrap 41 | let s = F::from_repr(F::S).unwrap(); 42 | let neg_alpha = F::from_repr(F::NEG_ALPHA).unwrap(); 43 | let beta = F::from_repr(F::BETA).unwrap(); 44 | 45 | // we first transform the Weierstrass point (px, py) to Montgomery point (mx, 46 | // my) where mx = s * (px - alpha) 47 | // my = s * py 48 | let montgomery_x = s * (p.x + neg_alpha); 49 | let montgomery_y = s * p.y; 50 | // then we transform the Montgomery point (mx, my) to TE point (ex, ey) where 51 | // ex = beta * mx / my 52 | // ey = (mx - 1) / (mx + 1) 53 | let edwards_x = beta * montgomery_x / montgomery_y; 54 | let edwards_y = (montgomery_x - F::one()) / (montgomery_x + F::one()); 55 | 56 | Point(edwards_x, edwards_y) 57 | } 58 | } 59 | 60 | /// This trait holds constants that are used for curve conversion from 61 | /// short Weierstrass form to twisted Edwards form. 62 | pub trait SWToTEConParam: PrimeField { 63 | /// Parameter S. 64 | const S: Self::BigInt; 65 | /// Parameter 1/alpha. 66 | const NEG_ALPHA: Self::BigInt; 67 | /// Parameter beta. 68 | const BETA: Self::BigInt; 69 | } 70 | 71 | // ================================================ 72 | // BLS12-377::Fq specific implementations 73 | // ================================================ 74 | use ark_bls12_377::Fq as Fq377; 75 | impl SWToTEConParam for Fq377 { 76 | // s = 10189023633222963290707194929886294091415157242906428298294512798502806398782149227503530278436336312243746741931 77 | const S: Self::BigInt = BigInteger384([ 78 | 0x3401d618f0339eab, 79 | 0x0f793b8504b428d4, 80 | 0x0ff643cca95ccc0d, 81 | 0xd7a504665d66cc8c, 82 | 0x1dc07a44b1eeea84, 83 | 0x10f272020f118a, 84 | ]); 85 | 86 | // alpha = -1 87 | const NEG_ALPHA: Self::BigInt = BigInteger384([1, 0, 0, 0, 0, 0]); 88 | 89 | // beta = 23560188534917577818843641916571445935985386319233886518929971599490231428764380923487987729215299304184915158756 90 | const BETA: Self::BigInt = BigInteger384([ 91 | 0x450ae9206343e6e4, 92 | 0x7af39509df5027b6, 93 | 0xab82b31405cf8a30, 94 | 0x80d743e1f6c15c7c, 95 | 0x0cec22e650360183, 96 | 0x272fd56ac5c669, 97 | ]); 98 | } 99 | 100 | // ================================================ 101 | // Bn254::Fq dummy implementations 102 | // ================================================ 103 | use ark_bn254::Fq as Fq254; 104 | /// Dummy implementation for trait bounds 105 | impl SWToTEConParam for Fq254 { 106 | const S: Self::BigInt = BigInteger256([0, 0, 0, 0]); 107 | const NEG_ALPHA: Self::BigInt = BigInteger256([0, 0, 0, 0]); 108 | const BETA: Self::BigInt = BigInteger256([0, 0, 0, 0]); 109 | } 110 | 111 | // ================================================ 112 | // Bls12-381::Fq dummy implementations 113 | // ================================================ 114 | use ark_bls12_381::Fq as Fq381; 115 | /// Dummy implementation for trait bounds 116 | impl SWToTEConParam for Fq381 { 117 | const S: Self::BigInt = BigInteger384([0, 0, 0, 0, 0, 0]); 118 | const NEG_ALPHA: Self::BigInt = BigInteger384([0, 0, 0, 0, 0, 0]); 119 | const BETA: Self::BigInt = BigInteger384([0, 0, 0, 0, 0, 0]); 120 | } 121 | 122 | // ================================================ 123 | // Bw6-761::Fq dummy implementations 124 | // 
================================================ 125 | use ark_bw6_761::Fq as Fq761; 126 | /// Dummy implementation for trait bounds 127 | impl SWToTEConParam for Fq761 { 128 | const S: Self::BigInt = BigInteger768([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]); 129 | const NEG_ALPHA: Self::BigInt = BigInteger768([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]); 130 | const BETA: Self::BigInt = BigInteger768([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]); 131 | } 132 | 133 | #[cfg(test)] 134 | mod test { 135 | use super::*; 136 | use ark_bls12_377::{G1Affine, G1Projective}; 137 | use ark_ec::{AffineCurve, ProjectiveCurve}; 138 | use ark_ff::{field_new, One}; 139 | use ark_std::{test_rng, UniformRand, Zero}; 140 | 141 | // a helper function to check if a point is on the ed curve 142 | // of bls12-377 G1 143 | fn is_on_bls12_377_ed_curve(p: &Point) -> bool { 144 | // Twisted Edwards curve 2: a * x² + y² = 1 + d * x² * y² 145 | let a = field_new!(Fq377, "-1"); 146 | let d = field_new!(Fq377, "122268283598675559488486339158635529096981886914877139579534153582033676785385790730042363341236035746924960903179"); 147 | 148 | let x2 = p.0 * p.0; 149 | let y2 = p.1 * p.1; 150 | 151 | let left = a * x2 + y2; 152 | let right = Fq377::one() + d * x2 * y2; 153 | 154 | left == right 155 | } 156 | 157 | #[allow(non_snake_case)] 158 | #[test] 159 | fn test_sw_to_te_conversion() { 160 | let mut rng = test_rng(); 161 | 162 | // test generator 163 | let g1 = &G1Affine::prime_subgroup_generator(); 164 | let p: Point = g1.into(); 165 | assert!(is_on_bls12_377_ed_curve(&p)); 166 | 167 | // test zero point 168 | let g1 = &G1Affine::zero(); 169 | let p: Point = g1.into(); 170 | assert_eq!(p.0, Fq377::zero()); 171 | assert_eq!(p.1, Fq377::one()); 172 | assert!(is_on_bls12_377_ed_curve(&p)); 173 | 174 | // test a random group element 175 | let g1 = &G1Projective::rand(&mut rng).into_affine(); 176 | let p: Point = g1.into(); 177 | assert!(is_on_bls12_377_ed_curve(&p)); 178 | } 179 | } 180 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gadgets/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! 
Gates and gadgets implementations 8 | 9 | pub mod ecc; 10 | pub mod ultraplonk; 11 | 12 | mod arithmetic; 13 | mod cmp; 14 | mod logic; 15 | mod range; 16 | pub use arithmetic::*; 17 | pub use cmp::*; 18 | pub use logic::*; 19 | pub use range::*; 20 | 21 | // Helper functions 22 | mod utils; 23 | 24 | /// Utils for test 25 | pub mod test_utils { 26 | use crate::{errors::CircuitError, Arithmetization, Circuit, PlonkCircuit}; 27 | use ark_ff::PrimeField; 28 | 29 | /// two circuit with the same statement should have the same extended 30 | /// permutation polynomials even with different variable assignment 31 | pub fn test_variable_independence_for_circuit( 32 | circuit_1: PlonkCircuit, 33 | circuit_2: PlonkCircuit, 34 | ) -> Result<(), CircuitError> { 35 | assert_eq!(circuit_1.num_gates(), circuit_2.num_gates()); 36 | assert_eq!(circuit_1.num_vars(), circuit_2.num_vars()); 37 | // Check extended permutation polynomials 38 | let sigma_polys_1 = circuit_1.compute_extended_permutation_polynomials()?; 39 | let sigma_polys_2 = circuit_2.compute_extended_permutation_polynomials()?; 40 | sigma_polys_1 41 | .iter() 42 | .zip(sigma_polys_2.iter()) 43 | .for_each(|(p1, p2)| assert_eq!(p1, p2)); 44 | Ok(()) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gadgets/ultraplonk/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implements ultra-plonk related circuits. 8 | 9 | mod lookup_table; 10 | pub mod mod_arith; 11 | mod non_native_gates; 12 | mod range; 13 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gadgets/utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! 
Helper functions for circuit gadgets implementation 8 | 9 | use crate::errors::CircuitError; 10 | use ark_std::{cmp::Ordering, string::ToString}; 11 | 12 | // helper function to find the next multiple of `divisor` for `current` value 13 | pub(crate) fn next_multiple(current: usize, divisor: usize) -> Result { 14 | if divisor == 0 || divisor == 1 { 15 | return Err(CircuitError::InternalError( 16 | "can only be a multiple of divisor >= 2".to_string(), 17 | )); 18 | } 19 | match current.cmp(&divisor) { 20 | Ordering::Equal => Ok(current), 21 | Ordering::Less => Ok(divisor), 22 | Ordering::Greater => Ok((current / divisor + 1) * divisor), 23 | } 24 | } 25 | 26 | #[cfg(test)] 27 | mod test { 28 | use super::next_multiple; 29 | use crate::errors::CircuitError; 30 | 31 | #[test] 32 | fn test_helper_next_multiple() -> Result<(), CircuitError> { 33 | assert!(next_multiple(5, 0).is_err()); 34 | assert!(next_multiple(5, 1).is_err()); 35 | 36 | assert_eq!(next_multiple(5, 2)?, 6); 37 | assert_eq!(next_multiple(5, 3)?, 6); 38 | assert_eq!(next_multiple(5, 4)?, 8); 39 | assert_eq!(next_multiple(5, 5)?, 5); 40 | assert_eq!(next_multiple(5, 11)?, 11); 41 | Ok(()) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gates/arithmetic.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implementation of arithmetic gates 8 | 9 | use super::Gate; 10 | use crate::constants::{GATE_WIDTH, N_MUL_SELECTORS}; 11 | use ark_ff::Field; 12 | 13 | /// A constant gate 14 | #[derive(Debug, Clone)] 15 | pub struct ConstantGate(pub(crate) F); 16 | 17 | impl Gate for ConstantGate 18 | where 19 | F: Field, 20 | { 21 | fn name(&self) -> &'static str { 22 | "Constant Gate" 23 | } 24 | fn q_c(&self) -> F { 25 | self.0 26 | } 27 | fn q_o(&self) -> F { 28 | F::one() 29 | } 30 | } 31 | 32 | /// An addition gate 33 | #[derive(Debug, Clone)] 34 | pub struct AdditionGate; 35 | 36 | impl Gate for AdditionGate 37 | where 38 | F: Field, 39 | { 40 | fn name(&self) -> &'static str { 41 | "Addition Gate" 42 | } 43 | fn q_lc(&self) -> [F; GATE_WIDTH] { 44 | [F::one(), F::one(), F::zero(), F::zero()] 45 | } 46 | fn q_o(&self) -> F { 47 | F::one() 48 | } 49 | } 50 | 51 | /// Adding a variable by a constant. 
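// Roughly speaking (ignoring the hash, ECC, lookup and public-input selectors),
// a gate with input wires (w0, w1, w2, w3) and output wire w_o enforces
//     q_lc . (w0, w1, w2, w3) + q_mul[0]*w0*w1 + q_mul[1]*w2*w3 + q_c = q_o * w_o.
// So `AdditionGate` above (q_lc = [1, 1, 0, 0], q_o = 1) enforces w0 + w1 = w_o,
// and `ConstantAdditionGate(c)` below (q_lc = [1, 0, 0, 0], q_c = c, q_o = 1)
// enforces w0 + c = w_o. A quick numeric check of the latter relation with
// example values:
//
//     use ark_bn254::Fr;
//     let (w0, c) = (Fr::from(10u64), Fr::from(32u64));
//     let w_o = Fr::from(42u64);
//     assert_eq!(w0 + c, w_o); // the relation the selectors encode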
52 | #[derive(Debug, Clone)] 53 | pub struct ConstantAdditionGate(pub(crate) F); 54 | 55 | impl Gate for ConstantAdditionGate 56 | where 57 | F: Field, 58 | { 59 | fn name(&self) -> &'static str { 60 | "Constant addition Gate" 61 | } 62 | fn q_lc(&self) -> [F; GATE_WIDTH] { 63 | [F::one(), F::zero(), F::zero(), F::zero()] 64 | } 65 | fn q_c(&self) -> F { 66 | self.0 67 | } 68 | fn q_o(&self) -> F { 69 | F::one() 70 | } 71 | } 72 | 73 | /// A subtraction gate 74 | #[derive(Debug, Clone)] 75 | pub struct SubtractionGate; 76 | 77 | impl Gate for SubtractionGate 78 | where 79 | F: Field, 80 | { 81 | fn name(&self) -> &'static str { 82 | "Subtraction Gate" 83 | } 84 | fn q_lc(&self) -> [F; GATE_WIDTH] { 85 | [F::one(), -F::one(), F::zero(), F::zero()] 86 | } 87 | fn q_o(&self) -> F { 88 | F::one() 89 | } 90 | } 91 | 92 | /// A multiplication gate 93 | #[derive(Debug, Clone)] 94 | pub struct MultiplicationGate; 95 | 96 | impl Gate for MultiplicationGate 97 | where 98 | F: Field, 99 | { 100 | fn name(&self) -> &'static str { 101 | "Multiplication Gate" 102 | } 103 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 104 | [F::one(), F::zero()] 105 | } 106 | fn q_o(&self) -> F { 107 | F::one() 108 | } 109 | } 110 | 111 | /// A mul constant gate. 112 | /// Multiply the first variable with the constant. 113 | #[derive(Debug, Clone)] 114 | pub struct ConstantMultiplicationGate(pub(crate) F); 115 | 116 | impl Gate for ConstantMultiplicationGate 117 | where 118 | F: Field, 119 | { 120 | fn name(&self) -> &'static str { 121 | "Mul constant Gate" 122 | } 123 | fn q_lc(&self) -> [F; GATE_WIDTH] { 124 | [self.0, F::zero(), F::zero(), F::zero()] 125 | } 126 | fn q_o(&self) -> F { 127 | F::one() 128 | } 129 | } 130 | 131 | /// A boolean gate, selectors identical to `MultiplicationGate`, achieve through 132 | /// constraining a * a = a 133 | #[derive(Debug, Clone)] 134 | pub struct BoolGate; 135 | 136 | impl Gate for BoolGate 137 | where 138 | F: Field, 139 | { 140 | fn name(&self) -> &'static str { 141 | "Check Boolean Gate" 142 | } 143 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 144 | MultiplicationGate.q_mul() 145 | } 146 | fn q_o(&self) -> F { 147 | MultiplicationGate.q_o() 148 | } 149 | } 150 | 151 | /// An equality gate, selectors identical to `SubtractionGate`, achieve through 152 | /// constraining a - b = 0 153 | #[derive(Debug, Clone)] 154 | pub struct EqualityGate; 155 | 156 | impl Gate for EqualityGate 157 | where 158 | F: Field, 159 | { 160 | fn name(&self) -> &'static str { 161 | "Check Equality Gate" 162 | } 163 | fn q_lc(&self) -> [F; GATE_WIDTH] { 164 | SubtractionGate.q_lc() 165 | } 166 | fn q_o(&self) -> F { 167 | SubtractionGate.q_o() 168 | } 169 | } 170 | 171 | /// An I/O gate for public inputs 172 | #[derive(Debug, Clone)] 173 | pub struct IoGate; 174 | 175 | impl Gate for IoGate 176 | where 177 | F: Field, 178 | { 179 | fn name(&self) -> &'static str { 180 | "Public I/O Gate" 181 | } 182 | fn q_o(&self) -> F { 183 | F::one() 184 | } 185 | } 186 | 187 | /// Gate for checking a value is the fifth root of another 188 | #[derive(Debug, Clone)] 189 | pub struct FifthRootGate; 190 | 191 | impl Gate for FifthRootGate { 192 | fn name(&self) -> &'static str { 193 | "Raise to the inverse of 5 power Gate" 194 | } 195 | 196 | fn q_hash(&self) -> [F; GATE_WIDTH] { 197 | [F::one(), F::zero(), F::zero(), F::zero()] 198 | } 199 | 200 | fn q_o(&self) -> F { 201 | F::one() 202 | } 203 | } 204 | 205 | /// A deg-2 polynomial gate 206 | #[derive(Clone)] 207 | pub struct QuadPolyGate { 208 | pub(crate) q_lc: [F; 
GATE_WIDTH], 209 | pub(crate) q_mul: [F; N_MUL_SELECTORS], 210 | pub(crate) q_o: F, 211 | pub(crate) q_c: F, 212 | } 213 | impl Gate for QuadPolyGate 214 | where 215 | F: Field, 216 | { 217 | fn name(&self) -> &'static str { 218 | "Deg-2 Polynomial Gate" 219 | } 220 | fn q_lc(&self) -> [F; GATE_WIDTH] { 221 | self.q_lc 222 | } 223 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 224 | self.q_mul 225 | } 226 | fn q_o(&self) -> F { 227 | self.q_o 228 | } 229 | fn q_c(&self) -> F { 230 | self.q_c 231 | } 232 | } 233 | 234 | /// A linear combination gate 235 | #[derive(Clone)] 236 | pub struct LinCombGate { 237 | pub(crate) coeffs: [F; GATE_WIDTH], 238 | } 239 | impl Gate for LinCombGate 240 | where 241 | F: Field, 242 | { 243 | fn name(&self) -> &'static str { 244 | "Linear Combination Gate" 245 | } 246 | fn q_lc(&self) -> [F; GATE_WIDTH] { 247 | self.coeffs 248 | } 249 | fn q_o(&self) -> F { 250 | F::one() 251 | } 252 | } 253 | 254 | /// A multiplication-then-addition gate 255 | #[derive(Clone)] 256 | pub struct MulAddGate { 257 | pub(crate) coeffs: [F; N_MUL_SELECTORS], 258 | } 259 | impl Gate for MulAddGate 260 | where 261 | F: Field, 262 | { 263 | fn name(&self) -> &'static str { 264 | "Multiplication-then-addition Gate" 265 | } 266 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 267 | self.coeffs 268 | } 269 | fn q_o(&self) -> F { 270 | F::one() 271 | } 272 | } 273 | 274 | /// A gate for conditional selection 275 | #[derive(Clone)] 276 | pub struct CondSelectGate; 277 | 278 | impl Gate for CondSelectGate 279 | where 280 | F: Field, 281 | { 282 | fn name(&self) -> &'static str { 283 | "Conditional Selection Gate" 284 | } 285 | fn q_lc(&self) -> [F; GATE_WIDTH] { 286 | [F::zero(), F::one(), F::zero(), F::zero()] 287 | } 288 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 289 | [-F::one(), F::one()] 290 | } 291 | fn q_o(&self) -> F { 292 | F::one() 293 | } 294 | } 295 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gates/ecc.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implementation of ECC related gates 8 | 9 | use core::marker::PhantomData; 10 | 11 | use crate::{ 12 | constants::{GATE_WIDTH, N_MUL_SELECTORS}, 13 | gates::Gate, 14 | }; 15 | use ark_ec::TEModelParameters as Parameters; 16 | use ark_ff::PrimeField; 17 | use derivative::Derivative; 18 | 19 | #[inline] 20 | fn edwards_coeff_d() -> P::BaseField { 21 | P::COEFF_D 22 | } 23 | 24 | /// A gate for checking a point conforming the twisted Edwards curve equation 25 | #[derive(Derivative)] 26 | #[derivative(Clone(bound = "P: Parameters"))] 27 | pub struct EdwardsCurveEquationGate { 28 | pub(crate) _phantom: PhantomData

<P>, 29 | } 30 | 31 | impl<F, P> Gate<F> for EdwardsCurveEquationGate<P> 32 | where 33 | F: PrimeField, 34 | P: Parameters<BaseField = F>, 35 | { 36 | fn name(&self) -> &'static str { 37 | "Curve Equation Gate" 38 | } 39 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 40 | // edwards equation: ax^2 + y^2 =1 + dx^2y^2 41 | // for ed_on_bn curves, we have a = 1 42 | // for ed_on_bls curves, we have a = -1 43 | [-P::COEFF_A, -F::one()] 44 | } 45 | fn q_c(&self) -> F { 46 | F::one() 47 | } 48 | fn q_ecc(&self) -> F { 49 | edwards_coeff_d::<P>() 50 | } 51 | } 52 | 53 | /// A gate for point addition on x-coordinate between two Curve Points 54 | #[derive(Derivative)] 55 | #[derivative(Clone(bound = "P: Parameters"))] 56 | pub struct CurvePointXAdditionGate<P: Parameters> { 57 | pub(crate) _phantom: PhantomData<P>, 58 | } 59 | 60 | impl<F, P> Gate<F> for CurvePointXAdditionGate<P> 61 | where 62 | F: PrimeField, 63 | P: Parameters<BaseField = F>, 64 | { 65 | fn name(&self) -> &'static str { 66 | "Point Addition X-coordinate Gate" 67 | } 68 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 69 | [F::one(), F::one()] 70 | } 71 | fn q_o(&self) -> F { 72 | F::one() 73 | } 74 | fn q_ecc(&self) -> F { 75 | let d: F = edwards_coeff_d::<P>(); 76 | -d 77 | } 78 | } 79 | 80 | /// A gate for point addition on y-coordinate between two Curve Points 81 | #[derive(Derivative)] 82 | #[derivative(Clone(bound = "P: Parameters"))] 83 | pub struct CurvePointYAdditionGate<P: Parameters> { 84 | pub(crate) _phantom: PhantomData<P>, 85 | } 86 | 87 | impl<F, P> Gate<F> for CurvePointYAdditionGate<P> 88 | where 89 | F: PrimeField, 90 | P: Parameters<BaseField = F>, 91 | { 92 | fn name(&self) -> &'static str { 93 | "Point Addition Y-coordinate Gate" 94 | } 95 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 96 | [-P::COEFF_A, F::one()] 97 | } 98 | fn q_o(&self) -> F { 99 | F::one() 100 | } 101 | fn q_ecc(&self) -> F { 102 | edwards_coeff_d::<P>
() 103 | } 104 | } 105 | 106 | /// A point selection gate on x-coordinate for conditional selection among 4 107 | /// point candidates 108 | /// P0 is default neutral point, P1, P2, P3 are public constants 109 | #[derive(Clone)] 110 | pub struct QuaternaryPointSelectXGate { 111 | pub(crate) x1: F, 112 | pub(crate) x2: F, 113 | pub(crate) x3: F, 114 | } 115 | 116 | impl Gate for QuaternaryPointSelectXGate 117 | where 118 | F: PrimeField, 119 | { 120 | fn name(&self) -> &'static str { 121 | "4-ary Point Selection X-coordinate Gate" 122 | } 123 | fn q_lc(&self) -> [F; GATE_WIDTH] { 124 | [self.x1, self.x2, F::zero(), F::zero()] 125 | } 126 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 127 | [self.x3 - self.x2 - self.x1, F::zero()] 128 | } 129 | fn q_o(&self) -> F { 130 | F::one() 131 | } 132 | } 133 | 134 | /// A point selection gate on y-coordinate for conditional selection among 4 135 | /// point candidates 136 | /// P0 is default neutral point, P1, P2, P3 are public constants 137 | #[derive(Clone)] 138 | pub struct QuaternaryPointSelectYGate { 139 | pub(crate) y1: F, 140 | pub(crate) y2: F, 141 | pub(crate) y3: F, 142 | } 143 | 144 | impl Gate for QuaternaryPointSelectYGate 145 | where 146 | F: PrimeField, 147 | { 148 | fn name(&self) -> &'static str { 149 | "4-ary Point Selection Y-coordinate Gate" 150 | } 151 | fn q_lc(&self) -> [F; GATE_WIDTH] { 152 | [self.y1 - F::one(), self.y2 - F::one(), F::zero(), F::zero()] 153 | } 154 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 155 | [self.y3 - self.y2 - self.y1 + F::one(), F::zero()] 156 | } 157 | fn q_c(&self) -> F { 158 | F::one() 159 | } 160 | fn q_o(&self) -> F { 161 | F::one() 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gates/logic.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implementation of logic gates 8 | 9 | use super::Gate; 10 | use crate::constants::{GATE_WIDTH, N_MUL_SELECTORS}; 11 | use ark_ff::Field; 12 | 13 | /// A gate for logic OR 14 | #[derive(Clone)] 15 | pub struct LogicOrGate; 16 | 17 | impl Gate for LogicOrGate 18 | where 19 | F: Field, 20 | { 21 | fn name(&self) -> &'static str { 22 | "Logic OR Gate" 23 | } 24 | fn q_lc(&self) -> [F; GATE_WIDTH] { 25 | [F::one(), F::one(), F::zero(), F::zero()] 26 | } 27 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 28 | [-F::one(), F::zero()] 29 | } 30 | fn q_c(&self) -> F { 31 | -F::one() 32 | } 33 | } 34 | 35 | /// A gate for computing the logic OR value of 2 variables 36 | #[derive(Clone)] 37 | pub struct LogicOrOutputGate; 38 | 39 | impl Gate for LogicOrOutputGate 40 | where 41 | F: Field, 42 | { 43 | fn name(&self) -> &'static str { 44 | "Logic OR Value Gate" 45 | } 46 | fn q_lc(&self) -> [F; GATE_WIDTH] { 47 | [F::one(), F::one(), F::zero(), F::zero()] 48 | } 49 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 50 | [-F::one(), F::zero()] 51 | } 52 | fn q_o(&self) -> F { 53 | F::one() 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gates/lookup.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 
3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Implementation of UltraPlonk lookup gates 8 | 9 | use super::Gate; 10 | use ark_ff::Field; 11 | 12 | /// An UltraPlonk lookup gate 13 | #[derive(Debug, Clone)] 14 | pub struct LookupGate { 15 | pub(crate) q_dom_sep: F, 16 | pub(crate) table_dom_sep: F, 17 | pub(crate) table_key: F, 18 | } 19 | 20 | impl Gate for LookupGate 21 | where 22 | F: Field, 23 | { 24 | fn name(&self) -> &'static str { 25 | "UltraPlonk Lookup Gate" 26 | } 27 | fn q_lookup(&self) -> F { 28 | F::one() 29 | } 30 | fn q_dom_sep(&self) -> F { 31 | self.q_dom_sep 32 | } 33 | fn table_key(&self) -> F { 34 | self.table_key 35 | } 36 | fn table_dom_sep(&self) -> F { 37 | self.table_dom_sep 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /jellyfish/relation/src/gates/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Module for various circuit gates. 8 | use ark_ff::Field; 9 | use ark_std::boxed::Box; 10 | use core::fmt; 11 | use downcast_rs::{impl_downcast, Downcast}; 12 | use dyn_clone::DynClone; 13 | 14 | use crate::constants::{GATE_WIDTH, N_MUL_SELECTORS}; 15 | 16 | mod arithmetic; 17 | mod ecc; 18 | mod logic; 19 | mod lookup; 20 | 21 | pub use arithmetic::*; 22 | pub use ecc::*; 23 | pub use logic::*; 24 | pub use lookup::*; 25 | 26 | /// Describes a gate with getter for all selectors configuration 27 | pub trait Gate: Downcast + DynClone { 28 | /// Get the name of a gate. 29 | fn name(&self) -> &'static str; 30 | /// Selectors for linear combination. 31 | fn q_lc(&self) -> [F; GATE_WIDTH] { 32 | [F::zero(); GATE_WIDTH] 33 | } 34 | /// Selectors for Rescue hashes. 35 | fn q_hash(&self) -> [F; GATE_WIDTH] { 36 | [F::zero(); GATE_WIDTH] 37 | } 38 | /// Selectors for multiplication. 39 | fn q_mul(&self) -> [F; N_MUL_SELECTORS] { 40 | [F::zero(); N_MUL_SELECTORS] 41 | } 42 | /// The selector for elliptic curve operation. 43 | fn q_ecc(&self) -> F { 44 | F::zero() 45 | } 46 | /// Constant selector. 47 | fn q_c(&self) -> F { 48 | F::zero() 49 | } 50 | /// Output wire selector. 51 | fn q_o(&self) -> F { 52 | F::zero() 53 | } 54 | /// UltraPlonk lookup selector. 55 | fn q_lookup(&self) -> F { 56 | F::zero() 57 | } 58 | /// UltraPlonk lookup domain separation selector. 59 | fn q_dom_sep(&self) -> F { 60 | F::zero() 61 | } 62 | /// UltraPlonk table keys. 
63 | fn table_key(&self) -> F { 64 | F::zero() 65 | } 66 | /// UltraPlonk table domain separation ids 67 | fn table_dom_sep(&self) -> F { 68 | F::zero() 69 | } 70 | } 71 | impl_downcast!(Gate where F: Field); 72 | 73 | impl Clone for Box> { 74 | fn clone(&self) -> Box> { 75 | dyn_clone::clone_box(&**self) 76 | } 77 | } 78 | 79 | impl fmt::Debug for (dyn Gate + 'static) { 80 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 81 | // TODO: (alex) add more context for debug 82 | f.write_str(self.name()) 83 | } 84 | } 85 | 86 | /// A empty gate for circuit padding 87 | #[derive(Debug, Clone)] 88 | pub struct PaddingGate; 89 | 90 | impl Gate for PaddingGate 91 | where 92 | F: Field, 93 | { 94 | fn name(&self) -> &'static str { 95 | "Padding Gate" 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /jellyfish/relation/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Interfaces for Plonk-based constraint systems 8 | 9 | #![cfg_attr(not(feature = "std"), no_std)] 10 | #![warn(missing_docs)] 11 | #![allow(clippy::derived_hash_with_manual_eq)] 12 | pub mod constants; 13 | pub mod errors; 14 | pub mod gadgets; 15 | pub mod gates; 16 | 17 | pub mod constraint_system; 18 | pub use constraint_system::*; 19 | -------------------------------------------------------------------------------- /jellyfish/rustfmt.toml: -------------------------------------------------------------------------------- 1 | reorder_imports = true 2 | wrap_comments = true 3 | normalize_comments = true 4 | use_try_shorthand = true 5 | match_block_trailing_comma = true 6 | use_field_init_shorthand = true 7 | edition = "2018" 8 | condense_wildcard_suffixes = true 9 | imports_granularity = "Crate" 10 | -------------------------------------------------------------------------------- /jellyfish/scripts/check_no_std.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | set -x 4 | 5 | cargo-nono check --no-default-features --package jf-utils 6 | cargo-nono check --no-default-features --package jf-relation 7 | cargo-nono check --no-default-features --package jf-primitives 8 | cargo-nono check --no-default-features --package jf-plonk 9 | -------------------------------------------------------------------------------- /jellyfish/scripts/run_benchmarks.m4: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # m4_ignore( 4 | echo "This is just a script template, not the script (yet) - pass it to 'argbash' to fix this." 
>&2 5 | exit 11 #)Created by argbash-init v2.10.0 6 | # ARG_OPTIONAL_BOOLEAN([asm]) 7 | # ARG_OPTIONAL_BOOLEAN([multi_threads]) 8 | # ARG_HELP([]) 9 | # ARGBASH_GO 10 | 11 | # [ <-- needed because of Argbash 12 | 13 | if [ "$_arg_multi_threads" = on ] 14 | then 15 | echo "Multi-threads: ON" 16 | # Do nothing 17 | else 18 | echo "Multi-threads: OFF" 19 | export RAYON_NUM_THREADS=1 20 | fi 21 | 22 | if [ "$_arg_asm" = on ] 23 | then 24 | echo "Asm feature: ON" 25 | export RUSTFLAGS="-C target-feature=+bmi2,+adx" 26 | else 27 | echo "Asm feature: OFF" 28 | # Do nothing 29 | fi 30 | 31 | # Run the benchmark binary 32 | set -e 33 | cargo +nightly bench 34 | 35 | 36 | # ^^^ TERMINATE YOUR CODE BEFORE THE BOTTOM ARGBASH MARKER ^^^ 37 | 38 | # ] <-- needed because of Argbash 39 | -------------------------------------------------------------------------------- /jellyfish/scripts/run_benchmarks.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # Created by argbash-init v2.10.0 4 | # ARG_OPTIONAL_BOOLEAN([asm]) 5 | # ARG_OPTIONAL_BOOLEAN([multi_threads]) 6 | # ARG_HELP([]) 7 | # ARGBASH_GO() 8 | # needed because of Argbash --> m4_ignore([ 9 | ### START OF CODE GENERATED BY Argbash v2.10.0 one line above ### 10 | # Argbash is a bash code generator used to get arguments parsing right. 11 | # Argbash is FREE SOFTWARE, see https://argbash.io for more info 12 | 13 | 14 | die() 15 | { 16 | local _ret="${2:-1}" 17 | test "${_PRINT_HELP:-no}" = yes && print_help >&2 18 | echo "$1" >&2 19 | exit "${_ret}" 20 | } 21 | 22 | 23 | begins_with_short_option() 24 | { 25 | local first_option all_short_options='h' 26 | first_option="${1:0:1}" 27 | test "$all_short_options" = "${all_short_options/$first_option/}" && return 1 || return 0 28 | } 29 | 30 | # THE DEFAULTS INITIALIZATION - OPTIONALS 31 | _arg_asm="off" 32 | _arg_multi_threads="off" 33 | 34 | 35 | print_help() 36 | { 37 | printf '%s\n' "" 38 | printf 'Usage: %s [--(no-)asm] [--(no-)multi_threads] [-h|--help]\n' "$0" 39 | printf '\t%s\n' "-h, --help: Prints help" 40 | } 41 | 42 | 43 | parse_commandline() 44 | { 45 | while test $# -gt 0 46 | do 47 | _key="$1" 48 | case "$_key" in 49 | --no-asm|--asm) 50 | _arg_asm="on" 51 | test "${1:0:5}" = "--no-" && _arg_asm="off" 52 | ;; 53 | --no-multi_threads|--multi_threads) 54 | _arg_multi_threads="on" 55 | test "${1:0:5}" = "--no-" && _arg_multi_threads="off" 56 | ;; 57 | -h|--help) 58 | print_help 59 | exit 0 60 | ;; 61 | -h*) 62 | print_help 63 | exit 0 64 | ;; 65 | *) 66 | _PRINT_HELP=yes die "FATAL ERROR: Got an unexpected argument '$1'" 1 67 | ;; 68 | esac 69 | shift 70 | done 71 | } 72 | 73 | parse_commandline "$@" 74 | 75 | # OTHER STUFF GENERATED BY Argbash 76 | 77 | ### END OF CODE GENERATED BY Argbash (sortof) ### ]) 78 | # [ <-- needed because of Argbash 79 | 80 | cargo clean 81 | 82 | if [ "$_arg_multi_threads" = on ] 83 | then 84 | echo "Multi-threads: ON" 85 | # Do nothing 86 | else 87 | echo "Multi-threads: OFF" 88 | export RAYON_NUM_THREADS=1 89 | fi 90 | 91 | if [ "$_arg_asm" = on ] 92 | then 93 | echo "Asm feature: ON" 94 | export RUSTFLAGS="-C target-feature=+bmi2,+adx" 95 | else 96 | echo "Asm feature: OFF" 97 | # Do nothing 98 | fi 99 | 100 | # Run the benchmark binary 101 | set -e 102 | cargo bench 103 | 104 | 105 | # ^^^ TERMINATE YOUR CODE BEFORE THE BOTTOM ARGBASH MARKER ^^^ 106 | 107 | # ] <-- needed because of Argbash 108 | -------------------------------------------------------------------------------- 
/jellyfish/scripts/run_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -e 3 | 4 | # We want the code to panic if there is an integer overflow 5 | export RUSTFLAGS="-C overflow-checks=on" 6 | 7 | cargo test --release -p jf-utils -- -Zunstable-options --report-time 8 | cargo test --release -p jf-plonk -- -Zunstable-options --report-time 9 | cargo test --release -p jf-primitives -- -Zunstable-options --report-time 10 | cargo test --release -p jf-relation -- -Zunstable-options --report-time 11 | -------------------------------------------------------------------------------- /jellyfish/scripts/test_coverage.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env nix-shell 2 | #!nix-shell ../nix/nightly.nix -i bash 3 | set -e 4 | set -o xtrace 5 | IGNORED_FILES="--ignore **/errors.rs\ 6 | --ignore **/src/bin/*\ 7 | --ignore transactions/src/parameters.rs\ 8 | --ignore transactions/src/bench_utils/*\ 9 | " 10 | export CARGO_INCREMENTAL=0 11 | export RUSTFLAGS="-Zprofile -Ccodegen-units=1 -Copt-level=3 -Clink-dead-code -Coverflow-checks=off -Zpanic_abort_tests" 12 | export RUSTDOCFLAGS="" 13 | rm -vf ./target/**/*.gcda 14 | cargo build 15 | cargo test --lib 16 | grcov . -s . --binary-path ./target/debug/ -t html --branch --ignore-not-existing $IGNORED_FILES -o ./target/debug/coverage/ 17 | echo "Coverage report available at target/debug/coverage/index.html." 18 | -------------------------------------------------------------------------------- /jellyfish/shell.nix: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | # This file is part of the Configurable Asset Privacy for Ethereum (CAPE) library. 3 | # 4 | # This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. 5 | # This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 6 | # You should have received a copy of the GNU General Public License along with this program. If not, see . 
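# The expression below uses flake-compat (pinned in flake.lock) so that a plain `nix-shell` invocation loads the same development shell that flake.nix defines.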
7 | 8 | (import 9 | ( 10 | let 11 | lock = builtins.fromJSON (builtins.readFile ./flake.lock); 12 | in 13 | fetchTarball { 14 | url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; 15 | sha256 = lock.nodes.flake-compat.locked.narHash; 16 | } 17 | ) 18 | { 19 | src = ./.; 20 | }).shellNix 21 | -------------------------------------------------------------------------------- /jellyfish/utilities/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jf-utils" 3 | description = "Utilities for Jellyfish cryptographic library" 4 | version = { workspace = true } 5 | authors = { workspace = true } 6 | edition = { workspace = true } 7 | license = { workspace = true } 8 | rust-version = { workspace = true } 9 | 10 | [dependencies] 11 | ark-ec = { version = "0.3.0", default-features = false } 12 | ark-ff = { version = "0.3.0", default-features = false, features = [ "asm" ] } 13 | ark-serialize = { version = "0.3.0", default-features = false } 14 | ark-std = { version = "0.3.0", default-features = false } 15 | digest = { version = "0.10.1", default-features = false } 16 | rayon = { version = "1.5.0", optional = true } 17 | serde = { version = "1.0", default-features = false, features = ["derive"] } 18 | sha2 = { version = "0.10.1", default-features = false } 19 | tagged-base64 = { git = "https://github.com/espressosystems/tagged-base64", tag = "0.2.4" } 20 | 21 | [dev-dependencies] 22 | ark-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 23 | ark-bls12-381 = "0.3.0" 24 | ark-bn254 = "0.3.0" 25 | ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/curves", rev = "677b4ae751a274037880ede86e9b6f30f62635af" } 26 | ark-ed-on-bls12-381 = "0.3.0" 27 | ark-ed-on-bn254 = "0.3.0" 28 | 29 | [features] 30 | default = [] 31 | std = ["ark-ff/std", "ark-std/std", "ark-ec/std", "ark-serialize/std"] 32 | parallel = ["ark-ff/parallel", "ark-std/parallel", "ark-ec/parallel", "rayon"] 33 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/conversion.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | use ark_ec::ModelParameters; 8 | use ark_ff::{BigInteger, PrimeField}; 9 | use ark_std::{cmp::min, vec::Vec}; 10 | use sha2::{Digest, Sha512}; 11 | 12 | /// Convert a scalar field element to a base field element. 13 | /// Mod reduction is not performed since the conversion occurs 14 | /// for fields on a same curve. 
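///
/// Illustrative usage (hypothetical values; assumes the `ark-ed-on-bls12-381` curve used in the tests below):
/// let jubjub_scalar = ark_ed_on_bls12_381::Fr::from(42u64);
/// let bls_scalar: ark_bls12_381::Fr = fr_to_fq::<_, ark_ed_on_bls12_381::EdwardsParameters>(&jubjub_scalar);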
15 | pub fn fr_to_fq(scalar: &P::ScalarField) -> F 16 | where 17 | F: PrimeField, 18 | P: ModelParameters, 19 | { 20 | // sanity checks: 21 | // ensure | jubjub scalar field | <= | BLS Scalar field | 22 | // jubjub scalar field: 23 | // 6554484396890773809930967563523245729705921265872317281365359162392183254199 24 | // BLS12-381 scalar field: 25 | // 52435875175126190479447740508185965837690552500527637822603658699938581184513 26 | // jubjub377 scalar field: 27 | // 2111115437357092606062206234695386632838870926408408195193685246394721360383 28 | // BLS12-377 scalar field: 29 | // 8444461749428370424248824938781546531375899335154063827935233455917409239041 30 | F::from_le_bytes_mod_order(&scalar.into_repr().to_bytes_le()) 31 | } 32 | 33 | /// Convert a base field element to a scalar field element. 34 | /// Perform a mod reduction if the base field element is greater than 35 | /// the modulus of the scalar field. 36 | pub fn fq_to_fr(base: &F) -> P::ScalarField 37 | where 38 | F: PrimeField, 39 | P: ModelParameters, 40 | { 41 | P::ScalarField::from_le_bytes_mod_order(&base.into_repr().to_bytes_le()) 42 | } 43 | 44 | /// Convert a field element in F(rom) to a field element in T(o), 45 | /// with |T| < |F|; truncating the element via masking the top 46 | /// F::size_in_bits() - T::size_in_bits() with 0s 47 | pub fn fq_to_fr_with_mask(base: &F) -> T 48 | where 49 | F: PrimeField, 50 | T: PrimeField, 51 | { 52 | assert!(T::size_in_bits() < F::size_in_bits()); 53 | let length = T::size_in_bits() >> 3; 54 | // ensure that no mod reduction happened 55 | T::from_le_bytes_mod_order(&base.into_repr().to_bytes_le()[0..length]) 56 | } 57 | 58 | // convert a field element in F(rom) 59 | // to a field element in T(o). 60 | // return an error if a mod reduction occurs. 61 | #[inline] 62 | pub fn field_switching(base: &F) -> T 63 | where 64 | F: PrimeField, 65 | T: PrimeField, 66 | { 67 | let bytes = base.into_repr().to_bytes_le(); 68 | let t = T::from_le_bytes_mod_order(&bytes); 69 | 70 | // check t == base 71 | // i.e., t did not overflow the target field 72 | let bytes_rec = t.into_repr().to_bytes_le(); 73 | let length = min(bytes.len(), bytes_rec.len()); 74 | assert_eq!(bytes_rec[0..length], bytes[0..length],); 75 | t 76 | } 77 | 78 | /// Hash a sequence of bytes to into a field 79 | /// element, whose order is less than 256 bits. 80 | pub fn hash_to_field(bytes: B) -> F 81 | where 82 | B: AsRef<[u8]>, 83 | F: PrimeField, 84 | { 85 | // we extract a random `rand_byte_len` bytes from the hash 86 | // the compute res = OS2IP(output) mod p 87 | // which is less than 2^-128 from uniform 88 | let rand_byte_len = (F::size_in_bits() + 7) / 8 + 128 / 8; 89 | let mut hasher = Sha512::default(); 90 | hasher.update(bytes.as_ref()); 91 | let output = &hasher.finalize()[0..rand_byte_len]; 92 | 93 | F::from_le_bytes_mod_order(output) 94 | } 95 | 96 | /// One-way, deterministic, infallible conversion between arbitrary bytes (of 97 | /// unknown length and potentially non-canonical) to field elements. 98 | /// This function converts bytes to vector of BaseField. 99 | pub fn bytes_to_field_elements(bytes: B) -> Vec 100 | where 101 | B: AsRef<[u8]> + Clone, 102 | F: PrimeField, 103 | { 104 | // segment the bytes into chunks of bytes, each chunk is of size 105 | // that is floor(F::size_in_bits/8). 
then, cast each chunk 106 | // into F via F::from_le_bytes_mod_order 107 | // note that mod_reduction is guaranteed to not occur 108 | 109 | // Field order is never a multiple of 8 110 | let chunk_length = F::size_in_bits() / 8; 111 | 112 | // pad the input to a multiple of chunk_length 113 | let padded_length = (bytes.as_ref().len() + chunk_length - 1) / chunk_length * chunk_length; 114 | let mut padded_bytes: Vec = bytes.as_ref().to_vec(); 115 | padded_bytes.resize(padded_length, 0u8); 116 | assert!(padded_bytes.len() % chunk_length == 0); 117 | 118 | let mut result = Vec::new(); 119 | for chunk in padded_bytes.chunks(chunk_length) { 120 | result.push(F::from_le_bytes_mod_order(chunk)); 121 | } 122 | result 123 | } 124 | 125 | #[cfg(test)] 126 | mod tests { 127 | use super::*; 128 | use ark_ed_on_bls12_377::{EdwardsParameters as Param377, Fr as Fr377}; 129 | use ark_ed_on_bls12_381::{EdwardsParameters as Param381, Fr as Fr381}; 130 | use ark_ed_on_bn254::{EdwardsParameters as Param254, Fr as Fr254}; 131 | use ark_std::UniformRand; 132 | 133 | #[test] 134 | fn test_bn254_scalar_conversion() { 135 | let mut rng = ark_std::test_rng(); 136 | for _ in 0..6 { 137 | let jj = Fr254::rand(&mut rng); 138 | let jj_bls = fr_to_fq::<_, Param254>(&jj); 139 | assert!(jj.into_repr() == jj_bls.into_repr()); 140 | } 141 | } 142 | 143 | #[test] 144 | fn test_jubjub_bls_scalar_conversion_377() { 145 | let mut rng = ark_std::test_rng(); 146 | for _ in 0..6 { 147 | let jj = Fr377::rand(&mut rng); 148 | let jj_bls = fr_to_fq::<_, Param377>(&jj); 149 | assert!(jj.into_repr() == jj_bls.into_repr()); 150 | } 151 | } 152 | 153 | #[test] 154 | fn test_jubjub_bls_scalar_conversion_381() { 155 | let mut rng = ark_std::test_rng(); 156 | for _ in 0..6 { 157 | let jj = Fr381::rand(&mut rng); 158 | let jj_bls = fr_to_fq::<_, Param381>(&jj); 159 | assert!(jj.into_repr() == jj_bls.into_repr()); 160 | } 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | #![cfg_attr(not(test), no_std)] 8 | 9 | mod conversion; 10 | mod macros; 11 | mod multi_pairing; 12 | pub mod par_utils; 13 | mod serialize; 14 | 15 | use ark_ff::Field; 16 | pub use ark_std::vec::Vec; 17 | 18 | pub use conversion::*; 19 | pub use macros::*; 20 | pub use multi_pairing::*; 21 | pub use serialize::*; 22 | 23 | #[inline] 24 | pub fn compute_len_to_next_multiple(len: usize, multiple: usize) -> usize { 25 | if len % multiple == 0 { 26 | len 27 | } else { 28 | len + multiple - len % multiple 29 | } 30 | } 31 | 32 | // Pad message with 0 until `msg` is multiple of `multiple` 33 | #[inline] 34 | pub fn pad_with_zeros(vec: &mut Vec, multiple: usize) { 35 | let len = vec.len(); 36 | let new_len = compute_len_to_next_multiple(len, multiple); 37 | vec.resize(new_len, F::zero()) 38 | } 39 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/macros.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 
3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! useful macros. 8 | 9 | /// Takes as input a struct, and converts them to a series of bytes. All traits 10 | /// that implement `CanonicalSerialize` can be automatically converted to bytes 11 | /// in this manner. 12 | #[macro_export] 13 | macro_rules! to_bytes { 14 | ($x:expr) => {{ 15 | let mut buf = ark_std::vec![]; 16 | ark_serialize::CanonicalSerialize::serialize($x, &mut buf).map(|_| buf) 17 | }}; 18 | } 19 | 20 | #[test] 21 | fn test_to_bytes() { 22 | use ark_bls12_381::Fr; 23 | use ark_serialize::CanonicalSerialize; 24 | use ark_std::One; 25 | let f1 = Fr::one(); 26 | 27 | let mut bytes = ark_std::vec![]; 28 | f1.serialize(&mut bytes).unwrap(); 29 | assert_eq!(bytes, to_bytes!(&f1).unwrap()); 30 | } 31 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/multi_pairing.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! This module implements a simple wrapper of multi-pairing function 8 | 9 | use ark_ec::PairingEngine; 10 | use ark_std::vec::Vec; 11 | 12 | /// A simple wrapper of multi-pairing function. 13 | pub fn multi_pairing(g1_elems: &[E::G1Affine], g2_elems: &[E::G2Affine]) -> E::Fqk 14 | where 15 | E: PairingEngine, 16 | { 17 | let inputs: Vec<(E::G1Prepared, E::G2Prepared)> = g1_elems 18 | .iter() 19 | .zip(g2_elems.iter()) 20 | .map(|(g1, g2)| ((*g1).into(), (*g2).into())) 21 | .collect(); 22 | 23 | E::product_of_pairings(&inputs) 24 | } 25 | 26 | #[cfg(test)] 27 | mod test { 28 | use super::*; 29 | use ark_bls12_377::Bls12_377; 30 | use ark_bls12_381::Bls12_381; 31 | use ark_bn254::Bn254; 32 | use ark_ec::{AffineCurve, PairingEngine, ProjectiveCurve}; 33 | use ark_std::{test_rng, One, UniformRand}; 34 | 35 | #[test] 36 | fn test_multi_pairing() { 37 | test_multi_pairing_helper::(); 38 | test_multi_pairing_helper::(); 39 | test_multi_pairing_helper::(); 40 | } 41 | 42 | fn test_multi_pairing_helper() { 43 | let mut rng = test_rng(); 44 | 45 | // generators with single pairing 46 | let g1 = E::G1Affine::prime_subgroup_generator(); 47 | let g2 = E::G2Affine::prime_subgroup_generator(); 48 | let gt = E::pairing(g1, g2); 49 | 50 | assert_eq!(multi_pairing::(&[g1], &[g2]), gt); 51 | 52 | // random elements with single pairing 53 | let r1 = E::Fr::rand(&mut rng); 54 | let r2 = E::Fr::rand(&mut rng); 55 | let f1 = g1.mul(r1).into_affine(); 56 | let f2 = g2.mul(r2).into_affine(); 57 | let ft = E::pairing(f1, f2); 58 | 59 | assert_eq!(multi_pairing::(&[f1], &[f2]), ft); 60 | 61 | // random multi pairing products 62 | let ht = gt * ft; 63 | assert_eq!(multi_pairing::(&[g1, f1], &[g2, f2]), ht); 64 | 65 | // equality test 66 | assert_eq!(multi_pairing::(&[g1, -g1], &[g2, g2]), E::Fqk::one()); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/par_utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | // You should have received a copy of the MIT License 4 | // along with the Jellyfish library. If not, see . 
5 | 6 | //! Utilities for parallel code. 7 | 8 | /// this function helps with slice iterator creation that optionally use 9 | /// `par_iter()` when feature flag `parallel` is on. 10 | /// 11 | /// # Usage 12 | /// let v = [1, 2, 3, 4, 5]; 13 | /// let sum = parallelizable_slice_iter(&v).sum(); 14 | /// 15 | /// // the above code is a shorthand for (thus equivalent to) 16 | /// #[cfg(feature = "parallel")] 17 | /// let sum = v.par_iter().sum(); 18 | /// #[cfg(not(feature = "parallel"))] 19 | /// let sum = v.iter().sum(); 20 | #[cfg(feature = "parallel")] 21 | pub fn parallelizable_slice_iter(data: &[T]) -> rayon::slice::Iter { 22 | use rayon::iter::IntoParallelIterator; 23 | data.into_par_iter() 24 | } 25 | 26 | #[cfg(not(feature = "parallel"))] 27 | pub fn parallelizable_slice_iter(data: &[T]) -> ark_std::slice::Iter { 28 | data.iter() 29 | } 30 | -------------------------------------------------------------------------------- /jellyfish/utilities/src/serialize.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2022 Espresso Systems (espressosys.com) 2 | // This file is part of the Jellyfish library. 3 | 4 | // You should have received a copy of the MIT License 5 | // along with the Jellyfish library. If not, see . 6 | 7 | //! Various serialization functions. 8 | 9 | use ark_std::vec::Vec; 10 | use serde::{Deserialize, Serialize}; 11 | 12 | /// A helper for converting ark_serialize::CanonicalSerialize bytes to standard 13 | /// Serde bytes. Use this struct as intermediate target instead of directly 14 | /// deriving serde::Serialize/Deserialize to avoid implementation of Visitors. 15 | #[derive(Serialize, Deserialize)] 16 | pub struct CanonicalBytes(pub Vec); 17 | 18 | impl From for CanonicalBytes { 19 | fn from(obj: T) -> CanonicalBytes { 20 | let mut bytes = Vec::new(); 21 | obj.serialize(&mut bytes) 22 | .expect("fail to serialize to canonical bytes"); 23 | CanonicalBytes(bytes) 24 | } 25 | } 26 | 27 | #[macro_export] 28 | macro_rules! deserialize_canonical_bytes { 29 | ($t:ident) => { 30 | deserialize_canonical_bytes!($t<>); 31 | }; 32 | 33 | // match MyStruct<'a, 'b, T: MyTrait, R: MyTrait2, ...> where any number of lifetime and generic parameters 34 | ($t:ident < $( $lt:lifetime ),* $( $T:ident : $trait:ident ),* >) => { 35 | impl<$($lt),* $( $T: $trait ),*> From for $t<$($lt),* $( $T ),*> { 36 | fn from(bytes: CanonicalBytes) -> Self { 37 | ark_serialize::CanonicalDeserialize::deserialize(bytes.0.as_slice()) 38 | .expect("fail to deserialize canonical bytes") 39 | } 40 | } 41 | }; 42 | } 43 | 44 | /// Serializers for elements that are Ark-Works serializable but not serde 45 | /// serializable. 46 | /// 47 | /// Many cryptographic objects (e.g. finite field elements) are foreign types 48 | /// that we cannot apply [tagged] or `#[derive(Deserialize, Serialize)]` to. 49 | /// Instead, use `#[serde(with = "canonical")]` at the point where the object is 50 | /// used inside a struct or enum definition. 
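///
/// Illustrative usage (hypothetical struct, assuming the `canonical` module is in
/// scope at the use site; any field type implementing `CanonicalSerialize` and
/// `CanonicalDeserialize` works the same way):
/// #[derive(serde::Serialize, serde::Deserialize)]
/// struct Wrapper {
///     #[serde(with = "canonical")]
///     point: ark_bls12_381::G1Affine,
/// }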
51 | /// 52 | /// [tagged]: tagged_base64::tagged 53 | pub mod canonical { 54 | use super::*; 55 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 56 | use ark_std::format; 57 | use serde::{ 58 | de::{Deserializer, Error as DeError}, 59 | ser::{Error as SerError, Serializer}, 60 | }; 61 | use tagged_base64::TaggedBase64; 62 | 63 | pub fn serialize( 64 | elem: &T, 65 | serializer: S, 66 | ) -> Result { 67 | let mut bytes = Vec::new(); 68 | T::serialize(elem, &mut bytes).map_err(|e| S::Error::custom(format!("{e:?}")))?; 69 | Serialize::serialize(&TaggedBase64::new("FIELD", &bytes).unwrap(), serializer) 70 | } 71 | 72 | pub fn deserialize<'de, D: Deserializer<'de>, T: CanonicalDeserialize>( 73 | deserializer: D, 74 | ) -> Result { 75 | let tb64 = ::deserialize(deserializer)?; 76 | if tb64.tag() == "FIELD" { 77 | T::deserialize(tb64.as_ref()).map_err(|e| D::Error::custom(format!("{e:?}"))) 78 | } else { 79 | Err(D::Error::custom(format!( 80 | "incorrect tag (expected FIELD, got {})", 81 | tb64.tag() 82 | ))) 83 | } 84 | } 85 | } 86 | 87 | #[macro_export] 88 | macro_rules! test_serde_default { 89 | ($struct:tt) => { 90 | use ark_serialize::*; 91 | 92 | let data = $struct::default(); 93 | let mut ser_bytes: $crate::Vec = $crate::Vec::new(); 94 | data.serialize(&mut ser_bytes).unwrap(); 95 | let de: $struct = $struct::deserialize(&ser_bytes[..]).unwrap(); 96 | assert_eq!(de, data); 97 | }; 98 | } 99 | -------------------------------------------------------------------------------- /src/errors.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | /// Errors returned by Sangria 4 | #[derive(Clone, Debug, Eq, PartialEq, Error)] 5 | pub enum SangriaError { 6 | /// returned if the supplied row or col in (row,col,val) tuple is out of range 7 | #[error("Index is out of bounds")] 8 | IndexOutOfBounds, 9 | 10 | /// returned if the commitment scheme returns an error 11 | #[error("An error occurred with the commitment scheme")] 12 | CommitmentError, 13 | } 14 | -------------------------------------------------------------------------------- /src/ivc.rs: -------------------------------------------------------------------------------- 1 | use ark_ff::PrimeField; 2 | use ark_std::{marker::PhantomData, rand::Rng}; 3 | 4 | use crate::{ 5 | folding_scheme::{self, FoldingCommitmentConfig}, 6 | RelaxedPLONKInstance, RelaxedPLONKWitness, StepCircuit, IVC, 7 | }; 8 | 9 | /// A "pre-sangria" scheme. Implements IVC from a NIFS as described in Construction3 of Nova. 10 | /// WARNING: this scheme is neither succinct nor zero-knowledge. 11 | /// 12 | /// This scheme makes use of a main field and a help field. The trace resulting of running one step of the computation 13 | /// is no longer in the MainField F_p, we have moved to some helper field F_q. Using cycles of curves, we can define a helper circuit 14 | /// in F_q that allows us to cycle back to F_p in order to compute our next step in F_p again. 15 | pub(crate) struct SangriaNoCompression< 16 | MainField: PrimeField, 17 | HelperField: PrimeField, 18 | Config: SangriaIVCConfig, 19 | SC: StepCircuit, 20 | >(PhantomData<(MainField, HelperField, Config, SC)>); 21 | 22 | /// A `SangriaIVCConfig` is a trait that allows to bundle types related to an IVC instantiation. 
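/// For example, an instantiation over a 2-cycle of curves would bundle one commitment scheme for the main field and another for the helper field.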
23 | /// By combining all the types here we avoid passing them as generics in structs such as `VerifierKey`, `ProverKey`, etc 24 | pub trait SangriaIVCConfig { 25 | type MainCommitmentSchemes: FoldingCommitmentConfig; 26 | 27 | type HelperCommitmentSchemes: FoldingCommitmentConfig; 28 | } 29 | 30 | /// Public parameters for the SangriaIVC scheme (no compression) contains commit parameters for the step circuit 31 | /// in the main field, and commit parameters for the helper circuit in the helper field. 32 | pub(crate) struct PublicParameters< 33 | MainField: PrimeField, 34 | HelperField: PrimeField, 35 | Config: SangriaIVCConfig, 36 | > { 37 | pub _main_nifs_pp: folding_scheme::PublicParameters, 38 | pub _helper_nifs_pp: 39 | folding_scheme::PublicParameters, 40 | } 41 | 42 | /// The SangriaIVC VerifierKey contains verifier keys for the foldings of the main and helper 43 | /// circuits. It also contains a description of the step circuit. 44 | pub(crate) struct VerifierKey< 45 | MainField: PrimeField, 46 | HelperField: PrimeField, 47 | Config: SangriaIVCConfig, 48 | SC: StepCircuit, 49 | > { 50 | pub _main_nifs_vk: folding_scheme::VerifierKey, 51 | pub _helper_nifs_vk: folding_scheme::VerifierKey, 52 | pub _step_circuit: SC, 53 | } 54 | 55 | /// The SangriaIVC ProverKey contains prover keys for the foldings of the main and helper 56 | /// circuits. It also contains a description of the step circuit. 57 | pub(crate) struct ProverKey< 58 | MainField: PrimeField, 59 | HelperField: PrimeField, 60 | Config: SangriaIVCConfig, 61 | SC: StepCircuit, 62 | > { 63 | pub _main_nifs_pk: folding_scheme::ProverKey, 64 | pub _helper_nifs_pk: folding_scheme::ProverKey, 65 | pub _step_circuit: SC, 66 | } 67 | 68 | /// A half cycle proof is composed of two instance-witness pairs: one running instance-witness 69 | /// that captures steps 0 to i-1 (via folding) and one instance-witness for the i-th step (the latest). 70 | pub(crate) struct HalfCycleProof> { 71 | pub _latest_step_instance: RelaxedPLONKInstance, 72 | pub _latest_step_witness: RelaxedPLONKWitness, 73 | pub _running_instance: RelaxedPLONKInstance, 74 | pub _running_witness: RelaxedPLONKWitness, 75 | } 76 | 77 | /// An IVC proof is composed of two half-cycle proofs. Each half cycle proof is composed 78 | /// of two instance-witness pairs: one running instance-witness that captures steps 0 to i-1 (via folding) 79 | /// and one instance-witness for the i-th step (the latest). 
80 | pub(crate) struct IVCProof< 81 | MainField: PrimeField, 82 | HelperField: PrimeField, 83 | Config: SangriaIVCConfig, 84 | > { 85 | pub _main_half_proof: HalfCycleProof, 86 | pub _helper_half_proof: HalfCycleProof, 87 | } 88 | 89 | impl IVC 90 | for SangriaNoCompression 91 | where 92 | MainField: PrimeField, 93 | HelperField: PrimeField, 94 | Config: SangriaIVCConfig, 95 | SC: StepCircuit, 96 | { 97 | type PublicParameters = PublicParameters; 98 | type ProverKey = ProverKey; 99 | type VerifierKey = VerifierKey; 100 | type Proof = IVCProof; 101 | 102 | fn setup(_rng: &mut R) -> Self::PublicParameters { 103 | todo!() 104 | } 105 | 106 | fn encode( 107 | _public_parameters: &Self::PublicParameters, 108 | _step_circuit: &SC, 109 | _rng: &mut R, 110 | ) -> Result<(Self::ProverKey, Self::VerifierKey), crate::SangriaError> { 111 | todo!() 112 | } 113 | 114 | fn prove_step( 115 | _prover_key: &Self::ProverKey, 116 | _origin_state: &SC::State, 117 | _current_state: SC::State, 118 | _current_proof: Option, 119 | _current_witness: &SC::Witness, 120 | ) -> Result<(SC::State, Self::Proof), crate::SangriaError> { 121 | todo!() 122 | } 123 | 124 | fn verify( 125 | _verifier_key: &Self::VerifierKey, 126 | _origin_state: &SC::State, 127 | _current_state: SC::State, 128 | _current_proof: Option, 129 | ) -> Result<(), crate::SangriaError> { 130 | todo!() 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | #![warn(missing_docs)] 2 | //! Sangria is a Nova-like scheme for recursive zero-knowledge proofs. It implements incrementally 3 | //! verifiable computation by using a folding for PLONK. We use a modified version of HyperPlonk to 4 | //! compress the IVC proofs. 5 | 6 | use ark_ff::PrimeField; 7 | use ark_std::rand::Rng; 8 | 9 | /// Interface for an IVC scheme. 10 | pub trait IVC> { 11 | /// Public parameters for the IVC scheme. 12 | type PublicParameters; 13 | 14 | /// A collection of data needed for proving. 15 | type ProverKey; 16 | 17 | /// A collection of data needed for verifying. 18 | type VerifierKey; 19 | 20 | /// An IVC proof. 21 | type Proof; 22 | 23 | /// Run the IVC setup to produce public parameters. 24 | fn setup(rng: &mut R) -> Self::PublicParameters; 25 | 26 | /// Run the IVC encoder to produce a proving key and a verifying key. 27 | fn encode( 28 | public_parameters: &Self::PublicParameters, 29 | step_circuit: &SC, 30 | rng: &mut R, 31 | ) -> Result<(Self::ProverKey, Self::VerifierKey), SangriaError>; 32 | 33 | /// Prove a step of the IVC computation. Consume the current state and proof and produce the *next* state and proof. 34 | fn prove_step( 35 | prover_key: &Self::ProverKey, 36 | origin_state: &SC::State, 37 | current_state: SC::State, 38 | current_proof: Option, 39 | current_witness: &SC::Witness, 40 | ) -> Result<(SC::State, Self::Proof), SangriaError>; 41 | 42 | /// Verify a step of the IVC computation. 43 | fn verify( 44 | verifier_key: &Self::VerifierKey, 45 | origin_state: &SC::State, 46 | current_state: SC::State, 47 | current_proof: Option, 48 | ) -> Result<(), SangriaError>; 49 | } 50 | 51 | /// A marker trait for an IVC scheme which implements proof compression. 52 | pub trait IVCWithProofCompression>: IVC {} 53 | 54 | /// Interface for a single step of the incremental computation. 55 | pub trait StepCircuit { 56 | /// The output a single step of the IVC. 
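/// This is the value threaded from one step to the next (the `z_i` of the IVC literature).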
57 | type State; 58 | 59 | /// The non-deterministic input for a step of the computation 60 | type Witness; 61 | } 62 | 63 | /// Interface for a non-interactive folding scheme (NIFS). 64 | pub trait NonInteractiveFoldingScheme { 65 | /// A type to contain the arguments necessary to run `setup` 66 | type SetupInfo; 67 | 68 | /// Public parameters for the scheme. 69 | type PublicParameters; 70 | 71 | /// The structure of the underlying NP problem. 72 | type Structure; 73 | 74 | /// A collection of data needed for proving. 75 | type ProverKey; 76 | 77 | /// A collection of data needed for verifying. 78 | type VerifierKey; 79 | 80 | /// An instance of the relation that will be folded. 81 | type Instance; 82 | 83 | /// A witness for the relation to be folded. 84 | type Witness; 85 | 86 | /// The prover's message. 87 | type ProverMessage; 88 | 89 | /// Run the randomised setup for the folding scheme to produce public parameters. 90 | fn setup(info: &Self::SetupInfo, rng: &mut R) -> Self::PublicParameters; 91 | 92 | /// Using the public parameters, run the randomised encoder that produces a prover key and verifier key. 93 | fn encode( 94 | pp: &Self::PublicParameters, 95 | circuit: &Self::Structure, 96 | rng: &mut R, 97 | ) -> Result<(Self::ProverKey, Self::VerifierKey), SangriaError>; 98 | 99 | /// The folding scheme prover. Outputs a folded instance-witness pair and the prover's message. 100 | #[allow(clippy::type_complexity)] 101 | fn prover( 102 | public_parameters: &Self::PublicParameters, 103 | prover_key: &Self::ProverKey, 104 | left_instance: &Self::Instance, 105 | left_witness: &Self::Witness, 106 | right_instance: &Self::Instance, 107 | right_witness: &Self::Witness, 108 | ) -> Result<(Self::Instance, Self::Witness, Self::ProverMessage), SangriaError>; 109 | 110 | /// The folding scheme verifier. Outputs a folded instance. 111 | fn verifier( 112 | public_parameters: &Self::PublicParameters, 113 | verifier_key: &Self::VerifierKey, 114 | left_instance: &Self::Instance, 115 | right_instance: &Self::Instance, 116 | prover_message: &Self::ProverMessage, 117 | ) -> Result; 118 | } 119 | 120 | mod folding_scheme; 121 | pub use folding_scheme::PLONKFoldingScheme; 122 | 123 | // mod ivc; 124 | 125 | mod relaxed_plonk; 126 | pub use relaxed_plonk::{ 127 | PLONKCircuit, RelaxedPLONKInstance, RelaxedPLONKWitness, CONSTANT_SELECTOR_INDEX, 128 | LEFT_SELECTOR_INDEX, MULTIPLICATION_SELECTOR_INDEX, OUTPUT_SELECTOR_INDEX, 129 | RIGHT_SELECTOR_INDEX, 130 | }; 131 | 132 | mod sangria; 133 | pub use sangria::Sangria; 134 | 135 | mod errors; 136 | pub use errors::SangriaError; 137 | 138 | mod vector_commitment; 139 | -------------------------------------------------------------------------------- /src/sangria.rs: -------------------------------------------------------------------------------- 1 | /// The Sangria IVC scheme with proof compression and zero-knowledge 2 | pub struct Sangria {} 3 | -------------------------------------------------------------------------------- /src/vector_commitment/mod.rs: -------------------------------------------------------------------------------- 1 | // pub mod pedersen; 2 | 3 | use crate::errors::SangriaError; 4 | use ark_ff::{Field, ToBytes, Zero}; 5 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 6 | use ark_sponge::Absorb; 7 | use ark_std::rand::Rng; 8 | use std::{iter::Sum, ops}; 9 | 10 | /// Trait defining the types and functions needed for an additively homomorphic commitment scheme. 
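/// Additive homomorphism here means `commit(ck, x, r) + commit(ck, y, s) = commit(ck, x + y, r + s)` for vectors `x`, `y` and randomness `r`, `s`, and multiplying a commitment by a scalar `c` yields a commitment to `c * x` with randomness `c * r`.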
11 | /// The scheme is defined with respect to a finite field `F` for which scalar multiplication is preserved. 12 | pub trait HomomorphicCommitmentScheme { 13 | type CommitKey: Clone + CanonicalSerialize + CanonicalDeserialize + ToBytes; 14 | 15 | /// Represent a ciphertext from a generic homomorphic encryption scheme. To manifest the homomorphic 16 | /// property of the scheme, we require that some arithmetic operations (add and multiply by scalar) are implemented. 17 | type Commitment: PartialEq 18 | + Copy 19 | + Clone 20 | + ops::Add 21 | + ops::Mul 22 | + CanonicalSerialize 23 | + CanonicalDeserialize 24 | + Zero 25 | + Sum 26 | + ToBytes 27 | + Absorb; 28 | 29 | /// Generate a commit key using the provided length 30 | fn setup(public_randomness: &mut R, len: usize) -> Self::CommitKey; 31 | 32 | /// Commit to a vector of scalars using the commit key 33 | fn commit( 34 | commit_key: &Self::CommitKey, 35 | x: &[F], 36 | r: F, 37 | ) -> Result; 38 | } 39 | -------------------------------------------------------------------------------- /src/vector_commitment/pedersen/arithmetic_definitions/commitment.rs: -------------------------------------------------------------------------------- 1 | use super::super::Commitment; 2 | use ark_ec::{AffineCurve, ProjectiveCurve}; 3 | use ark_ff::Zero; 4 | use ark_std::UniformRand; 5 | use rand::Rng; 6 | use std::ops::Mul; 7 | 8 | impl Mul for Commitment { 9 | type Output = Self; 10 | fn mul(self, x: C::ScalarField) -> Self::Output { 11 | Self(self.0.mul(x).into_affine()) 12 | } 13 | } 14 | 15 | impl std::ops::Add for Commitment { 16 | type Output = Self; 17 | 18 | fn add(self, _rhs: Self) -> Self { 19 | Self(self.0 + _rhs.0) 20 | } 21 | } 22 | 23 | impl std::iter::Sum for Commitment { 24 | fn sum>(iter: I) -> Self { 25 | iter.fold(Self::zero(), |a, b| a + b) 26 | } 27 | } 28 | 29 | impl Zero for Commitment { 30 | fn zero() -> Self { 31 | Self(C::Affine::zero()) 32 | } 33 | 34 | fn is_zero(&self) -> bool { 35 | *self == Self(C::Affine::zero()) 36 | } 37 | } 38 | 39 | impl UniformRand for Commitment { 40 | fn rand(rng: &mut R) -> Self { 41 | Self(C::rand(rng).into_affine()) 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/vector_commitment/pedersen/arithmetic_definitions/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod commitment; 2 | -------------------------------------------------------------------------------- /src/vector_commitment/pedersen/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::errors::SangriaError; 2 | use crate::vector_commitment::HomomorphicCommitmentScheme; 3 | 4 | use ark_ec::{msm::VariableBaseMSM, ProjectiveCurve}; 5 | use ark_ff::{PrimeField, ToBytes}; 6 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, SerializationError}; 7 | use ark_std::{ 8 | io::{Read, Write}, 9 | marker::PhantomData, 10 | }; 11 | use ark_std::rand::Rng; 12 | 13 | pub mod arithmetic_definitions; 14 | mod tests; 15 | 16 | pub struct PedersenCommitment { 17 | _curve: PhantomData, 18 | } 19 | 20 | #[derive(Clone, CanonicalSerialize, CanonicalDeserialize, Debug)] 21 | pub struct CommitKey { 22 | g: Vec, 23 | h: C::Affine, 24 | } 25 | 26 | impl CommitKey { 27 | pub fn new(g: Vec, h: C::Affine) -> Self { 28 | Self { g, h } 29 | } 30 | } 31 | 32 | impl ToBytes for CommitKey { 33 | fn write(&self, mut w: W) -> ark_std::io::Result<()> { 34 | self.g.write(&mut w)?; 35 | self.h.write(&mut w)?; 36 
| 37 | Ok(()) 38 | } 39 | } 40 | 41 | #[derive(Clone, Copy, Debug, PartialEq, CanonicalSerialize, CanonicalDeserialize)] 42 | pub struct Commitment(pub C::Affine); 43 | 44 | impl ToBytes for Commitment { 45 | fn write(&self, mut w: W) -> ark_std::io::Result<()> { 46 | self.0.write(&mut w)?; 47 | 48 | Ok(()) 49 | } 50 | } 51 | 52 | impl HomomorphicCommitmentScheme for PedersenCommitment { 53 | type CommitKey = CommitKey; 54 | type Commitment = Commitment; 55 | 56 | fn setup(public_randomness: &mut R, len: usize) -> CommitKey { 57 | let mut g = Vec::with_capacity(len); 58 | for _ in 0..len { 59 | g.push(C::rand(public_randomness).into_affine()); 60 | } 61 | let h = C::rand(public_randomness).into_affine(); 62 | CommitKey:: { g, h } 63 | } 64 | 65 | fn commit( 66 | commit_key: &CommitKey, 67 | x: &[C::ScalarField], 68 | r: C::ScalarField, 69 | ) -> Result { 70 | if x.len() > commit_key.g.len() { 71 | // a Pedersen commit key must provide one generator per 72 | // committed element, so reject inputs that are longer than 73 | // the key; the crate's `SangriaError` does not carry the 74 | // offending lengths 75 | return Err(SangriaError::CommitmentError); 76 | } 77 | 78 | let scalars = [&[r][..], x] 79 | .concat() 80 | .iter() 81 | .map(|x| x.into_repr()) 82 | .collect::>(); 83 | 84 | let bases = [&[commit_key.h], &commit_key.g[..]].concat(); 85 | 86 | Ok(Commitment( 87 | VariableBaseMSM::multi_scalar_mul(&bases, &scalars[..]).into_affine(), 88 | )) 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /src/vector_commitment/pedersen/tests.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod test { 3 | use crate::utils::rand::sample_vector; 4 | use crate::vector_commitment::{pedersen, HomomorphicCommitmentScheme}; 5 | use ark_ff::Zero; 6 | use ark_std::{rand::thread_rng, UniformRand}; 7 | use starknet_curve; 8 | use std::ops::Mul; 9 | 10 | // Define type aliases for succinctness 11 | type Curve = starknet_curve::Projective; 12 | type Scalar = starknet_curve::Fr; 13 | type Pedersen = pedersen::PedersenCommitment; 14 | 15 | #[test] 16 | fn additive_homomorphism() { 17 | let rng = &mut thread_rng(); 18 | let n = 52; 19 | 20 | let commit_key = Pedersen::setup(rng, n); 21 | 22 | let r1 = Scalar::rand(rng); 23 | let r2 = Scalar::rand(rng); 24 | 25 | let v1: Vec = sample_vector(rng, n); 26 | let v2: Vec = sample_vector(rng, n); 27 | 28 | let alpha = Scalar::rand(rng); 29 | let beta = Scalar::rand(rng); 30 | 31 | let v3 = v1 32 | .iter() 33 | .zip(v2.iter()) 34 | .map(|(&a, &b)| a * alpha + b * beta) 35 | .collect::>(); 36 | let r3 = alpha * r1 + beta * r2; 37 | 38 | let commit_v1 = Pedersen::commit(&commit_key, &v1, r1).unwrap(); 39 | let commit_v2 = Pedersen::commit(&commit_key, &v2, r2).unwrap(); 40 | let commit_v3 = Pedersen::commit(&commit_key, &v3, r3).unwrap(); 41 | 42 | let expected = commit_v1.mul(alpha) + commit_v2.mul(beta); 43 | 44 | assert_eq!(expected, commit_v3) 45 | } 46 | 47 | #[test] 48 | fn short_commitment() { 49 | let rng = &mut thread_rng(); 50 | let n = 10; 51 | 52 | let commit_key = Pedersen::setup(rng, n); 53 | 54 | let r = Scalar::rand(rng); 55 | 56 | let s1 = Scalar::rand(rng); 57 | let zero = Scalar::zero(); 58 | 59 | let v1 = vec![s1, zero, zero, zero]; 60 | 61 | let commit_v1 = Pedersen::commit(&commit_key, &v1, r).unwrap(); 62 | 63 | let commit_s1 = Pedersen::commit(&commit_key, &vec![s1], r).unwrap(); 64 | 65 | assert_eq!(v1[0], s1); 66 | assert_eq!(commit_v1, commit_s1); 67 | } 68 | 69 | #[test] 70 | #[should_panic] 71 | fn too_many_values() { 72 | let rng = &mut thread_rng(); 73 | let n = 5; 74 | 75 | let
commit_key = Pedersen::setup(rng, n); 76 | 77 | let r = Scalar::rand(rng); 78 | 79 | let s1 = Scalar::rand(rng); 80 | 81 | let too_long = vec![s1; n + 2]; 82 | 83 | let _commit = Pedersen::commit(&commit_key, &too_long, r).unwrap(); 84 | } 85 | } 86 | --------------------------------------------------------------------------------
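A minimal sketch of how a caller could drive the `IVC` interface from src/lib.rs, assuming the trait is generic over a prime field and a step circuit (written `IVC<F, SC>` and `StepCircuit<F>` below) and that `SC::State: Clone`. The `run_ivc` helper and its bounds are illustrative only; every method of the scheme shipped in this crate is still `todo!()`.

use ark_ff::PrimeField;
use ark_std::rand::Rng;

use crate::{SangriaError, StepCircuit, IVC};

/// Fold one step per witness, then verify the final state transition.
fn run_ivc<F, SC, Scheme, R>(
    rng: &mut R,
    step_circuit: &SC,
    origin_state: SC::State,
    witnesses: &[SC::Witness],
) -> Result<(), SangriaError>
where
    F: PrimeField,
    SC: StepCircuit<F>,
    SC::State: Clone,
    Scheme: IVC<F, SC>,
    R: Rng,
{
    // One-time setup and circuit-specific key generation.
    let pp = Scheme::setup(rng);
    let (pk, vk) = Scheme::encode(&pp, step_circuit, rng)?;

    let mut state = origin_state.clone();
    let mut proof = None;
    for witness in witnesses {
        // Each step consumes the current state/proof and yields the next pair.
        let (next_state, next_proof) =
            Scheme::prove_step(&pk, &origin_state, state, proof, witness)?;
        state = next_state;
        proof = Some(next_proof);
    }

    // Verification needs only the verifier key, the origin state,
    // the claimed current state and the latest proof.
    Scheme::verify(&vk, &origin_state, state, proof)
}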