├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── ci_free_disk_space.sh ├── contracts ├── .eslintrc.cjs ├── .gitattributes ├── .gitignore ├── .npmignore ├── .prettierignore ├── .prettierrc ├── LICENSE ├── babel.config.cjs ├── config.json ├── jest-resolver.cjs ├── jest.config.js ├── package-lock.json ├── package.json ├── src │ ├── array_list_hasher.ts │ ├── aux_witness.ts │ ├── blobstream │ │ ├── batcher.ts │ │ ├── blobstream_contract.ts │ │ ├── prove_zkps.ts │ │ ├── rollup.ts │ │ ├── sp1_to_env.ts │ │ ├── verify_blob_inclusion.ts │ │ └── verify_blobstream.ts │ ├── build_vk_tree.ts │ ├── compile_recursion_vks.ts │ ├── compressor │ │ ├── compressor.ts │ │ └── layer1node.ts │ ├── ec │ │ ├── g1.ts │ │ ├── g2.ts │ │ └── index.ts │ ├── groth │ │ ├── accumulate_lines.ts │ │ ├── compute_pi.ts │ │ ├── e2e_test.ts │ │ ├── example_jsons │ │ │ ├── aux_witness.json │ │ │ ├── proof.json │ │ │ └── vk.json │ │ ├── proof.ts │ │ ├── proof_to_env.ts │ │ ├── recursion │ │ │ ├── data.ts │ │ │ ├── prove_zkps.ts │ │ │ ├── zkp0.ts │ │ │ ├── zkp1.ts │ │ │ ├── zkp10.ts │ │ │ ├── zkp11.ts │ │ │ ├── zkp12.ts │ │ │ ├── zkp13.ts │ │ │ ├── zkp14.ts │ │ │ ├── zkp15.ts │ │ │ ├── zkp2.ts │ │ │ ├── zkp3.ts │ │ │ ├── zkp4.ts │ │ │ ├── zkp5.ts │ │ │ ├── zkp6.ts │ │ │ ├── zkp7.ts │ │ │ ├── zkp8.ts │ │ │ └── zkp9.ts │ │ ├── risc0 │ │ │ └── decode_tx.ts │ │ ├── serialize_mlo.ts │ │ ├── verifier.ts │ │ ├── vk.ts │ │ ├── vk_from_env.ts │ │ └── witness_tracker.ts │ ├── kzg │ │ └── structs.ts │ ├── line_parser.ts │ ├── lines │ │ ├── coeffs.ts │ │ ├── index.ts │ │ └── precompute.ts │ ├── node_resolver.ts │ ├── plonk │ │ ├── accumulator.ts │ │ ├── aux_witness.ts │ │ ├── e2e_test.ts │ │ ├── e2e_verify.ts │ │ ├── fiat-shamir │ │ │ ├── index.ts │ │ │ └── sha_to_fr.ts │ │ ├── get_mlo.ts │ │ ├── helpers.ts │ │ ├── mm_loop │ │ │ ├── accumulate_lines.ts │ │ │ ├── e2e_test.ts │ │ │ ├── g2_lines.json │ │ │ ├── helpers.ts │ │ │ ├── multi_miller.ts │ │ │ ├── 
precompute_lines.ts │ │ │ └── tau_lines.json │ │ ├── parse_pi.ts │ │ ├── piop │ │ │ ├── e2e_playground.ts │ │ │ ├── e2e_test.ts │ │ │ ├── hash_fr.ts │ │ │ ├── piop.ts │ │ │ └── plonk_utils.ts │ │ ├── proof.ts │ │ ├── recursion │ │ │ ├── line_parser.ts │ │ │ ├── prove_zkps.ts │ │ │ ├── witness_tracker.ts │ │ │ ├── zkp0.ts │ │ │ ├── zkp1.ts │ │ │ ├── zkp10.ts │ │ │ ├── zkp11.ts │ │ │ ├── zkp12.ts │ │ │ ├── zkp13.ts │ │ │ ├── zkp14.ts │ │ │ ├── zkp15.ts │ │ │ ├── zkp16.ts │ │ │ ├── zkp17.ts │ │ │ ├── zkp18.ts │ │ │ ├── zkp19.ts │ │ │ ├── zkp2.ts │ │ │ ├── zkp20.ts │ │ │ ├── zkp21.ts │ │ │ ├── zkp22.ts │ │ │ ├── zkp23.ts │ │ │ ├── zkp3.ts │ │ │ ├── zkp4.ts │ │ │ ├── zkp5.ts │ │ │ ├── zkp6.ts │ │ │ ├── zkp7.ts │ │ │ ├── zkp8.ts │ │ │ └── zkp9.ts │ │ ├── serialize_mlo.ts │ │ ├── state.ts │ │ ├── utils.ts │ │ ├── verifier.ts │ │ ├── vk.ts │ │ ├── vk_1.0.1.ts │ │ ├── vk_1.0.5.ts │ │ ├── vk_1.0.8.ts │ │ └── vk_1.1.0.ts │ ├── risc_zero │ │ ├── prove_zkps.ts │ │ └── verify_risc_zero.ts │ ├── sha │ │ ├── bits.py │ │ ├── compress.py │ │ ├── modulus.py │ │ ├── pad_741_bytes.py │ │ ├── sha_hash.ts │ │ ├── update.ts │ │ └── utils.ts │ ├── structs.ts │ ├── towers │ │ ├── assert-mul.ts │ │ ├── consts.ts │ │ ├── fp.ts │ │ ├── fp12-benchmark.ts │ │ ├── fp12.ts │ │ ├── fp2.ts │ │ ├── fp6.ts │ │ ├── fr.ts │ │ ├── gamma_1s.json │ │ ├── gamma_2s.json │ │ ├── gamma_3s.json │ │ ├── index.ts │ │ ├── neg_gamma.json │ │ ├── precompute.ts │ │ └── precomputed.ts │ └── tree_of_vks.ts └── tsconfig.json ├── package-lock.json ├── pairing-utils ├── .gitignore ├── Cargo.toml └── src │ ├── bin │ ├── alphabeta.rs │ └── aux_witness.rs │ ├── constants.rs │ ├── eth_root.rs │ ├── kzg.rs │ ├── lib.rs │ ├── risc0_vk.rs │ ├── serialize.rs │ ├── tonelli_shanks.rs │ ├── utils.rs │ └── write.rs └── scripts ├── .env.example ├── blobstream_example ├── blobInclusionSP1Proof.json ├── blobstreamSP1Proof.json └── e2e_blobstream_inclusion.sh ├── e2e_groth16.sh ├── e2e_plonk.sh ├── e2e_verify_plonk.sh ├── 
get_alphabeta_groth16.sh ├── get_aux_witness_groth16.sh ├── get_aux_witness_plonk.sh ├── groth16_tree.sh ├── plonk_tree.sh ├── prepare_vk.sh ├── risc_zero_example ├── e2e_risc_zero.sh ├── risc_zero_proof.json └── risc_zero_raw_vk.json └── verify_non_recursive.sh /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | run-name: ${{ github.actor }} is testing out o1js-blobstream 3 | on: [push] 4 | #on: 5 | #push: 6 | #branches: [ "main" ] 7 | #pull_request: 8 | #branches: [ "main" ] 9 | 10 | env: 11 | MAX_THREADS: 2 12 | 13 | jobs: 14 | setup_node_and_rust: 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | node-version: [22.x] 19 | steps: 20 | - 21 | name: Checkout code 22 | uses: actions/checkout@v4 23 | 24 | - 25 | name: Clear up disk space 26 | run: | 27 | echo "free space:" 28 | df -h 29 | bash ./ci_free_disk_space.sh 30 | echo "free space:" 31 | df -h 32 | 33 | - 34 | name: Clear up disk space #2 35 | uses: jlumbroso/free-disk-space@main 36 | with: 37 | # this might remove tools that are actually needed, 38 | # if set to "true" but frees about 6 GB 39 | tool-cache: false 40 | 41 | # all of these default to true, but feel free to set to 42 | # "false" if necessary for your workflow 43 | android: true 44 | dotnet: true 45 | haskell: true 46 | large-packages: true 47 | docker-images: true 48 | swap-storage: true 49 | 50 | - 51 | name: Use Rust 52 | uses: actions-rust-lang/setup-rust-toolchain@v1 53 | 54 | - 55 | name: Use Node.js ${{ matrix.node-version }} 56 | uses: actions/setup-node@v4 57 | with: 58 | node-version: 22 59 | 60 | - 61 | name: Install build dependencies 62 | run: | 63 | sudo apt-get -y -qq install build-essential 64 | 65 | - 66 | name: Run e2e_risc_zero.sh 67 | working-directory: scripts/risc_zero_example/ 68 | run: | 69 | bash ./e2e_risc_zero.sh 70 | 71 | - 72 | name: Run e2e_blobstream_inclusion.sh 73 | working-directory: scripts/blobstream_example/ 74 | run: | 75 | 
bash ./e2e_blobstream_inclusion.sh 76 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | o1js 2 | scripts/blobstream_example/run 3 | scripts/risc_zero_example/run 4 | scripts/risc_zero_example/work_dir 5 | scripts/risc_zero_example/cache_dir 6 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright 2024 Geometry Research Ltd. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # o1js-pairing 2 | 3 | See the [Gitbook](https://o1js-blobstream.gitbook.io/o1js-blobstream) for documentation. 4 | 5 | ## License 6 | 7 | This project is licensed under either of 8 | 9 | - [Apache License, Version 2.0](https://www.apache.org/licenses/LICENSE-2.0) ([`LICENSE-APACHE`](LICENSE-APACHE)) 10 | - [MIT license](https://opensource.org/licenses/MIT) ([`LICENSE-MIT`](LICENSE-MIT)) 11 | 12 | at your option. 13 | 14 | The [SPDX](https://spdx.dev) license identifier for this project is `MIT OR Apache-2.0`. 15 | -------------------------------------------------------------------------------- /ci_free_disk_space.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # From https://raw.githubusercontent.com/apache/flink/02d30ace69dc18555a5085eccf70ee884e73a16e/tools/azure-pipelines/free_disk_space.sh 4 | 5 | # Licensed to the Apache Software Foundation (ASF) under one or more 6 | # contributor license agreements. See the NOTICE file distributed with 7 | # this work for additional information regarding copyright ownership. 8 | # The ASF licenses this file to You under the Apache License, Version 2.0 9 | # (the "License"); you may not use this file except in compliance with 10 | # the License. You may obtain a copy of the License at 11 | # 12 | # http://www.apache.org/licenses/LICENSE-2.0 13 | # 14 | # Unless required by applicable law or agreed to in writing, software 15 | # distributed under the License is distributed on an "AS IS" BASIS, 16 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 | # See the License for the specific language governing permissions and 18 | # limitations under the License. 
19 | 20 | 21 | # The Azure provided machines typically have the following disk allocation: 22 | # Total space: 85GB 23 | # Allocated: 67 GB 24 | # Free: 17 GB 25 | # This script frees up 28 GB of disk space by deleting unneeded packages and 26 | # large directories. 27 | # The Flink end to end tests download and generate more than 17 GB of files, 28 | # causing unpredictable behavior and build failures. 29 | echo "Freeing up disk space on CI system" 30 | 31 | echo "Listing 100 largest packages" 32 | dpkg-query -Wf '${Installed-Size}\t${Package}\n' | sort -n | tail -n 100 33 | 34 | df -h 35 | 36 | echo "Removing large packages" 37 | sudo apt-get remove -y '^ghc-8.*' 38 | sudo apt-get remove -y '^dotnet-.*' 39 | sudo apt-get remove -y -qq '^llvm-.*' 40 | sudo apt-get remove -y -qq 'php.*' 41 | sudo apt-get remove -y -qq 'cpp.*' 42 | sudo apt-get remove -y -qq 'ruby.*' 43 | sudo apt-get remove -y -qq 'mysql*' 44 | sudo apt-get remove -y -qq 'aspnetcore-runtime-*' 45 | sudo apt-get remove -y -qq humanity-icon-theme monodoc-manual libruby3.0 mono-utils mono-llvm-support gfortran-11 dotnet-targeting-pack-7.0 vim-runtime postgresql-14 46 | sudo apt-get remove -y -qq azure-cli google-cloud-sdk hhvm google-chrome-stable firefox powershell mono-devel 47 | sudo apt-get autoremove -y 48 | sudo apt-get clean 49 | 50 | echo "Removing /usr/share/dotnet/" 51 | rm -rf /usr/share/dotnet/ 52 | -------------------------------------------------------------------------------- /contracts/.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | env: { 4 | browser: true, 5 | node: true, 6 | jest: true, 7 | }, 8 | extends: [ 9 | 'eslint:recommended', 10 | 'plugin:@typescript-eslint/eslint-recommended', 11 | 'plugin:@typescript-eslint/recommended', 12 | 'plugin:o1js/recommended', 13 | ], 14 | parser: '@typescript-eslint/parser', 15 | parserOptions: { 16 | ecmaVersion: 'latest', 17 | }, 18 | plugins: 
['@typescript-eslint', 'o1js'], 19 | rules: { 20 | 'no-constant-condition': 'off', 21 | 'prefer-const': 'off', 22 | }, 23 | }; 24 | -------------------------------------------------------------------------------- /contracts/.gitattributes: -------------------------------------------------------------------------------- 1 | # Use line endings appropriate for the system. This prevents Git from 2 | # complaining about project template line endings when committing on Windows. 3 | * text=auto eol=lf 4 | -------------------------------------------------------------------------------- /contracts/.gitignore: -------------------------------------------------------------------------------- 1 | # NodeJS 2 | node_modules 3 | build 4 | coverage 5 | 6 | # Editor 7 | .vscode 8 | 9 | # System 10 | .DS_Store 11 | 12 | # Never commit keys to Git! 13 | keys 14 | 15 | groth16_cache 16 | 17 | src/recursion/vks 18 | src/recursion/proofs 19 | 20 | src/plonk/recursion/vks 21 | src/plonk/recursion/proofs -------------------------------------------------------------------------------- /contracts/.npmignore: -------------------------------------------------------------------------------- 1 | # TypeScript files 2 | src 3 | 4 | # Editor 5 | .vscode 6 | 7 | # System 8 | .DS_Store 9 | 10 | # Never reveal your keys! 
11 | keys 12 | -------------------------------------------------------------------------------- /contracts/.prettierignore: -------------------------------------------------------------------------------- 1 | # NodeJS 2 | node_modules 3 | build 4 | coverage 5 | .husky 6 | 7 | # Editor 8 | .vscode 9 | 10 | # System 11 | .DS_Store 12 | 13 | # Misc 14 | LICENSE 15 | -------------------------------------------------------------------------------- /contracts/.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "semi": true, 3 | "singleQuote": true, 4 | "tabWidth": 2, 5 | "trailingComma": "es5" 6 | } 7 | -------------------------------------------------------------------------------- /contracts/babel.config.cjs: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [['@babel/preset-env', { targets: { node: 'current' } }]], 3 | }; 4 | -------------------------------------------------------------------------------- /contracts/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 1, 3 | "deployAliases": {} 4 | } 5 | -------------------------------------------------------------------------------- /contracts/jest-resolver.cjs: -------------------------------------------------------------------------------- 1 | module.exports = (request, options) => { 2 | return options.defaultResolver(request, { 3 | ...options, 4 | packageFilter: (pkg) => { 5 | // When importing o1js, we specify the Node ESM import as Jest by default imports the web version 6 | if (pkg.name === 'o1js') { 7 | return { 8 | ...pkg, 9 | main: pkg.exports.node.import, 10 | }; 11 | } 12 | if (pkg.name === 'node-fetch') { 13 | return { ...pkg, main: pkg.main }; 14 | } 15 | return { 16 | ...pkg, 17 | main: pkg.module || pkg.main, 18 | }; 19 | }, 20 | }); 21 | }; 22 | 
-------------------------------------------------------------------------------- /contracts/jest.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('@ts-jest/dist/types').InitialOptionsTsJest} */ 2 | export default { 3 | verbose: true, 4 | preset: 'ts-jest/presets/default-esm', 5 | testEnvironment: 'node', 6 | globals: { 7 | 'ts-jest': { 8 | useESM: true, 9 | }, 10 | }, 11 | testTimeout: 1_000_000, 12 | transform: { 13 | '^.+\\.(t)s$': 'ts-jest', 14 | '^.+\\.(j)s$': 'babel-jest', 15 | }, 16 | resolver: '/jest-resolver.cjs', 17 | transformIgnorePatterns: [ 18 | '/node_modules/(?!(tslib|o1js/node_modules/tslib))', 19 | ], 20 | modulePathIgnorePatterns: ['/build/'], 21 | moduleNameMapper: { 22 | '^(\\.{1,2}/.+)\\.js$': '$1', 23 | }, 24 | }; 25 | -------------------------------------------------------------------------------- /contracts/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pairing", 3 | "version": "0.1.0", 4 | "description": "", 5 | "author": "", 6 | "license": "Apache-2.0", 7 | "keywords": [ 8 | "mina-zkapp", 9 | "mina-zk-app", 10 | "mina-dapp", 11 | "zkapp" 12 | ], 13 | "type": "module", 14 | "main": "build/src/index.js", 15 | "types": "build/src/index.d.ts", 16 | "scripts": { 17 | "build": "tsc", 18 | "buildw": "tsc --watch", 19 | "coverage": "node --experimental-vm-modules node_modules/jest/bin/jest.js --coverage", 20 | "format": "prettier --write --ignore-unknown **/*", 21 | "test": "npm run build && node --max-old-space-size=65536 build/src/plonk/e2e_test.js && node --max-old-space-size=65536 build/src/plonk/mm_loop/e2e_test.js && node --max-old-space-size=65536 build/src/plonk/piop/e2e_test.js && node --max-old-space-size=65536 build/src/plonk/piop/e2e_test.js && node --max-old-space-size=65536 build/src/groth/e2e_test.js", 22 | "testw": "node --experimental-vm-modules node_modules/jest/bin/jest.js --watch", 23 | "lint": 
"npx eslint src/* --fix" 24 | }, 25 | "dependencies": { 26 | "bytebuffer": "^5.0.1", 27 | "ethers": "^6.13.1", 28 | "o1js": "^1.5.0", 29 | "typescript": "^5.4.5" 30 | }, 31 | "devDependencies": { 32 | "@babel/preset-env": "^7.16.4", 33 | "@babel/preset-typescript": "^7.16.0", 34 | "@types/jest": "^27.0.3", 35 | "@types/node": "^20.12.12", 36 | "@typescript-eslint/eslint-plugin": "^5.5.0", 37 | "@typescript-eslint/parser": "^5.5.0", 38 | "eslint": "^8.7.0", 39 | "eslint-plugin-o1js": "^0.4.0", 40 | "jest": "^27.3.1", 41 | "prettier": "^2.3.2" 42 | }, 43 | "overrides": { 44 | "node-fetch@2.x": { 45 | "whatwg-url": "14.x" 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /contracts/src/array_list_hasher.ts: -------------------------------------------------------------------------------- 1 | import { Field, Poseidon, Provable } from "o1js"; 2 | import { ATE_LOOP_COUNT, Fp12 } from "./towers/index.js"; 3 | 4 | class ArrayListHasher { 5 | static n: number; 6 | 7 | static empty(): Field { 8 | const a = new Array(this.n).fill(Field(0n)) 9 | return Poseidon.hashPacked(Provable.Array(Field, this.n), a) 10 | } 11 | 12 | static hash(arr: Array): Field { 13 | return Poseidon.hashPacked(Provable.Array(Field, this.n), arr) 14 | } 15 | 16 | static open(lhs: Array, opening: Array, rhs: Array): Field { 17 | const opening_hashes: Field[] = opening.map((x) => Poseidon.hashPacked(Fp12, x)); 18 | 19 | let arr: Field[] = [] 20 | arr = arr.concat(lhs) 21 | arr = arr.concat(opening_hashes) 22 | arr = arr.concat(rhs) 23 | 24 | return this.hash(arr) 25 | } 26 | } 27 | 28 | ArrayListHasher.n = ATE_LOOP_COUNT.length 29 | 30 | export { ArrayListHasher } -------------------------------------------------------------------------------- /contracts/src/aux_witness.ts: -------------------------------------------------------------------------------- 1 | import { Field, Struct } from "o1js" 2 | import { Fp12, Fp12Type } from "./towers/fp12.js" 3 
| import fs from "fs" 4 | 5 | export type AuXWitnessType = { 6 | c: Fp12Type, 7 | shift_power: string 8 | } 9 | 10 | export class AuXWitness extends Struct({ 11 | c: Fp12, 12 | shift_power: Field 13 | }) { 14 | static parse(path: string): AuXWitness { 15 | const data = fs.readFileSync(path, 'utf-8'); 16 | const obj: AuXWitnessType = JSON.parse(data) 17 | 18 | return new AuXWitness({ 19 | c: Fp12.loadFromJSON(obj.c), 20 | shift_power: Field.from(obj.shift_power) 21 | }) 22 | } 23 | } -------------------------------------------------------------------------------- /contracts/src/blobstream/blobstream_contract.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Field, 3 | PrivateKey, 4 | Provable, 5 | SmartContract, 6 | State, 7 | VerificationKey, 8 | method, 9 | state, 10 | Poseidon, 11 | UInt8, 12 | Bytes, 13 | Gadgets, 14 | MerkleTree, 15 | MerkleWitness, 16 | Undefined, 17 | Proof, 18 | } from 'o1js'; 19 | import { FrC } from '../towers/index.js'; 20 | import { NodeProofLeft } from '../structs.js'; 21 | import fs from "fs" 22 | import { parsePublicInputs, parsePublicInputsProvable } from '../plonk/parse_pi.js'; 23 | import { provableBn254ScalarFieldToBytes } from '../sha/utils.js'; 24 | import { BlobstreamInput, BlobstreamProof, blobstreamVerifier, Bytes32 } from './verify_blobstream.js'; 25 | 26 | export const adminPrivateKey = PrivateKey.fromBase58( 27 | 'EKFcef5HKXAn7V2rQntLiXtJr15dkxrsrQ1G4pnYemhMEAWYbkZW' 28 | ); 29 | 30 | export const adminPublicKey = adminPrivateKey.toPublicKey(); 31 | 32 | export class BlobstreamMerkleWitness extends MerkleWitness(32) {} 33 | 34 | class BlobstreamProofType extends BlobstreamProof {} 35 | 36 | export class BlobstreamProcessor extends SmartContract { 37 | 38 | @state(Field) parametersWereSet = State(); 39 | @state(Field) commitmentsRoot = State(); 40 | @state(Field) currentLeafIndex = State(); 41 | @state(Field) trustedBlock = State(); 42 | 43 | init() { 44 | super.init(); 45 
| this.commitmentsRoot.set(Field.from(19057105225525447794058879360670244229202611178388892366137113354909512903676n)); 46 | this.currentLeafIndex.set(Field(0)); 47 | this.account.delegate.set(adminPublicKey); 48 | this.parametersWereSet.set(Field(0)); 49 | } 50 | 51 | @method async setParameters(trustedBlock: Field) { 52 | const parametersWereSet = this.parametersWereSet.getAndRequireEquals(); 53 | parametersWereSet.assertEquals(Field(0)); 54 | 55 | this.trustedBlock.set(trustedBlock); 56 | this.parametersWereSet.set(Field(1)); 57 | } 58 | 59 | @method async update(admin: PrivateKey, blobstreamProof: BlobstreamProofType, path: BlobstreamMerkleWitness) { 60 | blobstreamProof.verify() 61 | let leafIndex = this.currentLeafIndex.getAndRequireEquals(); 62 | 63 | let commitmentsRoot = this.commitmentsRoot.getAndRequireEquals(); 64 | 65 | path.calculateRoot(Field(0)).assertEquals(commitmentsRoot); 66 | const newRoot = path.calculateRoot(Poseidon.hash([ 67 | ...blobstreamProof.publicInput.dataCommitment.toFields(), 68 | ])); 69 | 70 | let trustedBlock = this.trustedBlock.getAndRequireEquals(); 71 | trustedBlock.assertEquals(Poseidon.hashPacked(Bytes32.provable, blobstreamProof.publicInput.trustedHeaderHash)); 72 | 73 | this.trustedBlock.set(Poseidon.hashPacked(Bytes32.provable, blobstreamProof.publicInput.targetHeaderHash)); 74 | 75 | this.commitmentsRoot.set(newRoot); 76 | 77 | this.currentLeafIndex.set(leafIndex.add(Field.from(1))); 78 | 79 | const adminPk = admin.toPublicKey(); 80 | this.account.delegate.requireEquals(adminPk); 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /contracts/src/blobstream/sp1_to_env.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | 3 | const args = process.argv; 4 | 5 | const sp1Proof = args[2]; 6 | const runDir = args[3]; 7 | const workDir = args[4]; 8 | const proofName = args[5]; 9 | const envPath = 
`${runDir}/env.${proofName}`; 10 | 11 | const sp1 = JSON.parse(fs.readFileSync(sp1Proof, 'utf8')); 12 | const hexPi = Buffer.from(sp1.public_values.buffer.data).toString('hex'); 13 | const programVk = sp1.proof.Plonk.public_inputs[0]; 14 | const encodedProof = sp1.proof.Plonk.encoded_proof; 15 | 16 | const env = `\ 17 | WORK_DIR=${workDir}/${proofName}/e2e_plonk 18 | CACHE_DIR=${workDir}/plonk_cache 19 | HEX_PROOF="0x00000000${encodedProof}" 20 | PROGRAM_VK="${programVk}" 21 | HEX_PI="0x${hexPi}" 22 | `; 23 | 24 | fs.writeFileSync(envPath, env, 'utf8'); -------------------------------------------------------------------------------- /contracts/src/blobstream/verify_blob_inclusion.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Field, 3 | PrivateKey, 4 | Provable, 5 | SmartContract, 6 | State, 7 | VerificationKey, 8 | method, 9 | state, 10 | Poseidon, 11 | UInt8, 12 | Bytes, 13 | Gadgets, 14 | ZkProgram, 15 | Struct, 16 | UInt64, 17 | Undefined, 18 | } from 'o1js'; 19 | import { FrC } from '../towers/index.js'; 20 | import { NodeProofLeft } from '../structs.js'; 21 | import { parseDigestProvable, parsePublicInputs, parsePublicInputsProvable } from '../plonk/parse_pi.js'; 22 | import { provableBn254ScalarFieldToBytes, wordToBytes } from '../sha/utils.js'; 23 | import fs from 'fs'; 24 | import { blob } from 'stream/consumers'; 25 | import { Bytes32 } from './verify_blobstream.js'; 26 | 27 | class BlobInclusionInput extends Struct({ 28 | digest: Bytes32.provable, 29 | }) {} 30 | 31 | const blobInclusionVerifier = ZkProgram({ 32 | name: 'blobInclusionVerifier', 33 | publicInput: BlobInclusionInput, 34 | publicOutput: Undefined, 35 | methods: { 36 | compute: { 37 | privateInputs: [NodeProofLeft], 38 | async method( 39 | input: BlobInclusionInput, 40 | proof: NodeProofLeft, 41 | ) { 42 | let vk: VerificationKey; 43 | let blobInclusionNodeVk: Field; 44 | let blobInclusionProgramVk: FrC; 45 | if 
(process.env.BLOB_INCLUSION_ENABLED == 'true') { 46 | blobInclusionProgramVk = FrC.from(process.env.BLOB_INCLUSION_PROGRAM_VK || "0" as string) 47 | const workDir = process.env.BLOB_INCLUSION_WORK_DIR as string; 48 | 49 | blobInclusionNodeVk = Field.from(JSON.parse(fs.readFileSync(`${workDir}/proofs/layer5/p0.json`, 'utf8')).publicOutput[2]); 50 | vk = VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/nodeVk.json`, 'utf8'))) 51 | } else { 52 | blobInclusionProgramVk = FrC.from(0n); 53 | blobInclusionNodeVk = Field.from(0n); 54 | vk = VerificationKey.empty(); 55 | } 56 | proof.verify(vk); 57 | proof.publicOutput.subtreeVkDigest.assertEquals(blobInclusionNodeVk); 58 | 59 | const pi0 = blobInclusionProgramVk; 60 | const pi1 = parseDigestProvable(Bytes.from(input.digest)); 61 | 62 | const piDigest = Poseidon.hashPacked(Provable.Array(FrC.provable, 2), [pi0, pi1]) 63 | piDigest.assertEquals(proof.publicOutput.rightOut) 64 | 65 | return undefined; 66 | }, 67 | }, 68 | }, 69 | }); 70 | 71 | const BlobInclusionProof = ZkProgram.Proof(blobInclusionVerifier); 72 | export { blobInclusionVerifier, BlobInclusionProof, BlobInclusionInput, Bytes32 }; -------------------------------------------------------------------------------- /contracts/src/blobstream/verify_blobstream.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Field, 3 | PrivateKey, 4 | Provable, 5 | SmartContract, 6 | State, 7 | VerificationKey, 8 | method, 9 | state, 10 | Poseidon, 11 | UInt8, 12 | Bytes, 13 | Gadgets, 14 | ZkProgram, 15 | Struct, 16 | UInt64, 17 | Undefined, 18 | } from 'o1js'; 19 | import { FrC } from '../towers/index.js'; 20 | import { NodeProofLeft } from '../structs.js'; 21 | import { parsePublicInputs, parsePublicInputsProvable } from '../plonk/parse_pi.js'; 22 | import { provableBn254ScalarFieldToBytes, wordToBytes } from '../sha/utils.js'; 23 | import fs from 'fs'; 24 | 25 | class Bytes32 extends Bytes(32) {} 26 | 27 | 
class BlobstreamInput extends Struct({ 28 | trustedHeaderHash: Bytes32.provable, 29 | targetHeaderHash: Bytes32.provable, 30 | dataCommitment: Bytes32.provable, 31 | trustedBlockHeight: UInt64, 32 | targetBlockHeight: UInt64, 33 | validatorBitmap: Bytes32.provable, 34 | }) {} 35 | 36 | const padUInt64To32Bytes = (num: UInt64): UInt8[] => { 37 | const unpadded = wordToBytes(num.toFields()[0]) 38 | return [ 39 | ...unpadded, 40 | ...Array(24).fill(UInt8.from(0)), 41 | ].reverse(); 42 | } 43 | 44 | const blobstreamVerifier = ZkProgram({ 45 | name: 'blobstreamVerifier', 46 | publicInput: BlobstreamInput, 47 | publicOutput: Undefined, 48 | methods: { 49 | compute: { 50 | privateInputs: [NodeProofLeft], 51 | async method( 52 | input: BlobstreamInput, 53 | proof: NodeProofLeft, 54 | ) { 55 | let blobstreamProgramVk: FrC; 56 | let blobstreamNodeVk: Field; 57 | let vk: VerificationKey; 58 | 59 | if (process.env.BLOBSTREAM_ENABLED == 'true') { 60 | blobstreamProgramVk = FrC.from(process.env.BLOBSTREAM_PROGRAM_VK as string) 61 | const workDir = process.env.BLOBSTREAM_WORK_DIR as string; 62 | 63 | blobstreamNodeVk = Field.from(JSON.parse(fs.readFileSync(`${workDir}/proofs/layer5/p0.json`, 'utf8')).publicOutput[2]); 64 | vk = VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/nodeVk.json`, 'utf8'))) 65 | } else { 66 | blobstreamProgramVk = FrC.from(0n); 67 | blobstreamNodeVk = Field.from(0n); 68 | vk = VerificationKey.empty(); 69 | } 70 | 71 | proof.verify(vk) 72 | proof.publicOutput.subtreeVkDigest.assertEquals(blobstreamNodeVk) 73 | 74 | 75 | let bytes: UInt8[] = []; 76 | bytes = bytes.concat(input.trustedHeaderHash.bytes); 77 | bytes = bytes.concat(input.targetHeaderHash.bytes); 78 | bytes = bytes.concat(input.dataCommitment.bytes); 79 | bytes = bytes.concat(padUInt64To32Bytes(input.trustedBlockHeight)); 80 | bytes = bytes.concat(padUInt64To32Bytes(input.targetBlockHeight)); 81 | bytes = bytes.concat(input.validatorBitmap.bytes); 82 | 83 | const pi0 = 
blobstreamProgramVk; 84 | const pi1 = parsePublicInputsProvable(Bytes.from(bytes)); 85 | 86 | const piDigest = Poseidon.hashPacked(Provable.Array(FrC.provable, 2), [pi0, pi1]) 87 | piDigest.assertEquals(proof.publicOutput.rightOut) 88 | 89 | return undefined; 90 | }, 91 | }, 92 | }, 93 | }); 94 | 95 | const BlobstreamProof = ZkProgram.Proof(blobstreamVerifier); 96 | export { blobstreamVerifier, BlobstreamProof, BlobstreamInput, Bytes32 }; -------------------------------------------------------------------------------- /contracts/src/build_vk_tree.ts: -------------------------------------------------------------------------------- 1 | import { VerificationKey } from "o1js"; 2 | import fs from "fs"; 3 | import { buildTreeOfVks } from "./tree_of_vks.js"; 4 | 5 | const workDir = process.argv[2]; 6 | 7 | const vk0 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk0.json`, 'utf8'))).hash; 8 | const vk1 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk1.json`, 'utf8'))).hash; 9 | const vk2 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk2.json`, 'utf8'))).hash; 10 | const vk3 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk3.json`, 'utf8'))).hash; 11 | const vk4 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk4.json`, 'utf8'))).hash; 12 | const vk5 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk5.json`, 'utf8'))).hash; 13 | const vk6 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk6.json`, 'utf8'))).hash; 14 | const vk7 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk7.json`, 'utf8'))).hash; 15 | const vk8 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk8.json`, 'utf8'))).hash; 16 | const vk9 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk9.json`, 'utf8'))).hash; 17 | const vk10 
= await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk10.json`, 'utf8'))).hash; 18 | const vk11 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk11.json`, 'utf8'))).hash; 19 | const vk12 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk12.json`, 'utf8'))).hash; 20 | const vk13 = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/vk13.json`, 'utf8'))).hash; 21 | 22 | 23 | const layer1Vk = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/layer1Vk.json`, 'utf8'))).hash; 24 | const nodeVk = await VerificationKey.fromJSON(JSON.parse(fs.readFileSync(`${workDir}/vks/nodeVk.json`, 'utf8'))).hash; 25 | 26 | 27 | const baseVksHashes = [vk0, vk1, vk2, vk3, vk4, vk5, vk6, vk7, vk8, vk9, vk10, vk11, vk12, vk13]; 28 | const root = await buildTreeOfVks(baseVksHashes, layer1Vk, nodeVk); 29 | 30 | console.log(root.toBigInt()); 31 | -------------------------------------------------------------------------------- /contracts/src/compile_recursion_vks.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { Cache } from "o1js"; 3 | import { layer1 } from "./compressor/layer1node.js"; 4 | import { node } from "./compressor/compressor.js"; 5 | 6 | const workDir = process.argv[2]; 7 | const cacheDir = process.argv[3]; 8 | 9 | const layer1Vk = (await layer1.compile({cache: Cache.FileSystem(cacheDir)})).verificationKey; 10 | fs.writeFileSync(`${workDir}/vks/layer1Vk.json`, JSON.stringify(layer1Vk), 'utf8'); 11 | 12 | const nodeVk = (await node.compile({cache: Cache.FileSystem(cacheDir)})).verificationKey; 13 | fs.writeFileSync(`${workDir}/vks/nodeVk.json`, JSON.stringify(nodeVk), 'utf8'); 14 | -------------------------------------------------------------------------------- /contracts/src/compressor/compressor.ts: -------------------------------------------------------------------------------- 1 | 
import { Field, Poseidon, Undefined, VerificationKey, ZkProgram } from "o1js";
import { NodeProofLeft, NodeProofRight, SubtreeCarry } from "../structs.js";

// Inner tree node: verifies two child proofs, checks that they chain, and
// commits to both child vk hashes, both subtree digests and the layer index.
const node = ZkProgram({
  name: 'node',
  publicInput: Undefined,
  publicOutput: SubtreeCarry,
  methods: {
    compute: {
      privateInputs: [NodeProofLeft, VerificationKey, NodeProofRight, VerificationKey, Field],
      async method(
        piLeft: NodeProofLeft,
        vkLeft: VerificationKey,
        piRight: NodeProofRight,
        vkRight: VerificationKey,
        layer: Field
      ) {
        piLeft.verify(vkLeft);
        piRight.verify(vkRight);

        // the left subtree's right boundary must feed the right subtree
        piLeft.publicOutput.rightOut.assertEquals(piRight.publicOutput.leftIn);

        const subtreeVkDigest = Poseidon.hash([
          vkLeft.hash,
          vkRight.hash,
          piLeft.publicOutput.subtreeVkDigest,
          piRight.publicOutput.subtreeVkDigest,
          layer
        ]);

        return new SubtreeCarry({
          leftIn: piLeft.publicOutput.leftIn,
          rightOut: piRight.publicOutput.rightOut,
          subtreeVkDigest
        });
      },
    },
  },
});

export { node }

/contracts/src/compressor/layer1node.ts: --------------------------------------------------------------------------------

import { Bool, Field, Poseidon, Provable, Undefined, VerificationKey, ZkProgram } from "o1js";
import { NOTHING_UP_MY_SLEEVE, SubtreeCarry, ZkpProofLeft, ZkpProofRight } from "../structs.js";

/*
  When the base layer of zkps is not a power of 2 we extend it with dummy
  proofs to make it a power of 2. Another technique would be to dynamically
  add dummy proofs at higher layers when needed; for now we skip that for
  simplicity.
*/
const layer1 = ZkProgram({
  name: 'layer1',
  publicInput: Undefined,
  publicOutput: SubtreeCarry,
  methods: {
    compute: {
      privateInputs: [ZkpProofLeft, VerificationKey, Bool, ZkpProofRight, VerificationKey, Bool],
      async method(
        piLeft: ZkpProofLeft,
        vkLeft: VerificationKey,
        verifyLeft: Bool,
        piRight: ZkpProofRight,
        vkRight: VerificationKey,
        verifyRight: Bool
      ) {
        // dummy children are skipped via the verify flags
        piLeft.verifyIf(vkLeft, verifyLeft);
        piRight.verifyIf(vkRight, verifyRight);

        piLeft.publicOutput.assertEquals(piRight.publicInput);

        // a dummy child contributes the fixed NOTHING_UP_MY_SLEEVE constant
        // instead of a real vk hash
        const leftVkHash = Provable.if(verifyLeft, vkLeft.hash, NOTHING_UP_MY_SLEEVE);
        const rightVkHash = Provable.if(verifyRight, vkRight.hash, NOTHING_UP_MY_SLEEVE);

        const subtreeVkDigest = Poseidon.hash([
          leftVkHash,
          rightVkHash,
          Field(1), // layer
        ]);

        return new SubtreeCarry({
          leftIn: piLeft.publicInput,
          rightOut: piRight.publicOutput,
          subtreeVkDigest: subtreeVkDigest
        });
      },
    },
  },
});

export { layer1 }

/contracts/src/ec/g1.ts: --------------------------------------------------------------------------------

import { createForeignCurve } from "o1js";

// NOTE: removed the unused `createField` import from
// "o1js/dist/node/bindings/crypto/finite-field" — it was never referenced and
// deep dist-path imports are fragile across o1js versions.

// BN254 G1 curve parameters (y^2 = x^3 + 3 over Fp, generator (1, 2)).
const bn254Params = {
  name: "bn254",
  modulus: 21888242871839275222246405745257275088696311157297823662689037894645226208583n,
  order: 21888242871839275222246405745257275088548364400416034343698204186575808495617n,
  a: 0n,
  b: 3n,
  generator: {
    x: 1n,
    y: 2n
  }
};

const bn254 = createForeignCurve(bn254Params)

export { bn254 }

/contracts/src/ec/g2.ts:
--------------------------------------------------------------------------------

import { Field, Struct } from 'o1js';
import { FpC, Fp2, Fp6, Fp12 } from '../towers/index.js';
import { GAMMA_1S, NEG_GAMMA_13 } from '../towers/precomputed.js';

/** Affine point on the BN254 twist over Fp2. Methods assume non-infinity inputs. */
class G2Affine extends Struct({ x: Fp2, y: Fp2 }) {
  /** Returns 1 iff both coordinates are equal, 0 otherwise. */
  equals(rhs: G2Affine): Field {
    const sameX: Field = this.x.equals(rhs.x);
    const sameY: Field = this.y.equals(rhs.y);
    return sameX.mul(sameY);
  }

  /** Point negation: (x, y) -> (x, -y). */
  neg() {
    return new G2Affine({ x: this.x, y: this.y.neg() });
  }

  // tangent slope; a = 0 for bn, so λ = 3x^2 / 2y
  computeLambdaSame(): Fp2 {
    const numerator = this.x.square().mul_by_fp(FpC.from(3n));
    const invDenominator = this.y.mul_by_fp(FpC.from(2n)).inverse();
    return numerator.mul(invDenominator);
  }

  // chord slope: λ = (y2 - y1) / (x2 - x1)
  computeLambdaDiff(rhs: G2Affine): Fp2 {
    const numerator = rhs.y.sub(this.y);
    const invDenominator = rhs.x.sub(this.x).inverse();
    return numerator.mul(invDenominator);
  }

  // line intercept: μ = y - λx
  computeMu(lambda: Fp2) {
    return this.y.sub(this.x.mul(lambda));
  }

  // Assumes that neither operand is the point at infinity.
  // NOTE(review): the branch uses toBigInt(), which requires a constant value —
  // this add looks only usable outside the circuit / during witness
  // generation; confirm with callers.
  add(rhs: G2Affine): G2Affine {
    const sameInputs = this.equals(rhs);
    const lambda = sameInputs.toBigInt() === 1n
      ? this.computeLambdaSame()
      : this.computeLambdaDiff(rhs);

    const x3 = lambda.square().sub(this.x).sub(rhs.x);
    const y3 = lambda.mul(this.x.sub(x3)).sub(this.y);
    return new G2Affine({ x: x3, y: y3 });
  }

  // doubling from a precomputed tangent slope:
  // x_3 = λ^2 - 2x_1, y_3 = λ(x_1 - x_3) - y_1
  double_from_line(lambda: Fp2) {
    const x3 = lambda.square().sub(this.x).sub(this.x);
    const y3 = lambda.mul(this.x.sub(x3)).sub(this.y);
    return new G2Affine({ x: x3, y: y3 });
  }

  // addition from a precomputed chord slope:
  // x_3 = λ^2 - x_1 - x_2, y_3 = λ(x_1 - x_3) - y_1
  add_from_line(lambda: Fp2, rhs: G2Affine) {
    const x3 = lambda.square().sub(this.x).sub(rhs.x);
    const y3 = lambda.mul(this.x.sub(x3)).sub(this.y);
    return new G2Affine({ x: x3, y: y3 });
  }

  // Frobenius endomorphism using the precomputed γ constants.
  frobenius() {
    return new G2Affine({
      x: this.x.conjugate().mul(GAMMA_1S[1]),
      y: this.y.conjugate().mul(GAMMA_1S[2]),
    });
  }

  // same map, with the γ constants supplied by the caller
  frobFromInputs(g1: Fp2, g2: Fp2) {
    return new G2Affine({
      x: this.x.conjugate().mul(g1),
      y: this.y.conjugate().mul(g2),
    });
  }

  // Frobenius variant using the negated γ13 constant for y.
  negative_frobenius() {
    return new G2Affine({
      x: this.x.conjugate().mul(GAMMA_1S[1]),
      y: this.y.conjugate().mul(NEG_GAMMA_13),
    });
  }

  // same map, with the constants supplied by the caller
  negFrobFromInputs(g1: Fp2, g2: Fp2) {
    return new G2Affine({
      x: this.x.conjugate().mul(g1),
      y: this.y.conjugate().mul(g2),
    });
  }

  // Embedding into Fp12 where g + hw = g0 + h0W + g1W^2 + h1W^3 + g2W^4 + h2W^5
  // and PSI: (x, y) -> (w^2 x, w^3 y).
  hom(): [Fp12, Fp12] {
    const xEmbed = new Fp12({
      c0: new Fp6({ c0: Fp2.zero(), c1: this.x, c2: Fp2.zero() }),
      c1: Fp6.zero(),
    });
    const yEmbed = new Fp12({
      c0: Fp6.zero(),
      c1: new Fp6({ c0: Fp2.zero(), c1: this.y, c2: Fp2.zero() }),
    });
    return [xEmbed, yEmbed];
  }
}

export { G2Affine };

/contracts/src/ec/index.ts: --------------------------------------------------------------------------------

import { Struct } from 'o1js';
import { FpA } from '../towers/fp.js';
import { G2Affine } from './g2.js';

/** Affine point on BN254 G1. */
class G1Affine extends Struct({ x: FpA.provable, y: FpA.provable }) {}

export { G1Affine, G2Affine };

/contracts/src/groth/accumulate_lines.ts:
--------------------------------------------------------------------------------

/*
  Accumulate lines to utilize sparse multiplications:
  - checks that the (non-fixed) B-lines are correct and evaluates all three
    line sets with sparse Fp12 multiplication
*/

import { G1Affine, G2Affine } from "../ec/index.js";
import { G2Line } from "../lines/index.js";
import { AffineCache } from "../lines/precompute.js";
import { ATE_LOOP_COUNT, Fp12 } from "../towers/index.js";


class LineAccumulator {
  /**
   * Walks the ate loop once; for each step it checks the B-line against the
   * running point T, evaluates the B / delta / gamma lines at negA / C / PI
   * and collects the per-step sparse products g[i].
   */
  static accumulate(
    b_lines: Array<G2Line>,
    gamma_lines: Array<G2Line>,
    delta_lines: Array<G2Line>,
    B: G2Affine,
    negA: G1Affine,
    PI: G1Affine,
    C: G1Affine
  ): Array<Fp12> {
    const g: Array<Fp12> = [];

    const a_cache = new AffineCache(negA);
    const c_cache = new AffineCache(C);
    const pi_cache = new AffineCache(PI);

    // evaluate one (b, delta, gamma) triple as a fresh product; keeps every
    // sparse_mul with a single line evaluation on the right, exactly as before
    const evalTriple = (b: G2Line, d: G2Line, gm: G2Line): Fp12 =>
      b.psi(a_cache).sparse_mul(d.psi(c_cache)).sparse_mul(gm.psi(pi_cache));

    // fold one (b, delta, gamma) triple into an existing accumulator
    const mulTriple = (acc: Fp12, b: G2Line, d: G2Line, gm: G2Line): Fp12 =>
      acc.sparse_mul(b.psi(a_cache)).sparse_mul(d.psi(c_cache)).sparse_mul(gm.psi(pi_cache));

    let T = new G2Affine({ x: B.x, y: B.y });
    const negB = B.neg();

    let idx = 0;
    let line_cnt = 0;
    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      idx = i - 1;

      // doubling step: one line per iteration
      const b_line = b_lines[line_cnt];
      const delta_line = delta_lines[line_cnt];
      const gamma_line = gamma_lines[line_cnt];
      line_cnt += 1;

      b_line.assert_is_tangent(T);
      g.push(evalTriple(b_line, delta_line, gamma_line));
      T = T.double_from_line(b_line.lambda);

      // addition step on the non-zero bits of the ate loop
      if (ATE_LOOP_COUNT[i] == 1 || ATE_LOOP_COUNT[i] == -1) {
        const b_add = b_lines[line_cnt];
        const delta_add = delta_lines[line_cnt];
        const gamma_add = gamma_lines[line_cnt];
        line_cnt += 1;

        const addend = ATE_LOOP_COUNT[i] == 1 ? B : negB;
        b_add.assert_is_line(T, addend);
        T = T.add_from_line(b_add.lambda, addend);

        g[idx] = mulTriple(g[idx], b_add, delta_add, gamma_add);
      }
    }

    // first frobenius line, evaluated against πB
    let b_line = b_lines[line_cnt];
    let delta_line = delta_lines[line_cnt];
    let gamma_line = gamma_lines[line_cnt];
    line_cnt += 1;
    idx += 1;

    g.push(evalTriple(b_line, delta_line, gamma_line));

    const piB = B.frobenius();
    b_line.assert_is_line(T, piB);
    T = T.add_from_line(b_line.lambda, piB);

    // second frobenius line, folded into the same slot
    b_line = b_lines[line_cnt];
    delta_line = delta_lines[line_cnt];
    gamma_line = gamma_lines[line_cnt];
    line_cnt += 1;

    g[idx] = mulTriple(g[idx], b_line, delta_line, gamma_line);

    const pi_2_B = piB.negative_frobenius();
    b_line.assert_is_line(T, pi_2_B);

    return g;
  }
}

export { LineAccumulator };

/contracts/src/groth/compute_pi.ts: --------------------------------------------------------------------------------

import { GrothVk } from "./vk.js";
import { bn254 } from "../ec/g1.js";
import { FrC } from "../towers/fr.js";
import { ForeignCurve } from "o1js";

// NOTE: removed the unused `import { Proof } from "./proof.js"` — proof.ts
// imports computePI from this file, so that import was also circular.

/**
 * Computes the public-input point PI = ic0 + Σ ic_i · pis[i-1].
 * `pis` are sent without the leading 1 (which multiplies ic0).
 */
export function computePI(VK: GrothVk, pis: Array<FrC>): ForeignCurve {
  let acc = new bn254({ x: VK.ic0.x, y: VK.ic0.y });

  // replaces five copy-pasted add/scale lines with one loop
  const ics = [VK.ic1, VK.ic2, VK.ic3, VK.ic4, VK.ic5];
  for (let i = 0; i < ics.length; i++) {
    acc = acc.add(ics[i].scale(pis[i]));
  }

  return acc
}

/contracts/src/groth/e2e_test.ts:
-------------------------------------------------------------------------------- 1 | import { Provable } from 'o1js'; 2 | import { Groth16Verifier } from './verifier.js'; 3 | 4 | const grothVerifier = new Groth16Verifier("./src/groth/example_jsons/vk.json") 5 | 6 | function main() { 7 | const proof = Provable.witness(Proof, () => Proof.parse(grothVerifier.vk, "./src/groth/example_jsons/proof.json")) 8 | const aux_witness = Provable.witness(AuXWitness, () => AuXWitness.parse("./src/groth/example_jsons/aux_witness.json")); 9 | grothVerifier.verify(proof, aux_witness); 10 | } 11 | 12 | // npm run build && node --max-old-space-size=65536 build/src/groth/e2e_test.js 13 | import v8 from 'v8'; 14 | import { Proof } from './proof.js'; 15 | import { AuXWitness } from '../aux_witness.js'; 16 | (async () => { 17 | console.time('running Fp constant version'); 18 | main(); 19 | console.timeEnd('running Fp constant version'); 20 | 21 | console.time('running Fp witness generation & checks'); 22 | await Provable.runAndCheck(main); 23 | console.timeEnd('running Fp witness generation & checks'); 24 | 25 | console.time('creating Fp constraint system'); 26 | let cs = await Provable.constraintSystem(main); 27 | console.timeEnd('creating Fp constraint system'); 28 | 29 | console.log(cs.summary()); 30 | const totalHeapSize = v8.getHeapStatistics().total_available_size; 31 | let totalHeapSizeinGB = (totalHeapSize / 1024 / 1024 / 1024).toFixed(2); 32 | console.log(`Total heap size: ${totalHeapSizeinGB} GB`); 33 | 34 | // used_heap_size 35 | const usedHeapSize = v8.getHeapStatistics().used_heap_size; 36 | let usedHeapSizeinGB = (usedHeapSize / 1024 / 1024 / 1024).toFixed(2); 37 | console.log(`Used heap size: ${usedHeapSizeinGB} GB`); 38 | })(); 39 | -------------------------------------------------------------------------------- /contracts/src/groth/example_jsons/aux_witness.json: -------------------------------------------------------------------------------- 1 | 
{"c":{"g00":"21579243941203312144539369067593279785506538735269263133152072764829981108922","g01":"7428412522856244998952925899744633594829401118152227043461677505894661114998","g10":"6617657349298610071261310610000147120757864937836476082865111352387115627411","g11":"14325241483671357378564865691628888565722955894927591792722112184422070195679","g20":"1212811781220709389023905579160339808215104730189504635331881369816540067575","g21":"16240577791285747556230729711703121323140330822460233534527233159030236399451","h00":"21735689257625312225782366918367847447893780771437698623715635390088464033764","h01":"19920411212492769485910178765915091420206135463140470843553446673474869754289","h10":"995023729558377164397645083447338478678989263589757351734693574548120863690","h11":"18757306007446249524968745682246513638657424084664362755269621108463224242906","h20":"18155309550328611327537118566404247046264796419947574549591408240251275971793","h21":"1554910929496379058117154888236332874964133158890063733045832045890145909362"},"shift_power":"0"} -------------------------------------------------------------------------------- /contracts/src/groth/example_jsons/proof.json: -------------------------------------------------------------------------------- 1 | { 2 | "negA": { 3 | "x": "16465199099708604290698553000024942000051030364759839088954586362243760185403", 4 | "y": "17690359568564825813235988832215237195831246832056909949821693820324615702030" 5 | }, 6 | "B": { 7 | "x_c0": "20076621026680381759767634077167710158570125456580059736389163589252903861997", 8 | "x_c1": "8939596936503745624468413156089285325774309438533412822132357238045755689387", 9 | "y_c0": "111879341840300391556371653123940615424189534247854096298516707026393487948", 10 | "y_c1": "7719123513247232282802073769919057076444760653726977674407327771459600820251" 11 | }, 12 | "C": { 13 | "x": "10184405014965771627427034456113644883671783030647002464387923536809721376302", 14 | "y": 
"3331725942843591742687069662771052794291623321043694393039994836222102831251" 15 | }, 16 | "pi1": "19350802088444617183621339156085479077", 17 | "pi2": "61803236023146647725736150410140474743", 18 | "pi3": "224573707671822082550326687066026912541", 19 | "pi4": "215629468736039306773365103130456808007", 20 | "pi5": "6655704183316983190945468237220041514376883004657559498672647785620383118673" 21 | } -------------------------------------------------------------------------------- /contracts/src/groth/example_jsons/vk.json: -------------------------------------------------------------------------------- 1 | {"alpha":{"x":"20491192805390485299153009773594534940189261866228447918068658471970481763042","y":"9383485363053290200918347156157836566562967994039712273449902621266178545958"},"alpha_beta":{"g00":"5697245924082314955838557878331368209814247075300951521701254589084804234970","g01":"6607404321972637550020783611836551818248636342709305900123466355480118576099","g10":"3670608949875518863244021916950733644062078743044974108285053440592297066931","g11":"541250262476899488042926921352922562849943764605667374596011538907110731629","g20":"18124534578366443052930374136100844352060568779165176942539622400742155990163","g21":"254425962675264208268606525222145870537081446419126819669512313449894617711","h00":"14682601387374107597061811864806746474418747695496995459538035046909465405682","h01":"380626998809465124537308088395106769155276843034064955773603619257287977827","h10":"20937581287674855219025710764445572864809573889072173538957685817558553170744","h11":"1875554049818610060118039934136718085309900630088668968527777353177281637676","h20":"10677133934211244973062689418678002923080465282453292613142034282094753864563","h21":"5217526782465309563115572197406844083015292505138779767346232600336410380272"},"beta":{"x_c0":"6375614351688725206403948262868962793625744043794305715222011528459656738731","x_c1":"42528228787583008591238979814505913535330734131977717686514426657522593
97132","y_c0":"10505242626370262277552901082094356697409835680220590971873171140371331206856","y_c1":"21847035105528745403288232691147584728191162732299865338377159692350059136679"},"delta":{"x_c0":"12043754404802191763554326994664886008979042643626290185762540825416902247219","x_c1":"1668323501672964604911431804142266013250380587483576094566949227275849579036","y_c0":"13740680757317479711909903993315946540841369848973133181051452051592786724563","y_c1":"7710631539206257456743780535472368339139328733484942210876916214502466455394"},"gamma":{"x_c0":"10857046999023057135944570762232829481370756359578518086990519993285655852781","x_c1":"11559732032986387107991004021392285783925812861821192530917403151452391805634","y_c0":"8495653923123431417604973247489272438418190587263600148770280649306958101930","y_c1":"4082367875863433681332203403145435568316851327593401208105741076214120093531"},"ic0":{"x":"8446592859352799428420270221449902464741693648963397251242447530457567083492","y":"1064796367193003797175961162477173481551615790032213185848276823815288302804"},"ic1":{"x":"3179835575189816632597428042194253779818690147323192973511715175294048485951","y":"20895841676865356752879376687052266198216014795822152491318012491767775979074"},"ic2":{"x":"5332723250224941161709478398807683311971555792614491788690328996478511465287","y":"21199491073419440416471372042641226693637837098357067793586556692319371762571"},"ic3":{"x":"12457994489566736295787256452575216703923664299075106359829199968023158780583","y":"19706766271952591897761291684837117091856807401404423804318744964752784280790"},"ic4":{"x":"19617808913178163826953378459323299110911217259216006187355745713323154132237","y":"21663537384585072695701846972542344484111393047775983928357046779215877070466"},"ic5":{"x":"6834578911681792552110317589222010969491336870276623105249474534788043166867","y":"15060583660288623605191393599883223885678013570733629274538391874953353488393"},"w27":{"g00":"0","g01":"0","g10":"0","g11":"0","g20":"8
204864362109909869166472767738877274689483185363591877943943203703805152849","g21":"17912368812864921115467448876996876278487602260484145953989158612875588124088","h00":"0","h01":"0","h10":"0","h11":"0","h20":"0","h21":"0"}}

/contracts/src/groth/proof.ts: --------------------------------------------------------------------------------

import { G1Affine, G2Affine } from "../ec/index.js";
import fs from "fs";
import { ATE_LOOP_COUNT, Fp2, FpC, FrC } from "../towers/index.js";
import { Provable, Struct } from "o1js";
import { G2Line, computeLineCoeffs } from "../lines/index.js";
import { computePI } from "./compute_pi.js";
import { GrothVk } from "./vk.js";

// Number of ate-loop lines: one doubling line per loop bit, one extra line per
// non-zero bit, plus two final frobenius lines.
const getNumOfLines = () => {
  const loopBits = ATE_LOOP_COUNT.slice(1);
  const extra = loopBits.filter((bit) => bit !== 0).length;
  return loopBits.length + extra + 2;
}

// JSON layout of a serialized Groth16 proof with 5 public inputs
type SerializedProof = {
  negA: { x: string, y: string },
  B: { x_c0: string, x_c1: string, y_c0: string, y_c1: string },
  C: { x: string, y: string },
  pi1: string,
  pi2: string,
  pi3: string,
  pi4: string,
  pi5: string,
}

class Proof extends Struct({
  negA: G1Affine,
  B: G2Affine,
  C: G1Affine,
  PI: G1Affine,
  b_lines: Provable.Array(G2Line, getNumOfLines()),
  pis: Provable.Array(FrC.provable, 5)
}) {
  /** Reads a serialized proof from `path`, deriving PI and the B-lines. */
  static parse(vk: GrothVk, path: string): Proof {
    const obj: SerializedProof = JSON.parse(fs.readFileSync(path, 'utf-8'));

    const negA = new G1Affine({ x: FpC.from(obj.negA.x), y: FpC.from(obj.negA.y) });
    const C = new G1Affine({ x: FpC.from(obj.C.x), y: FpC.from(obj.C.y) });

    const pis = [obj.pi1, obj.pi2, obj.pi3, obj.pi4, obj.pi5].map((pi) => FrC.from(pi));

    // PI is derived, not read, from the serialized public inputs
    const piBn = computePI(vk, pis);
    const PI = new G1Affine({
      x: FpC.from(piBn.x).assertCanonical(),
      y: FpC.from(piBn.y).assertCanonical(),
    });

    const B = new G2Affine({
      x: new Fp2({ c0: FpC.from(obj.B.x_c0), c1: FpC.from(obj.B.x_c1) }),
      y: new Fp2({ c0: FpC.from(obj.B.y_c0), c1: FpC.from(obj.B.y_c1) }),
    });

    return new Proof({ negA, B, C, PI, b_lines: computeLineCoeffs(B), pis });
  }
}

export { Proof }

/contracts/src/groth/proof_to_env.ts: --------------------------------------------------------------------------------

import fs from 'fs';

// CLI: writes an env file pointing the e2e groth16 pipeline at a proof/vk pair.
const [, , groth16ProofPath, groth16RawVKPath, groth16VKPath, runDir, workDir, proofName] = process.argv;
const envPath = `${runDir}/env.${proofName}`;

const env = [
  `WORK_DIR=${workDir}/${proofName}/e2e_groth16`,
  `CACHE_DIR=${workDir}/groth16_cache`,
  `RAW_VK_PATH=${groth16RawVKPath}`,
  `VK_PATH=${groth16VKPath}`,
  `PROOF_PATH=${groth16ProofPath}`,
  '',
].join('\n');

fs.writeFileSync(envPath, env, 'utf8');

/contracts/src/groth/recursion/data.ts: --------------------------------------------------------------------------------

import { Field, Provable, Struct } from "o1js";
import { G1Affine, G2Affine } from "../../ec/index.js";
import { Fp12 } from "../../towers/index.js";

class RecursionProof extends Struct({
  negA: G1Affine,
  B: G2Affine,
  C: G1Affine,
  PI: G1Affine,
  c: Fp12,
  c_inv: Fp12,
  shift_power: Field,
}) {
  deepClone() {
    return new RecursionProof({
      negA: new G1Affine({ x: this.negA.x, y: this.negA.y }),
      B: new
G2Affine({ x: this.B.x, y: this.B.y }),
      C: new G1Affine({ x: this.C.x, y: this.C.y }),
      PI: new G1Affine({ x: this.PI.x, y: this.PI.y }),
      c: new Fp12({ c0: this.c.c0, c1: this.c.c1 }),
      c_inv: new Fp12({ c0: this.c_inv.c0, c1: this.c_inv.c1 }),
      shift_power: Field.from(this.shift_power)
    })
  }
};

/** Running miller-loop state carried between recursion steps. */
class State extends Struct({
  T: G2Affine,
  f: Fp12,
  g_digest: Field
}) {
  deepClone() {
    return new State({
      T: new G2Affine({ x: this.T.x, y: this.T.y }),
      f: new Fp12({ c0: this.f.c0, c1: this.f.c1 }),
      g_digest: Field.from(this.g_digest)
    })
  }
}

/** Proof plus state — the value hashed into each zkp's public input/output. */
class Accumulator extends Struct({
  proof: RecursionProof,
  state: State
}) {
  deepClone() {
    return new Accumulator({
      proof: this.proof.deepClone(),
      state: this.state.deepClone()
    })
  }
}

export { RecursionProof, State, Accumulator }

/contracts/src/groth/recursion/zkp10.ts: --------------------------------------------------------------------------------

import {
  ZkProgram,
  Field,
  Poseidon,
  Provable,
} from 'o1js';

import { Accumulator } from './data.js';
import { Fp12 } from '../../towers/fp12.js';
import { ATE_LOOP_COUNT } from '../../towers/consts.js';
import { ArrayListHasher } from '../../array_list_hasher.js';


// Miller-loop chunk for ate-loop indices [32, 43).
const zkp10 = ZkProgram({
  name: 'zkp10',
  publicInput: Field,
  publicOutput: Field,
  methods: {
    compute: {
      privateInputs: [Accumulator, Provable.Array(Field, 9 + 11 + 11), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 9 - 11 - 11 - 11)],
      async method(
        input: Field,
        acc: Accumulator,
        lhs_line_hashes: Array<Field>,
        g_chunk: Array<Fp12>,
        rhs_lines_hashes: Array<Field>
      ) {
        // bind the private accumulator to the public input hash
        input.assertEquals(Poseidon.hashPacked(Accumulator, acc));

        // prove g_chunk is the committed slice of the g array
        acc.state.g_digest.assertEquals(
          ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes)
        );

        let f = acc.state.f;
        for (let i = 32, k = 0; i < 43; i++, k++) {
          f = f.square().mul(g_chunk[k]);
          // fold in c_inv / c on the non-zero ate bits
          if (ATE_LOOP_COUNT[i] == 1) f = f.mul(acc.proof.c_inv);
          if (ATE_LOOP_COUNT[i] == -1) f = f.mul(acc.proof.c);
        }

        acc.state.f = f;
        return Poseidon.hashPacked(Accumulator, acc);
      },
    },
  },
});

const ZKP10Proof = ZkProgram.Proof(zkp10);
export { ZKP10Proof, zkp10 }

/contracts/src/groth/recursion/zkp11.ts: --------------------------------------------------------------------------------

import {
  ZkProgram,
  Field,
  Poseidon,
  Provable,
} from 'o1js';

import { Accumulator } from './data.js';
import { Fp12 } from '../../towers/fp12.js';
import { ATE_LOOP_COUNT } from '../../towers/consts.js';
import { ArrayListHasher } from '../../array_list_hasher.js';


// Miller-loop chunk for ate-loop indices [43, 54).
const zkp11 = ZkProgram({
  name: 'zkp11',
  publicInput: Field,
  publicOutput: Field,
  methods: {
    compute: {
      privateInputs: [Accumulator, Provable.Array(Field, 9 + 11 + 11 + 11), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 9 - 11 - 11 - 11 - 11)],
      async method(
        input: Field,
        acc: Accumulator,
        lhs_line_hashes: Array<Field>,
        g_chunk: Array<Fp12>,
        rhs_lines_hashes: Array<Field>
      ) {
        // bind the private accumulator to the public input hash
        input.assertEquals(Poseidon.hashPacked(Accumulator, acc));

        // prove g_chunk is the committed slice of the g array
        acc.state.g_digest.assertEquals(
          ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes)
        );

        let f = acc.state.f;
        for (let i = 43, k = 0; i < 54; i++, k++) {
          f = f.square().mul(g_chunk[k]);
          // fold in c_inv / c on the non-zero ate bits
          if (ATE_LOOP_COUNT[i] == 1) f = f.mul(acc.proof.c_inv);
          if (ATE_LOOP_COUNT[i] == -1) f = f.mul(acc.proof.c);
        }

        acc.state.f = f;
        return Poseidon.hashPacked(Accumulator, acc);
      },
    },
  },
});

const ZKP11Proof = ZkProgram.Proof(zkp11);
export { ZKP11Proof, zkp11 }

/contracts/src/groth/recursion/zkp12.ts: --------------------------------------------------------------------------------

import {
  ZkProgram,
  Field,
  Poseidon,
  Provable,
} from 'o1js';

import { Accumulator } from './data.js';
import { Fp12 } from '../../towers/fp12.js';
import { ATE_LOOP_COUNT } from '../../towers/consts.js';
import { ArrayListHasher } from '../../array_list_hasher.js';


// Miller-loop chunk for ate-loop indices [54, 65).
const zkp12 = ZkProgram({
  name: 'zkp12',
  publicInput: Field,
  publicOutput: Field,
  methods: {
    compute: {
      privateInputs: [Accumulator, Provable.Array(Field, 9 + 11 + 11 + 11 + 11), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 9 - 11 - 11 - 11 - 11 - 11)],
      async method(
        input: Field,
        acc: Accumulator,
        lhs_line_hashes: Array<Field>,
        g_chunk: Array<Fp12>,
        rhs_lines_hashes: Array<Field>
      ) {
        // bind the private accumulator to the public input hash
        input.assertEquals(Poseidon.hashPacked(Accumulator, acc));

        // prove g_chunk is the committed slice of the g array
        acc.state.g_digest.assertEquals(
          ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes)
        );

        let f = acc.state.f;
        for (let i = 54, k = 0; i < 65; i++, k++) {
          f = f.square().mul(g_chunk[k]);
          // fold in c_inv / c on the non-zero ate bits
          if (ATE_LOOP_COUNT[i] == 1) f = f.mul(acc.proof.c_inv);
          if (ATE_LOOP_COUNT[i] == -1) f = f.mul(acc.proof.c);
        }

        acc.state.f = f;
        return Poseidon.hashPacked(Accumulator, acc);
      },
    },
  },
});

const ZKP12Proof =
ZkProgram.Proof(zkp12); 58 | export { ZKP12Proof, zkp12 } -------------------------------------------------------------------------------- /contracts/src/groth/recursion/zkp13.ts: -------------------------------------------------------------------------------- 1 | import { Field, Poseidon, Provable, ZkProgram } from "o1js"; 2 | import { Accumulator } from "./data.js"; 3 | import { Fp12 } from "../../towers/index.js"; 4 | import { ArrayListHasher } from "../../array_list_hasher.js"; 5 | import { VK } from "../vk_from_env.js"; 6 | import { G1Affine } from "../../ec/index.js"; 7 | 8 | const zkp13 = ZkProgram({ 9 | name: 'zkp13', 10 | publicInput: Field, 11 | publicOutput: Field, 12 | methods: { 13 | compute: { 14 | privateInputs: [Accumulator, Provable.Array(Field, 64), Provable.Array(Fp12, 1)], 15 | async method( 16 | input: Field, 17 | acc: Accumulator, 18 | lhs_line_hashes: Array, 19 | g_chunk: Array, 20 | ) { 21 | input.assertEquals(Poseidon.hashPacked(Accumulator, acc)); 22 | 23 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, []) 24 | acc.state.g_digest.assertEquals(opening) 25 | 26 | let f = acc.state.f; 27 | f = f.mul(g_chunk[0]); 28 | 29 | f = f 30 | .mul(acc.proof.c_inv.frobenius_pow_p()) 31 | .mul(acc.proof.c.frobenius_pow_p_squared()) 32 | .mul(acc.proof.c_inv.frobenius_pow_p_cubed()) 33 | .mul(VK.alpha_beta) 34 | 35 | const shift = Provable.switch([acc.proof.shift_power.equals(Field(0)), acc.proof.shift_power.equals(Field(1)), acc.proof.shift_power.equals(Field(2))], Fp12, [Fp12.one(), VK.w27, VK.w27_square]); 36 | f = f.mul(shift); 37 | 38 | f.assert_equals(Fp12.one()); 39 | 40 | acc.state.f = f; 41 | return Poseidon.hashPacked(G1Affine, acc.proof.PI); 42 | }, 43 | }, 44 | }, 45 | }); 46 | 47 | 48 | const ZKP13Proof = ZkProgram.Proof(zkp13); 49 | export { ZKP13Proof, zkp13 } -------------------------------------------------------------------------------- /contracts/src/groth/recursion/zkp14.ts: 
-------------------------------------------------------------------------------- 1 | import { Field, Poseidon, Provable, ZkProgram } from "o1js"; 2 | import { Accumulator } from "./data.js"; 3 | import { Fp12, FrC } from "../../towers/index.js"; 4 | import { ArrayListHasher } from "../../array_list_hasher.js"; 5 | import { VK } from "../vk_from_env.js"; 6 | import { G1Affine } from "../../ec/index.js"; 7 | import { bn254 } from "../../ec/g1.js"; 8 | 9 | const zkp14 = ZkProgram({ 10 | name: 'zkp14', 11 | publicInput: Field, 12 | publicOutput: Field, 13 | methods: { 14 | compute: { 15 | privateInputs: [Provable.Array(FrC.provable, 5)], 16 | async method( 17 | input: Field, 18 | pis: Array, 19 | ) { 20 | const pis_hash = Poseidon.hashPacked(Provable.Array(FrC.provable, 5), pis); 21 | 22 | let acc = new bn254({ x: VK.ic0.x, y: VK.ic0.y }); 23 | 24 | acc = acc.add(VK.ic1.scale(pis[0])); 25 | acc = acc.add(VK.ic2.scale(pis[1])); 26 | acc = acc.add(VK.ic3.scale(pis[2])); 27 | // acc = acc.add(VK.ic4.scale(pis[3])); 28 | // acc = acc.add(VK.ic5.scale(pis[4])); 29 | 30 | // assert that sum ic_i * pis[i] = PI 31 | 32 | const acc_aff = new G1Affine({ x: acc.x.assertCanonical(), y: acc.y.assertCanonical() }) 33 | const acc_hash = Poseidon.hashPacked(G1Affine, acc_aff); 34 | 35 | return Poseidon.hashPacked(Provable.Array(Field, 3), [input, pis_hash, acc_hash]); 36 | }, 37 | }, 38 | }, 39 | }); 40 | 41 | 42 | const ZKP14Proof = ZkProgram.Proof(zkp14); 43 | export { ZKP14Proof, zkp14 } -------------------------------------------------------------------------------- /contracts/src/groth/recursion/zkp15.ts: -------------------------------------------------------------------------------- 1 | import { Field, Poseidon, Provable, ZkProgram } from "o1js"; 2 | import { Accumulator } from "./data.js"; 3 | import { Fp12, FrC } from "../../towers/index.js"; 4 | import { ArrayListHasher } from "../../array_list_hasher.js"; 5 | import { VK } from "../vk_from_env.js"; 6 | import { G1Affine } 
import { bn254 } from "../../ec/g1.js";

// Second half of the public-input MSM started in zkp14: adds ic4*pis[3] and
// ic5*pis[4] to the partial sum and asserts the total equals the PI point
// carried by the proof, binding the public inputs to the pairing check.
const zkp15 = ZkProgram({
  name: 'zkp15',
  publicInput: Field,   // hash([pi_hash, pis_hash, acc_hash]) produced by zkp14
  publicOutput: Field,  // pis_hash — commitment to the 5 public inputs
  methods: {
    compute: {
      privateInputs: [G1Affine, G1Affine, Provable.Array(FrC.provable, 5)],
      async method(
        input: Field,
        PI: G1Affine,
        acc: G1Affine,
        pis: Array<FrC>,
      ) {
        // Recompute all three commitments and bind them to the public input,
        // so PI, acc and pis are exactly the values committed upstream.
        const pi_hash = Poseidon.hashPacked(G1Affine, PI);
        const pis_hash = Poseidon.hashPacked(Provable.Array(FrC.provable, 5), pis);
        const acc_hash = Poseidon.hashPacked(G1Affine, acc);
        input.assertEquals(Poseidon.hashPacked(Provable.Array(Field, 3), [pi_hash, pis_hash, acc_hash]));


        // Finish the MSM with the last two verification-key points.
        let accBn = new bn254({ x: acc.x, y: acc.y });
        accBn = accBn.add(VK.ic4.scale(pis[3]));
        accBn = accBn.add(VK.ic5.scale(pis[4]));

        // sum_i ic_i * pis[i] must equal the PI point used in the pairing.
        accBn.x.assertCanonical().assertEquals(PI.x);
        accBn.y.assertCanonical().assertEquals(PI.y);

        return pis_hash;
      },
    },
  },
});


const ZKP15Proof = ZkProgram.Proof(zkp15);
export { ZKP15Proof, zkp15 }
import {
  ZkProgram,
  Field,
  Poseidon,
  Provable,
} from 'o1js';

import { Accumulator } from './data.js';
import { Fp12 } from '../../towers/fp12.js';
import { ATE_LOOP_COUNT } from '../../towers/consts.js';
import { ArrayListHasher } from '../../array_list_hasher.js';


// Miller-loop window for ATE_LOOP_COUNT indices [10, 21): per bit, squares the
// running Fp12 accumulator and multiplies in one committed line-evaluation
// chunk, folding in c_inv / c on +1 / -1 bits of the loop counter.
const zkp8 = ZkProgram({
  name: 'zkp8',
  publicInput: Field,   // Poseidon commitment to the incoming Accumulator
  publicOutput: Field,  // Poseidon commitment to the updated Accumulator
  methods: {
    compute: {
      privateInputs: [Accumulator, Provable.Array(Field, 9), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 9 - 11)],
      async method(
        input: Field,
        acc: Accumulator,
        lhs_line_hashes: Array<Field>,
        g_chunk: Array<Fp12>,
        rhs_lines_hashes: Array<Field>
      ) {
        // Bind the witnessed accumulator to the public input commitment.
        input.assertEquals(Poseidon.hashPacked(Accumulator, acc));

        // Prove that g_chunk occupies slots [9, 20) of the committed list:
        // 9 pre-hashed slots on the left, 11 opened values, the rest on the right.
        const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes);
        acc.state.g_digest.assertEquals(opening);

        let f = acc.state.f;

        let idx = 0;
        for (let i = 10; i < 21; i++) {
          // Standard Miller-loop step: square, then multiply the line evaluation.
          f = f.square().mul(g_chunk[idx]);

          if (ATE_LOOP_COUNT[i] == 1) {
            f = f.mul(acc.proof.c_inv);
          }

          if (ATE_LOOP_COUNT[i] == -1) {
            f = f.mul(acc.proof.c);
          }

          idx += 1
        }

        // Persist progress and re-commit for the next window (zkp9).
        acc.state.f = f;
        return Poseidon.hashPacked(Accumulator, acc);
      },
    },
  },
});

const ZKP8Proof = ZkProgram.Proof(zkp8);
export { ZKP8Proof, zkp8 }
import assert from "assert"

// Raw Ethereum calldata for a Groth16-verify call: 4-byte selector followed by
// ABI-encoded arguments. Used as a fixture to eyeball the expected payload size.
const tx = "0x8b2829470000000000000000000000000000000000000000000000000000000000bc614e00000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000104310fe59824347dfbbac696b61f3a6bb2a407f0310c106bcdd04658a3745ee0054500c8c5057b623dbc4c2bfcb148445aba34356267a72113652b820ff8eb620519f198881cefe7273391ef5efb81321b5490ca00825cace44e44b9ea53e4659ac76eabd32e3502a35d37cbd4919863c279a53f27265d22b765391ea8b67e7bc6e94b39cc0a0ecbd1b6886132d44065749c488f65dc5c58d0389b02162a905cfb299d99ca264656fbdc4dcee63b670af767db40c11e84fe81e15e23ad867752d538821d722f3f9c069ef9bba4ec6c41791264fb085b5a09e42a4cad50be403c53bfc4ee7b0527e048886cc45a67f68ce51746cfc6bb9a734d252027e64d11fb5ed92600f300000000000000000000000000000000000000000000000000000000"

// Logs the expected payload size next to the actual calldata string length.
const checkSize = () => {
  // expected_length_in_bytes = 0x(2) + 4(selector) + 32(a.x) + 32(a.y) + 64(b.x) + 64(b.y) + 32(c.x) + 32(c.y) + 32(2 non hardcoded public inputs that we split)
  // NOTE(review): expectedLength sums BYTE counts, but tx.length counts HEX
  // characters (2 per byte, plus "0x"), and the calldata appears to carry ABI
  // offset/length head words as well — so the two numbers are not directly
  // comparable, which is presumably why the assert below is commented out.
  // TODO confirm the intended comparison before re-enabling the assert.
  const expectedLength = 2 + 4 + 32 + 32 + 64 + 64 + 32 + 32 + 32
  // assert(tx.length === expectedLength)

  console.log(expectedLength)
  console.log(tx.length)
}

checkSize();
import { Field, Provable } from "o1js";
import { G2Line } from "../lines/index.js";
import { ATE_LOOP_COUNT, Fp12 } from "../towers/index.js";
import { LineAccumulator } from "./accumulate_lines.js";
import { GrothVk } from "./vk.js";
import { Proof } from "./proof.js";
import { AuXWitness } from "../aux_witness.js";

// Non-recursive (single-pass) Groth16 verifier over the bn254 pairing.
// Accumulates line evaluations for all three pairings, runs the Miller loop,
// and checks the final result against the residue witness (c, shift_power).
class Groth16Verifier {
  vk: GrothVk;

  constructor(
    path_to_vk: string
  ) {
    // Verification key is parsed from a JSON file on disk.
    this.vk = GrothVk.parse(path_to_vk);
  }

  // Computes the raw multi-Miller-loop output (including the VK's alpha_beta
  // factor) without any residue-witness correction or final check. Used to
  // derive the auxiliary witness offline (see serialize_mlo.ts).
  multiMillerLoop(
    proof: Proof,
  ) {
    let g = LineAccumulator.accumulate(
      proof.b_lines,
      this.vk.gamma_lines,
      this.vk.delta_lines,
      proof.B,
      proof.negA,
      proof.PI,
      proof.C
    );

    let mlo = Fp12.one();
    let mlo_idx = 0;
    // Square-and-multiply over the ATE loop; g[i-1] holds the combined line
    // evaluations for iteration i.
    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      mlo_idx = i - 1;
      mlo = mlo.square().mul(g[mlo_idx]);
    }

    // One extra slot holds the frobenius-line evaluations (not squared in).
    mlo_idx += 1;
    mlo = mlo.mul(g[mlo_idx]);
    mlo = mlo.mul(this.vk.alpha_beta)

    return mlo
  }

  // Full verification: runs the Miller loop seeded with c_inv, folds in
  // c / c_inv on the signed bits, applies the Frobenius-twisted residue
  // factors plus alpha_beta and the w27 shift, and asserts the result is one.
  verify(
    proof: Proof,
    aux_witness: AuXWitness,
  ) {
    let g = LineAccumulator.accumulate(
      proof.b_lines,
      this.vk.gamma_lines,
      this.vk.delta_lines,
      proof.B,
      proof.negA,
      proof.PI,
      proof.C
    );

    const { c, shift_power } = aux_witness;
    const c_inv = c.inverse();
    // Seeding with c_inv folds the residue witness into the loop itself.
    let f = c_inv;

    let idx = 0;

    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      idx = i - 1;
      f = f.square().mul(g[idx]);

      if (ATE_LOOP_COUNT[i] == 1) {
        f = f.mul(c_inv);
      }

      if (ATE_LOOP_COUNT[i] == -1) {
        f = f.mul(c);
      }
    }

    // Frobenius-line slot, multiplied without squaring.
    idx += 1;
    f = f.mul(g[idx]);

    // Residue-witness correction: c^(-p) * c^(p^2) * c^(-p^3), then alpha_beta.
    f = f
      .mul(c_inv.frobenius_pow_p())
      .mul(c.frobenius_pow_p_squared())
      .mul(c_inv.frobenius_pow_p_cubed())
      .mul(this.vk.alpha_beta);

    // Multiply by w27^shift_power, shift_power constrained to {0, 1, 2}.
    const shift = Provable.switch([shift_power.equals(Field(0)), shift_power.equals(Field(1)), shift_power.equals(Field(2))], Fp12, [Fp12.one(), this.vk.w27, this.vk.w27_square]);
    f = f.mul(shift);

    f.assert_equals(Fp12.one());
  }
}

export { Groth16Verifier }
KzgAccumulator({ 38 | proof: this.proof, 39 | state: this.state.deepClone() 40 | }) 41 | } 42 | } 43 | 44 | 45 | class ArrayListHasher { 46 | static n: number; 47 | 48 | static empty(): Field { 49 | const a = new Array(this.n).fill(Field(0n)) 50 | return Poseidon.hashPacked(Provable.Array(Field, ATE_LOOP_COUNT.length), a) 51 | } 52 | 53 | static hash(arr: Array): Field { 54 | return Poseidon.hashPacked(Provable.Array(Field, ATE_LOOP_COUNT.length), arr) 55 | } 56 | 57 | static open(lhs: Array, opening: Array, rhs: Array): Field { 58 | const opening_hashes: Field[] = opening.map((x) => Poseidon.hashPacked(Fp12, x)); 59 | 60 | let arr: Field[] = [] 61 | arr = arr.concat(lhs) 62 | arr = arr.concat(opening_hashes) 63 | arr = arr.concat(rhs) 64 | 65 | return this.hash(arr) 66 | } 67 | } 68 | 69 | ArrayListHasher.n = ATE_LOOP_COUNT.length 70 | 71 | export { KzgProof, KzgState, KzgAccumulator, ArrayListHasher } -------------------------------------------------------------------------------- /contracts/src/line_parser.ts: -------------------------------------------------------------------------------- 1 | import { G2Line } from "./lines/index.js"; 2 | import { ATE_LOOP_COUNT } from "./towers/consts.js"; 3 | 4 | function ateCntSlice(from: number, to: number) { 5 | let line_cnt = 0; 6 | 7 | for (let i = from; i < to; i++) { 8 | if (ATE_LOOP_COUNT[i] == 0) { 9 | line_cnt += 1 10 | } else { 11 | line_cnt += 2 12 | } 13 | } 14 | 15 | return line_cnt 16 | } 17 | 18 | class LineParser { 19 | static parse(from: number, to: number, lines: Array): Array { 20 | let start = ateCntSlice(1, from); 21 | let toSlice = ateCntSlice(from, to); 22 | return lines.slice(start, start + toSlice) 23 | } 24 | 25 | static frobenius_lines(lines: Array) { 26 | return lines.slice(-2) 27 | } 28 | } 29 | 30 | export { LineParser } -------------------------------------------------------------------------------- /contracts/src/lines/coeffs.ts: 
import { G2Line } from './index.js';
import { G2Affine } from '../ec/index.js';
import { ATE_LOOP_COUNT } from '../towers/index.js';

// Precomputes the G2 line coefficients consumed by the Miller loop for a fixed
// point Q: one tangent line per iteration, plus a chord line through Q (or -Q)
// whenever the signed loop bit is +1 (or -1), and finally the two Frobenius
// lines through Q's Frobenius images.
const computeLineCoeffs = (Q: G2Affine): Array<G2Line> => {
  const negQ = Q.neg();
  const lines: Array<G2Line> = [];

  // Running point T of the double-and-add ladder, starting at Q.
  let T = new G2Affine({ x: Q.x, y: Q.y });

  let line;
  for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
    // Tangent at T (doubling step) — emitted for every iteration.
    line = G2Line.fromPoints(T, T);
    lines.push(line);
    T = T.double_from_line(line.lambda);

    if (ATE_LOOP_COUNT[i] == 1) {
      // Chord through T and Q (addition step).
      line = G2Line.fromPoints(T, Q);
      lines.push(line);

      T = T.add_from_line(line.lambda, Q);
    } else if (ATE_LOOP_COUNT[i] == -1) {
      // Chord through T and -Q (subtraction step).
      line = G2Line.fromPoints(T, negQ);
      lines.push(line);

      T = T.add_from_line(line.lambda, negQ);
    }
  }

  // Final correction lines through the Frobenius images of Q.
  let Q1 = Q.frobenius();
  let Q2 = Q1.negative_frobenius();

  line = G2Line.fromPoints(T, Q1);
  lines.push(line);

  T = T.add_from_line(line.lambda, Q1);

  line = G2Line.fromPoints(T, Q2);
  lines.push(line);

  return lines;
};

export { computeLineCoeffs };
G2Line(value.lambda, value.neg_mu); 18 | } 19 | 20 | static fromPoints(lhs: G2Affine, rhs: G2Affine): G2Line { 21 | const eq = lhs.equals(rhs); 22 | 23 | let lambda: Fp2; 24 | if (eq.toBigInt() === 1n) { 25 | lambda = lhs.computeLambdaSame(); 26 | } else { 27 | lambda = lhs.computeLambdaDiff(rhs); 28 | } 29 | 30 | return new G2Line(lambda, lhs.computeMu(lambda).neg()); 31 | } 32 | 33 | // g + hw = g0 + h0W + g1W^2 + h1W^3 + g2W^4 + h2W^5 34 | psi(cache: AffineCache): Fp12 { 35 | const g0 = new Fp2({ c0: FpC.from(1n), c1: FpC.from(0) }); 36 | const h0 = this.lambda.mul_by_fp(cache.xp_prime); 37 | const g1 = Fp2.zero(); 38 | const h1 = this.neg_mu.mul_by_fp(cache.yp_prime); 39 | const g2 = Fp2.zero(); 40 | const h2 = Fp2.zero(); 41 | 42 | const c0 = new Fp6({ c0: g0, c1: g1, c2: g2 }); 43 | const c1 = new Fp6({ c0: h0, c1: h1, c2: h2 }); 44 | 45 | return new Fp12({ c0, c1 }); 46 | } 47 | 48 | // L, T : Y − (λX + µ) = 0 49 | evaluate(p: G2Affine): Fp2 { 50 | let t = this.lambda.mul(p.x); 51 | t = t.neg(); 52 | t = t.add(this.neg_mu); 53 | return t.add(p.y); 54 | } 55 | 56 | // L, T : Y − (λX + µ) = 0 57 | assert_is_line(t: G2Affine, q: G2Affine) { 58 | let e1 = this.evaluate(t); 59 | let e2 = this.evaluate(q); 60 | 61 | e1.assert_equals(ZERO); 62 | e2.assert_equals(ZERO); 63 | } 64 | 65 | assert_is_tangent(p: G2Affine) { 66 | let e = this.evaluate(p); 67 | e.assert_equals(ZERO); 68 | 69 | let dbl_lambda_y = this.lambda.add(this.lambda).mul(p.y); 70 | const x_square = p.x.square(); 71 | dbl_lambda_y.assert_equals(x_square.mul_by_fp(FpC.from(3n))); 72 | // dbl_lambda_y.assert_equals(x_square.add(x_square).add(x_square)); 73 | } 74 | 75 | // L, T : Y − (λX + µ) = 0 76 | evaluate_g1(p: G1Affine): Fp2 { 77 | let t = this.lambda.mul_by_fp(p.x); 78 | t = t.neg(); 79 | t = t.add(this.neg_mu); 80 | return t.add_fp(p.y); 81 | } 82 | } 83 | 84 | export { G2Line, computeLineCoeffs }; 85 | -------------------------------------------------------------------------------- 
/contracts/src/lines/precompute.ts: -------------------------------------------------------------------------------- 1 | // stores some of the data that can be precomputed when using affine repr: 2 | 3 | import { Provable } from 'o1js'; 4 | import { G1Affine } from '../ec/index.js'; 5 | import { Fp2, FpC } from '../towers/index.js'; 6 | 7 | // see: https://eprint.iacr.org/2013/722.pdf 8 | class AffineCache { 9 | xp_neg: FpC; 10 | yp_prime: FpC; 11 | xp_prime: FpC; 12 | 13 | constructor(p: G1Affine) { 14 | this.xp_neg = p.x.neg().assertCanonical(); 15 | this.yp_prime = p.y.inv().assertCanonical(); 16 | this.yp_prime = Provable.witness(FpC.provable, () => 17 | p.y.inv().assertCanonical() 18 | ); 19 | this.yp_prime.mul(p.y).assertEquals(FpC.from(1n)); 20 | this.xp_prime = this.xp_neg.mul(this.yp_prime).assertCanonical(); 21 | } 22 | } 23 | 24 | export { AffineCache }; 25 | -------------------------------------------------------------------------------- /contracts/src/plonk/accumulator.ts: -------------------------------------------------------------------------------- 1 | import { Struct } from "o1js"; 2 | import { Sp1PlonkProof } from "./proof.js"; 3 | import { Sp1PlonkFiatShamir } from "./fiat-shamir/index.js"; 4 | import { StateUntilPairing } from "./state.js"; 5 | 6 | // This is running accumulator that we pass through recursion 7 | // Not all values are needed at each circuit but since Poseidon is cheap we take this approach for now 8 | class Accumulator extends Struct({ 9 | proof: Sp1PlonkProof, 10 | fs: Sp1PlonkFiatShamir, 11 | state: StateUntilPairing 12 | }) { 13 | deepClone() { 14 | return new Accumulator({ 15 | proof: this.proof, 16 | fs: this.fs.deepClone(), 17 | state: this.state.deepClone() 18 | }) 19 | } 20 | } 21 | 22 | export { Accumulator } -------------------------------------------------------------------------------- /contracts/src/plonk/aux_witness.ts: -------------------------------------------------------------------------------- 1 | import { 
Field, Struct } from "o1js" 2 | import { Fp12, Fp12Type } from "../towers/fp12.js" 3 | import fs from "fs" 4 | 5 | export type AuXWitnessType = { 6 | c: Fp12Type, 7 | shift_power: string 8 | } 9 | 10 | export class AuXWitness extends Struct({ 11 | c: Fp12, 12 | shift_power: Field 13 | }) { 14 | static loadFromPath(path: string): AuXWitness { 15 | const data = fs.readFileSync(path, 'utf-8'); 16 | const obj: AuXWitnessType = JSON.parse(data) 17 | 18 | return new AuXWitness({ 19 | c: Fp12.loadFromJSON(obj.c), 20 | shift_power: Field.from(obj.shift_power) 21 | }) 22 | } 23 | 24 | static loadFromJSON(obj: AuXWitnessType): AuXWitness { 25 | return new AuXWitness({ 26 | c: Fp12.loadFromJSON(obj.c), 27 | shift_power: Field.from(obj.shift_power) 28 | }) 29 | } 30 | } -------------------------------------------------------------------------------- /contracts/src/plonk/e2e_verify.ts: -------------------------------------------------------------------------------- 1 | import { Provable } from 'o1js'; 2 | import v8 from 'v8'; 3 | import { Sp1PlonkVerifier } from './verifier.js'; 4 | import { VK } from './vk.js'; 5 | import fs from "fs" 6 | import { FrC } from '../towers/fr.js'; 7 | import { Sp1PlonkProof, deserializeProof } from './proof.js'; 8 | import { parsePublicInputs } from './parse_pi.js'; 9 | import { AuXWitness } from './aux_witness.js'; 10 | 11 | const args = process.argv; 12 | 13 | const hexProof = args[2] 14 | const programVk = args[3] 15 | const hexPi = args[4] 16 | const auxWtnsPath = args[5] 17 | const auxWitness = AuXWitness.loadFromPath(auxWtnsPath) 18 | 19 | const g2_lines = fs.readFileSync(`./src/plonk/mm_loop/g2_lines.json`, 'utf8'); 20 | const tau_lines = fs.readFileSync(`./src/plonk/mm_loop/tau_lines.json`, 'utf8'); 21 | 22 | const Verifier = new Sp1PlonkVerifier(VK, g2_lines, tau_lines) 23 | 24 | function main() { 25 | const [pi0, pi1] = Provable.witness(Provable.Array(FrC.provable, 2), () => parsePublicInputs(programVk, hexPi)); 26 | const proof = 
import { Bool, Bytes, Provable } from "o1js";
import { FrC, FrU } from "../../towers/index.js";

// Reduces a 32-byte SHA digest into a canonical Fr element.
// The two highest bits are peeled off during bit collection and added back as
// precomputed multiples (2^254 mod r and 2^255 mod r), so the reconstructed
// 254-bit value plus the conditional shifts equals the digest mod r.
export function shaToFr(hashDigest: Bytes): FrC
{
  let fields = hashDigest.toFields()

  const shaBitRepr: Bool[] = []
  // bit255 / bit256 capture the two most-significant digest bits (weights
  // 2^254 and 2^255), which are excluded from shaBitRepr below.
  let bit255 = Bool(false)
  let bit256 = Bool(false)

  // Walk bytes from index 31 down to 0, pushing each byte's bits in order;
  // bits of byte 0 at positions 6 and 7 are the two top bits we skip.
  for (let i = 31; i >= 0; i--) {
    const bits = fields[i].toBits();
    for (let j = 0; j < 8; j++) {
      // we skip last 2 bits
      if (i == 0 && j == 6) {
        bit255 = bits[j];
      } else if (i == 0 && j == 7) {
        bit256 = bits[j];
      } else {
        shaBitRepr.push(bits[j])
      }
    }
  }

  const sh254 = FrC.from(7059779437489773633646340506914701874769131765994106666166191815402473914367n) // 2^254 % r
  const sh255 = FrC.from(14119558874979547267292681013829403749538263531988213332332383630804947828734n) // 2^255 % r

  // Reassemble the low 254 bits as an (unreduced) Fr element.
  let x = FrU.fromBits(shaBitRepr)

  // Conditionally add back the contribution of each skipped top bit.
  const a = Provable.if(bit255.equals(Bool(true)), FrC.provable, sh254, FrC.from(0n))
  const b = Provable.if(bit256.equals(Bool(true)), FrC.provable, sh255, FrC.from(0n))

  const res: FrC = x.add(a).add(b).assertCanonical();
  return res
}
| const make_w27 = () => { 8 | const g00 = FpC.from(0n); 9 | const g01 = FpC.from(0n); 10 | const g0 = new Fp2({ c0: g00, c1: g01 }); 11 | 12 | const g10 = FpC.from(0n); 13 | const g11 = FpC.from(0n); 14 | const g1 = new Fp2({ c0: g10, c1: g11 }); 15 | 16 | const g20 = FpC.from(8204864362109909869166472767738877274689483185363591877943943203703805152849n); 17 | const g21 =FpC.from(17912368812864921115467448876996876278487602260484145953989158612875588124088n); 18 | const g2 = new Fp2({ c0: g20, c1: g21 }); 19 | 20 | const g = new Fp6({ c0: g0, c1: g1, c2: g2 }); 21 | 22 | const h00 = FpC.from(0n); 23 | const h01 = FpC.from(0n); 24 | const h0 = new Fp2({ c0: h00, c1: h01 }); 25 | 26 | const h10 = FpC.from(0n); 27 | const h11 = FpC.from(0n); 28 | const h1 = new Fp2({ c0: h10, c1: h11 }); 29 | 30 | const h20 = FpC.from(0n); 31 | const h21 = FpC.from(0n); 32 | const h2 = new Fp2({ c0: h20, c1: h21 }); 33 | 34 | const h = new Fp6({ c0: h0, c1: h1, c2: h2 }); 35 | 36 | return new Fp12({ c0: g, c1: h }); 37 | }; 38 | 39 | // const make_c = () => { 40 | // const g00 = FpC.from(17809908658599905669233067874409671242283811456930317134097852515933265216479n); 41 | // const g01 = FpC.from(10470319950495296318475942057390759978984283582646520325845146396832798744381n); 42 | // const g0 = new Fp2({ c0: g00, c1: g01 }); 43 | 44 | // const g10 = FpC.from(1373135849386422495692138679924894529759861193424843995200995811039858963801n); 45 | // const g11 = FpC.from(4151889749263745023524983530491306316029185969007955602029676318334863673425n); 46 | // const g1 = new Fp2({ c0: g10, c1: g11 }); 47 | 48 | // const g20 = FpC.from(1883139602444365611950631463211279742694452790893121626873037845779196729729n); 49 | // const g21 = FpC.from(5970861074310287921791209887447017145467548262755834228090160275948965220592n); 50 | // const g2 = new Fp2({ c0: g20, c1: g21 }); 51 | 52 | // const g = new Fp6({ c0: g0, c1: g1, c2: g2 }); 53 | 54 | // const h00 = 
/*
  Accumulate lines to utilize sparse multiplications:
    - It checks if lines are correct (if point is not fixed) and evaluates them with sparse mul
*/

import { G1Affine, G2Affine } from "../../ec/index.js";
import { G2Line } from "../../lines/index.js";
import { AffineCache } from "../../lines/precompute.js";
import { ATE_LOOP_COUNT } from "../../towers/consts.js";
import { Fp12 } from "../../towers/fp12.js";


// Evaluates the precomputed g2 lines at A and the tau lines at negB, combining
// the evaluations per ATE-loop iteration into one Fp12 slot via sparse_mul.
// Returns one slot per loop iteration plus a final slot for the two
// frobenius-line evaluations.
class KZGLineAccumulator {
  static accumulate(
    g2_lines: Array<G2Line>,
    tau_lines: Array<G2Line>,
    A: G1Affine,
    negB: G1Affine,
  ): Array<Fp12> {
    const g: Array<Fp12> = [];

    // Per-point precomputed values (-x, 1/y, -x/y) used by psi().
    const a_cache = new AffineCache(A);
    const negB_cache = new AffineCache(negB);

    let idx = 0;
    let line_cnt = 0;
    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      idx = i - 1;

      // Doubling-step lines — present for every iteration.
      let g2_line = g2_lines[line_cnt];
      let tau_line = tau_lines[line_cnt];
      line_cnt += 1;

      g.push(g2_line.psi(a_cache));
      g[idx] = g[idx].sparse_mul(tau_line.psi(negB_cache));

      // Addition-step lines — only when the signed loop bit is non-zero.
      if (ATE_LOOP_COUNT[i] == 1 || ATE_LOOP_COUNT[i] == -1) {
        let g2_line = g2_lines[line_cnt];
        let tau_line = tau_lines[line_cnt];
        line_cnt += 1;

        g[idx] = g[idx].sparse_mul(g2_line.psi(a_cache));
        g[idx] = g[idx].sparse_mul(tau_line.psi(negB_cache));
      }
    }

    // First frobenius line pair, stored in an extra trailing slot.
    let g2_line = g2_lines[line_cnt];
    let tau_line = tau_lines[line_cnt];
    line_cnt += 1;
    idx += 1;

    g.push(g2_line.psi(a_cache));
    g[idx] = g[idx].sparse_mul(tau_line.psi(negB_cache));

    // Second frobenius line pair, folded into the same trailing slot.
    g2_line = g2_lines[line_cnt];
    tau_line = tau_lines[line_cnt];
    g[idx] = g[idx].sparse_mul(g2_line.psi(a_cache));
    g[idx] = g[idx].sparse_mul(tau_line.psi(negB_cache));

    return g;
  }
}

export { KZGLineAccumulator };
import { G2Line } from '../../lines/index.js';
import { G1Affine } from '../../ec/index.js';
import { ATE_LOOP_COUNT, Fp12 } from '../../towers/index.js';
import { KZGLineAccumulator } from './accumulate_lines.js';
import { Field, Provable } from 'o1js';

// Pairing-equality check e(A, [1]) * e(negB, [x]) == 1 over precomputed line
// coefficients for the fixed G2 generator and tau points.
class KZGPairing {
  g2_lines: Array<G2Line>;
  tau_lines: Array<G2Line>;
  // [w27^0, w27^1, w27^2] — shift candidates selected by shift_power.
  w27: Array<Fp12>;

  constructor(
    g2_lines: string,
    tau_lines: string,
    w27: Fp12,
  ) {
    // Line coefficients are provided as JSON strings (precomputed offline).
    let parsed_g2_lines: any[] = JSON.parse(g2_lines);
    this.g2_lines = parsed_g2_lines.map(
      (g: any): G2Line => G2Line.fromJSON(g)
    );

    let parsed_tau_lines: any[] = JSON.parse(tau_lines);
    this.tau_lines = parsed_tau_lines.map(
      (g: any): G2Line => G2Line.fromJSON(g)
    );

    this.w27 = [Fp12.one(), w27, w27.mul(w27)];
  }

  // Raw multi-Miller-loop output for (A, negB), without any residue-witness
  // correction or final check.
  multiMillerLoop(
    A: G1Affine,
    negB: G1Affine,
  ): Fp12 {
    const g = KZGLineAccumulator.accumulate(this.g2_lines, this.tau_lines, A, negB);

    let mlo = Fp12.one();
    let mlo_idx = 0;
    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      mlo_idx = i - 1;
      mlo = mlo.square().mul(g[mlo_idx]);
    }

    // Trailing slot with the frobenius-line evaluations (not squared in).
    mlo_idx += 1;
    mlo = mlo.mul(g[mlo_idx]);

    return mlo
  }

  // Full check: Miller loop seeded with c_inv, folding c / c_inv on signed
  // bits, then Frobenius-twisted residue correction and the w27 shift;
  // asserts the product equals one.
  proveEqual(
    A: G1Affine,
    negB: G1Affine,
    shift_power: Field,
    c: Fp12
  ) {
    const g = KZGLineAccumulator.accumulate(this.g2_lines, this.tau_lines, A, negB);

    const c_inv = c.inverse();
    let f = c_inv;

    let idx = 0;

    for (let i = 1; i < ATE_LOOP_COUNT.length; i++) {
      idx = i - 1;
      f = f.square().mul(g[idx]);

      if (ATE_LOOP_COUNT[i] == 1) {
        f = f.mul(c_inv);
      }

      if (ATE_LOOP_COUNT[i] == -1) {
        f = f.mul(c);
      }
    }

    idx += 1;
    f = f.mul(g[idx]);

    // Residue-witness correction: c^(-p) * c^(p^2) * c^(-p^3).
    f = f
      .mul(c_inv.frobenius_pow_p())
      .mul(c.frobenius_pow_p_squared())
      .mul(c_inv.frobenius_pow_p_cubed());

    // Multiply by w27^shift_power, shift_power constrained to {0, 1, 2}.
    const shift = Provable.switch([shift_power.equals(Field(0)), shift_power.equals(Field(1)), shift_power.equals(Field(2))], Fp12, [Fp12.one(), this.w27[1], this.w27[2]]);
    f = f.mul(shift);

    f.assert_equals(Fp12.one());
  }
}

export { KZGPairing }
'../../lines/coeffs.js'; 5 | import { G2Line } from '../../lines/index.js'; 6 | 7 | const g2_lines_path = `./src/plonk/mm_loop/g2_lines.json`; 8 | const tau_lines_path = `./src/plonk/mm_loop/tau_lines.json`; 9 | 10 | function precompute_lines( 11 | g2_x_0: bigint, 12 | g2_x_1: bigint, 13 | g2_y_0: bigint, 14 | g2_y_1: bigint, 15 | 16 | tau_x_0: bigint, 17 | tau_x_1: bigint, 18 | tau_y_0: bigint, 19 | tau_y_1: bigint, 20 | ) { 21 | let g2_x = new Fp2({ c0: FpC.from(g2_x_0), c1: FpC.from(g2_x_1)}) 22 | let g2_y = new Fp2({ c0: FpC.from(g2_y_0), c1: FpC.from(g2_y_1)}) 23 | let g2 = new G2Affine({ x: g2_x, y: g2_y }); 24 | 25 | let g2_lines = computeLineCoeffs(g2); 26 | 27 | fs.writeFile( 28 | g2_lines_path, 29 | JSON.stringify(g2_lines.map((line: G2Line) => G2Line.toJSON(line))), 30 | 'utf8', 31 | (err: any) => { 32 | if (err) { 33 | console.error('Error writing to file:', err) 34 | return; 35 | } 36 | console.log('g2 lines successfully written') 37 | } 38 | ); 39 | 40 | let tau_x = new Fp2({ c0: FpC.from(tau_x_0), c1: FpC.from(tau_x_1)}) 41 | let tau_y = new Fp2({ c0: FpC.from(tau_y_0), c1: FpC.from(tau_y_1)}) 42 | 43 | let tau = new G2Affine({ x: tau_x, y: tau_y }); 44 | 45 | let tau_lines = computeLineCoeffs(tau); 46 | 47 | fs.writeFile( 48 | tau_lines_path, 49 | JSON.stringify(tau_lines.map((line: G2Line) => G2Line.toJSON(line))), 50 | 'utf8', 51 | (err: any) => { 52 | if (err) { 53 | console.error('Error writing to file:', err); 54 | return; 55 | } 56 | console.log('tau lines successfully written'); 57 | } 58 | ) 59 | } 60 | 61 | precompute_lines( 62 | // g2_x 63 | 10857046999023057135944570762232829481370756359578518086990519993285655852781n, 64 | 11559732032986387107991004021392285783925812861821192530917403151452391805634n, 65 | 66 | // g2_y 67 | 8495653923123431417604973247489272438418190587263600148770280649306958101930n, 68 | 4082367875863433681332203403145435568316851327593401208105741076214120093531n, 69 | 70 | // tau_x 71 | 
19089565590083334368588890253123139704298730990782503769911324779715431555531n, 72 | 15805639136721018565402881920352193254830339253282065586954346329754995870280n, 73 | 74 | // tau_y 75 | 6779728121489434657638426458390319301070371227460768374343986326751507916979n, 76 | 9779648407879205346559610309258181044130619080926897934572699915909528404984n, 77 | ) -------------------------------------------------------------------------------- /contracts/src/plonk/parse_pi.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from "ethers"; 2 | import { FrC } from "../towers/index.js"; 3 | import { Bool, Bytes, Field, Gadgets, UInt8 } from "o1js"; 4 | import { shaToFr } from "./fiat-shamir/sha_to_fr.js"; 5 | import { Bytes32 } from "./fiat-shamir/index.js"; 6 | 7 | // TODO: some stuff here can be hardcoded 8 | export function parsePublicInputs(programVk: string, piHex: string): [FrC, FrC] { 9 | const digest = ethers.sha256(piHex); 10 | const bytes = ethers.getBytes(digest); 11 | 12 | const k: bigint = 14474011154664524427946373126085988481658748083205070504932198000989141204991n; // (1 << 253) - 1 13 | const k_pad = ethers.zeroPadBytes("0x" + k.toString(16), 32); 14 | 15 | const k_bytes = ethers.getBytes(k_pad); 16 | 17 | const pi1_bytes = []; 18 | for (let i = 0; i < 32; i++) { 19 | pi1_bytes.push(bytes[i] & k_bytes[i]); 20 | } 21 | 22 | let pi1 = ethers.hexlify(new Uint8Array(pi1_bytes)); 23 | 24 | return [FrC.from(programVk), FrC.from(pi1)] 25 | } 26 | 27 | export function parseDigestProvable(digest: Bytes): FrC { 28 | const k = [ 29 | Field.from(0x1fn), 30 | ...Array(31).fill(Field.from(0xffn)), 31 | ]; 32 | 33 | const fields = digest.toFields(); 34 | let bytes: UInt8[] = [] 35 | 36 | for (let i = 0; i < 32; i++) { 37 | bytes.push(UInt8.Unsafe.fromField(Gadgets.and(fields[i], k[i], 8))) 38 | } 39 | 40 | return shaToFr(Bytes32.from(bytes)) 41 | } 42 | 43 | export function parsePublicInputsProvable(piBytes: Bytes): FrC 
{
    // SHA-256 inside the circuit, then mask + reduce into Fr.
    const digest = Gadgets.SHA256.hash(piBytes);
    return parseDigestProvable(digest);
}
--------------------------------------------------------------------------------
/contracts/src/plonk/piop/e2e_test.ts:
--------------------------------------------------------------------------------
import { Provable } from 'o1js';
import v8 from 'v8';
import { Sp1PlonkProof, deserializeProof } from '../proof.js';
import { FrC } from '../../towers/fr.js';
import { PlonkVerifierPIOP } from './piop.js';
import { VK } from '../vk.js';

// Hard-coded SP1 Plonk proof used as the e2e fixture.
const hexProof = "0x801c66ac0adb18b19c32120abcaea2dfa6ebc07925a4c12abbb823ffa50aeae202c3b8910a8d533f786b3f53345442e25ec85abd1ba147574d276f2242ff7831b8bea1402648e4c4e876f53fb2d6211414fc5da6e1441484a1a7ccc599621663ad6d628621f6e3a0ded5513478fa59e788b4e06102202cb4663002b9b30467c4054aaf512ecd8e695bb68bf9500cd3de1da60d8084c3f2bf5de1d748d4b01131b9545f9e14507651644746c0952ada51abba4358bba695fcfa5162f013b044e93f486e7704d08d5ee2e0bcd5bbc01b8e6e12f0d09df5a285dc0da05840e5fc1a2f7fb6e200fdb49c7bdf737927f8f9b4f60a000baa9c4377964155caf01e701a1b35d5e92ec3ef85185eb95cb37e92cccb85a35617e7cafa2fe942d0c8a1845540ec1d4d2745400e54065f8601ff4ea8985dad2f3b8000b35e1b90e5525938d5d30157212509e6e2b6bc3b1dc0c71f04c735c431473e1776f138c8e5808e8a99cc59669916d026eafe692c6a8345c17239d6e7683a924360336ad10f948b4bfb2041226b043ad28ad6471c591ec17c09b84c591740e751c04018873ac617df8c2ffa52651b096bd46e6a04bf3e1797e903f47fbb64761a028967c5b3f748165358c8b6b027af7d77b3ca83fee575b2d39e5874128ac952016bda7aca187426bcb7a0460d0111783b814486fdf46d76d6854ed3889036126f3af5ffef96efea25d73524230c22a1f411ec42f76a07c2d5f3b78d2311550b790be1303a81c9aee96077b72a2575c5739eab16dee3e1f3fdaddfc9278814b9ffc764fd883c59fb4a0c4fd577081e07e2504b9eabfbf2962d03873f5ce9ff38ffe0f20446ae43b7abd35e84aa243c4a64ae86448e02a0728c10c8e38e226854d34bcb8ae85b19856e908ad4d01c1a70e88c77dafad62c1eca5dcd0a640558b9162b0fff944cfca3d330ba0e870b306fa22276609649e111fafd23b8eaf4571d6bc47b4d963ad28d80e2e3fcbf04ca5a5f641b32333729d102a9b4a9d26ac03c6e4f15adfedf250f506c4d79f10342a6ccde9efa0bce51fe08df09d697f07e38487f2bd7a04f4bc410eba22c2e1b3517159a47eb183a51cad319b54d3c645d56db854739bb844f8c7e49207f0807d26e1a837bdea04a774f09e64a2ff4cb852ba5f31849c8451330c4b8ab85b5261b092715702b7604202584c70431947264f339486a6222843ff99810d6fb05"
const piopVerifier = new PlonkVerifierPIOP(VK)

// Runs the full PIOP phase of the verifier on the fixture proof and inputs.
function main() {
    const pi0 = Provable.witness(FrC.provable, () => FrC.from("0x0097228875a04c12dda0a76b705856f1a99fd19613c0ba69b056f4c4d18921e5"))
    const pi1 = Provable.witness(FrC.provable, () => FrC.from("0x048e48f4b209e2dc6d92839ecba0e9321e83ea61ecb6430fc737b1e94c3fabbb"))

    const proof = Provable.witness(Sp1PlonkProof, () => new Sp1PlonkProof(deserializeProof(hexProof)))

    piopVerifier.piop(proof, pi0, pi1);
}

// npm run build && node --max-old-space-size=65536 build/src/plonk/piop/e2e_test.js
(async () => {
    console.time('running Fp constant version');
    main();
    console.timeEnd('running Fp constant version');

    console.time('running Fp witness generation & checks');
    await Provable.runAndCheck(main);
    console.timeEnd('running Fp witness generation & checks');

    console.time('creating Fp constraint system');
    const cs = await Provable.constraintSystem(main);
    console.timeEnd('creating Fp constraint system');

    console.log(cs.summary());

    // Report V8 heap usage: these circuits are memory hungry.
    const heap = v8.getHeapStatistics();
    const totalHeapSizeinGB = (heap.total_available_size / 1024 / 1024 / 1024).toFixed(2);
    console.log(`Total heap size: ${totalHeapSizeinGB} GB`);

    const usedHeapSizeinGB = (heap.used_heap_size / 1024 / 1024 / 1024).toFixed(2);
    console.log(`Used heap size: ${usedHeapSizeinGB} GB`);
})();
--------------------------------------------------------------------------------
/contracts/src/plonk/piop/piop.ts: -------------------------------------------------------------------------------- 1 | import { FpC, FrC } from "../../towers/index.js"; 2 | import { Sp1PlonkFiatShamir } from "../fiat-shamir/index.js"; 3 | import { Sp1PlonkProof } from "../proof.js"; 4 | import { Sp1PlonkVk } from "../vk.js"; 5 | import { compute_alpha_square_lagrange_0, compute_commitment_linearized_polynomial, customPiLagrange, evalVanishing, fold_quotient, fold_state, opening_of_linearized_polynomial, pi_contribution, preparePairing } from "./plonk_utils.js"; 6 | 7 | // expects only two public inputs as in Sp1 Plonk verifier 8 | 9 | class PlonkVerifierPIOP { 10 | VK: Sp1PlonkVk 11 | constructor(VK: Sp1PlonkVk) { 12 | this.VK = VK; 13 | } 14 | 15 | piop(proof: Sp1PlonkProof, pi0: FrC, pi1: FrC): [FpC, FpC, FpC, FpC] { 16 | const fs = Sp1PlonkFiatShamir.empty() 17 | 18 | fs.squeezeGamma(proof, pi0, pi1, this.VK) 19 | fs.squeezeBeta() 20 | fs.squeezeAlpha(proof) 21 | fs.squeezeZeta(proof) 22 | 23 | const [zeta_pow_n, zh_eval] = evalVanishing(fs.zeta, this.VK) 24 | 25 | const alpha_2_l0 = compute_alpha_square_lagrange_0(zh_eval, fs.zeta, fs.alpha, this.VK); 26 | 27 | const [hx, hy] = fold_quotient(proof.h0_x, proof.h0_y, proof.h1_x, proof.h1_y, proof.h2_x, proof.h2_y, fs.zeta, zeta_pow_n, zh_eval) 28 | 29 | const pis = pi_contribution([pi0, pi1], fs.zeta, zh_eval, this.VK.inv_domain_size, this.VK.omega) 30 | const l_pi_commit = customPiLagrange(fs.zeta, zh_eval, proof.qcp_0_wire_x, proof.qcp_0_wire_y, this.VK) 31 | const pi = pis.add(l_pi_commit).assertCanonical(); 32 | 33 | const linearized_opening = opening_of_linearized_polynomial(proof, fs.alpha, fs.beta, fs.gamma, pi, alpha_2_l0); 34 | 35 | const [lcm_x, lcm_y] = compute_commitment_linearized_polynomial(this.VK, proof, fs.alpha, fs.beta, fs.gamma, fs.zeta, alpha_2_l0, hx, hy) 36 | 37 | fs.squeezeGammaKzg(proof, this.VK, lcm_x, lcm_y, linearized_opening) 38 | 39 | const [cm_x, cm_y, cm_opening] = 
fold_state(this.VK, proof, lcm_x, lcm_y, linearized_opening, fs.gamma_kzg); 40 | 41 | const random = fs.squeezeRandomForKzg(proof, cm_x, cm_y) 42 | 43 | const [kzg_cm_x, kzg_cm_y, neg_fq_x, neg_fq_y] = preparePairing(this.VK, proof, random, cm_x, cm_y, cm_opening, fs.zeta) 44 | 45 | return [kzg_cm_x, kzg_cm_y, neg_fq_x, neg_fq_y] 46 | } 47 | } 48 | 49 | export { PlonkVerifierPIOP } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/line_parser.ts: -------------------------------------------------------------------------------- 1 | import { G2Line } from "../../lines/index.js"; 2 | import fs from "fs" 3 | import { ATE_LOOP_COUNT } from "../../towers/consts.js"; 4 | 5 | function ateCntSlice(from: number, to: number) { 6 | let line_cnt = 0; 7 | 8 | for (let i = from; i < to; i++) { 9 | if (ATE_LOOP_COUNT[i] == 0) { 10 | line_cnt += 1 11 | } else { 12 | line_cnt += 2 13 | } 14 | } 15 | 16 | return line_cnt 17 | } 18 | 19 | class LineParser { 20 | g2_lines: Array 21 | tau_lines: Array 22 | 23 | constructor(g2_lines: Array, tau_lines: Array) { 24 | this.g2_lines = g2_lines; 25 | this.tau_lines = tau_lines; 26 | } 27 | 28 | static init() { 29 | const g2_lines_path = fs.readFileSync(`./src/plonk/mm_loop/g2_lines.json`, 'utf8'); 30 | const tau_lines_path = fs.readFileSync(`./src/plonk/mm_loop/tau_lines.json`, 'utf8'); 31 | 32 | let parsed_g2_lines: any[] = JSON.parse(g2_lines_path); 33 | let g2_lines = parsed_g2_lines.map( 34 | (g: any): G2Line => G2Line.fromJSON(g) 35 | ); 36 | 37 | let parsed_tau_lines: any[] = JSON.parse(tau_lines_path); 38 | let tau_lines = parsed_tau_lines.map( 39 | (tau: any): G2Line => G2Line.fromJSON(tau) 40 | ); 41 | 42 | return new LineParser(g2_lines, tau_lines) 43 | } 44 | 45 | parse_g2(from: number, to: number) { 46 | let start = ateCntSlice(1, from); 47 | let toSlice = ateCntSlice(from, to); 48 | return this.g2_lines.slice(start, start + toSlice) 49 | } 50 | 51 | parse_tau(from: 
number, to: number) { 52 | let start = ateCntSlice(1, from); 53 | let toSlice = ateCntSlice(from, to); 54 | return this.tau_lines.slice(start, start + toSlice) 55 | } 56 | 57 | frobenius_g2_lines() { 58 | return this.g2_lines.slice(-2) 59 | } 60 | 61 | frobenius_tau_lines() { 62 | return this.tau_lines.slice(-2) 63 | } 64 | } 65 | 66 | export { LineParser } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp0.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { VK } from '../vk.js'; 8 | 9 | const zkp0 = ZkProgram({ 10 | name: 'zkp0', 11 | publicInput: Field, 12 | publicOutput: Field, 13 | methods: { 14 | compute: { 15 | privateInputs: [Accumulator], 16 | async method( 17 | input: Field, 18 | acc: Accumulator 19 | ) { 20 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 21 | inDigest.assertEquals(input); 22 | 23 | acc.fs.squeezeGamma(acc.proof, acc.state.pi0, acc.state.pi1, VK) 24 | acc.fs.squeezeBeta() 25 | 26 | return Poseidon.hashPacked(Accumulator, acc); 27 | }, 28 | }, 29 | }, 30 | }); 31 | 32 | 33 | const ZKP0Proof = ZkProgram.Proof(zkp0); 34 | export { ZKP0Proof, zkp0 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp1.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { compute_alpha_square_lagrange_0, evalVanishing } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp1 = ZkProgram({ 11 | name: 'zkp1', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 
19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | acc.fs.squeezeAlpha(acc.proof) 25 | acc.fs.squeezeZeta(acc.proof) 26 | 27 | const [zeta_pow_n, zh_eval] = evalVanishing(acc.fs.zeta, VK) 28 | const alpha_2_l0 = compute_alpha_square_lagrange_0(zh_eval, acc.fs.zeta, acc.fs.alpha, VK); 29 | 30 | acc.state.zeta_pow_n = zeta_pow_n; 31 | acc.state.zh_eval = zh_eval; 32 | acc.state.alpha_2_l0 = alpha_2_l0; 33 | 34 | return Poseidon.hashPacked(Accumulator, acc); 35 | }, 36 | }, 37 | }, 38 | }); 39 | 40 | 41 | const ZKP1Proof = ZkProgram.Proof(zkp1); 42 | export { ZKP1Proof, zkp1 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp10.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { fold_state_2 } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp10 = ZkProgram({ 11 | name: 'zkp10', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [cm_x, cm_y] = fold_state_2(VK, acc.proof, acc.state.cm_x, acc.state.cm_y, acc.fs.gamma_kzg) 25 | const kzg_random = acc.fs.squeezeRandomForKzg(acc.proof, cm_x, cm_y) 26 | 27 | acc.state.cm_x = cm_x; 28 | acc.state.cm_y = cm_y; 29 | acc.state.kzg_random = kzg_random; 30 | 31 | return Poseidon.hashPacked(Accumulator, acc); 32 | }, 33 | }, 34 | }, 35 | }); 36 | 37 | 38 | const ZKP10Proof = ZkProgram.Proof(zkp10); 39 | export { ZKP10Proof, zkp10 } -------------------------------------------------------------------------------- 
/contracts/src/plonk/recursion/zkp11.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { preparePairing_0 } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp11 = ZkProgram({ 11 | name: 'zkp11', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [kzg_cm_x, kzg_cm_y, neg_fq_x, neg_fq_y] = preparePairing_0( 25 | VK, 26 | acc.proof, 27 | acc.state.kzg_random, 28 | acc.state.cm_x, 29 | acc.state.cm_y, 30 | acc.state.cm_opening 31 | ) 32 | 33 | acc.state.kzg_cm_x = kzg_cm_x; 34 | acc.state.kzg_cm_y = kzg_cm_y; 35 | acc.state.neg_fq_x = neg_fq_x; 36 | acc.state.neg_fq_y = neg_fq_y; 37 | 38 | return Poseidon.hashPacked(Accumulator, acc); 39 | }, 40 | }, 41 | }, 42 | }); 43 | 44 | 45 | const ZKP11Proof = ZkProgram.Proof(zkp11); 46 | export { ZKP11Proof, zkp11 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp12.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { preparePairing_1 } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | import { G1Affine } from '../../ec/index.js'; 10 | import { ArrayListHasher, KzgAccumulator, KzgProof, KzgState } from '../../kzg/structs.js'; 11 | import { Fp12 } from '../../towers/fp12.js'; 12 | 13 | const zkp12 = ZkProgram({ 14 | name: 'zkp12', 15 | publicInput: Field, 16 | publicOutput: Field, 17 | methods: { 18 | compute: { 19 | privateInputs: 
[Accumulator, Field, Fp12], 20 | async method( 21 | input: Field, 22 | acc: Accumulator, 23 | shift_power: Field, 24 | c: Fp12, 25 | ) { 26 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 27 | inDigest.assertEquals(input); 28 | 29 | const [kzg_cm_x, kzg_cm_y] = preparePairing_1( 30 | VK, 31 | acc.proof, 32 | acc.state.kzg_random, 33 | acc.state.kzg_cm_x, 34 | acc.state.kzg_cm_y, 35 | acc.fs.zeta 36 | ) 37 | 38 | const A = new G1Affine({ x: kzg_cm_x, y: kzg_cm_y }); 39 | const negB = new G1Affine({ x: acc.state.neg_fq_x, y: acc.state.neg_fq_y }); 40 | 41 | let c_inv = c.inverse(); 42 | let kzgProof = new KzgProof({ 43 | A, 44 | negB, 45 | shift_power, 46 | c, 47 | c_inv, 48 | pi0: acc.state.pi0, 49 | pi1: acc.state.pi1 50 | }) 51 | 52 | let kzgState = new KzgState({ 53 | f: c_inv, 54 | lines_hashes_digest: ArrayListHasher.empty() 55 | }) 56 | 57 | let kzgAccumulator = new KzgAccumulator({ 58 | proof: kzgProof, 59 | state: kzgState 60 | }) 61 | 62 | return Poseidon.hashPacked(KzgAccumulator, kzgAccumulator); 63 | }, 64 | }, 65 | }, 66 | }); 67 | 68 | 69 | const ZKP12Proof = ZkProgram.Proof(zkp12); 70 | export { ZKP12Proof, zkp12 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp13.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | import { AffineCache } from '../../lines/precompute.js'; 11 | import { LineParser } from './line_parser.js'; 12 | 13 | const lineParser = LineParser.init() 14 | const g2_lines = lineParser.parse_g2(1, ATE_LOOP_COUNT.length - 46); 15 | const tau_lines = lineParser.parse_tau(1, ATE_LOOP_COUNT.length - 46); 16 | 17 | const zkp13 = ZkProgram({ 18 | name: 'zkp13', 
19 | publicInput: Field, 20 | publicOutput: Field, 21 | methods: { 22 | compute: { 23 | privateInputs: [KzgAccumulator, Provable.Array(Field, ATE_LOOP_COUNT.length)], 24 | async method( 25 | input: Field, 26 | acc: KzgAccumulator, 27 | lines_hashes: Array 28 | ) { 29 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 30 | inDigest.assertEquals(input); 31 | 32 | const lines_digest = ArrayListHasher.hash(lines_hashes); 33 | acc.state.lines_hashes_digest.assertEquals(lines_digest); 34 | 35 | const a_cache = new AffineCache(acc.proof.A); 36 | const b_cache = new AffineCache(acc.proof.negB); 37 | 38 | let idx = 0; 39 | let line_cnt = 0; 40 | 41 | let g; 42 | for (let i = 1; i < ATE_LOOP_COUNT.length - 46; i++) { 43 | idx = i - 1; 44 | 45 | let g_line = g2_lines[line_cnt]; 46 | let tau_line = tau_lines[line_cnt]; 47 | line_cnt += 1; 48 | 49 | g = g_line.psi(a_cache); 50 | g = g.sparse_mul(tau_line.psi(b_cache)) 51 | 52 | if (ATE_LOOP_COUNT[i] == 1 || ATE_LOOP_COUNT[i] == -1) { 53 | let g_line = g2_lines[line_cnt]; 54 | let tau_line = tau_lines[line_cnt]; 55 | line_cnt += 1; 56 | 57 | g = g.sparse_mul(g_line.psi(a_cache)); 58 | g = g.sparse_mul(tau_line.psi(b_cache)); 59 | } 60 | 61 | lines_hashes[idx] = Poseidon.hashPacked(Fp12, g); 62 | } 63 | 64 | let new_lines_hashes_digest = ArrayListHasher.hash(lines_hashes); 65 | acc.state.lines_hashes_digest = new_lines_hashes_digest; 66 | 67 | return Poseidon.hashPacked(KzgAccumulator, acc); 68 | }, 69 | }, 70 | }, 71 | }); 72 | 73 | 74 | const ZKP13Proof = ZkProgram.Proof(zkp13); 75 | export { ZKP13Proof, zkp13 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp14.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 
9 | import fs from "fs"; 10 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 11 | import { AffineCache } from '../../lines/precompute.js'; 12 | import { G2Line } from '../../lines/index.js'; 13 | import { LineParser } from './line_parser.js'; 14 | 15 | const lineParser = LineParser.init() 16 | const g2_lines = lineParser.parse_g2(ATE_LOOP_COUNT.length - 46, ATE_LOOP_COUNT.length - 26); 17 | const tau_lines = lineParser.parse_tau(ATE_LOOP_COUNT.length - 46, ATE_LOOP_COUNT.length - 26); 18 | 19 | const zkp14 = ZkProgram({ 20 | name: 'zkp14', 21 | publicInput: Field, 22 | publicOutput: Field, 23 | methods: { 24 | compute: { 25 | privateInputs: [KzgAccumulator, Provable.Array(Field, ATE_LOOP_COUNT.length)], 26 | async method( 27 | input: Field, 28 | acc: KzgAccumulator, 29 | lines_hashes: Array 30 | ) { 31 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 32 | inDigest.assertEquals(input); 33 | 34 | const lines_digest = ArrayListHasher.hash(lines_hashes); 35 | acc.state.lines_hashes_digest.assertEquals(lines_digest); 36 | 37 | const a_cache = new AffineCache(acc.proof.A); 38 | const b_cache = new AffineCache(acc.proof.negB); 39 | 40 | let idx = 0; 41 | let line_cnt = 0; 42 | 43 | let g; 44 | for (let i = ATE_LOOP_COUNT.length - 46; i < ATE_LOOP_COUNT.length - 26; i++) { 45 | idx = i - 1; 46 | 47 | let g_line = g2_lines[line_cnt]; 48 | let tau_line = tau_lines[line_cnt]; 49 | line_cnt += 1; 50 | 51 | g = g_line.psi(a_cache); 52 | g = g.sparse_mul(tau_line.psi(b_cache)) 53 | 54 | if (ATE_LOOP_COUNT[i] === 1 || ATE_LOOP_COUNT[i] === -1) { 55 | let g_line = g2_lines[line_cnt]; 56 | let tau_line = tau_lines[line_cnt]; 57 | line_cnt += 1; 58 | 59 | g = g.sparse_mul(g_line.psi(a_cache)); 60 | g = g.sparse_mul(tau_line.psi(b_cache)); 61 | } 62 | 63 | lines_hashes[idx] = Poseidon.hashPacked(Fp12, g); 64 | } 65 | 66 | let new_lines_hashes_digest = ArrayListHasher.hash(lines_hashes); 67 | acc.state.lines_hashes_digest = new_lines_hashes_digest; 68 | 69 | 
return Poseidon.hashPacked(KzgAccumulator, acc); 70 | }, 71 | }, 72 | }, 73 | }); 74 | 75 | 76 | const ZKP14Proof = ZkProgram.Proof(zkp14); 77 | export { ZKP14Proof, zkp14 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp15.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import fs from "fs"; 10 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 11 | import { AffineCache } from '../../lines/precompute.js'; 12 | import { G2Line } from '../../lines/index.js'; 13 | import { LineParser } from './line_parser.js'; 14 | 15 | const lineParser = LineParser.init() 16 | const g2_lines = lineParser.parse_g2(ATE_LOOP_COUNT.length - 26, ATE_LOOP_COUNT.length - 6); 17 | const tau_lines = lineParser.parse_tau(ATE_LOOP_COUNT.length - 26, ATE_LOOP_COUNT.length - 6); 18 | 19 | const zkp15 = ZkProgram({ 20 | name: 'zkp15', 21 | publicInput: Field, 22 | publicOutput: Field, 23 | methods: { 24 | compute: { 25 | privateInputs: [KzgAccumulator, Provable.Array(Field, ATE_LOOP_COUNT.length)], 26 | async method( 27 | input: Field, 28 | acc: KzgAccumulator, 29 | lines_hashes: Array 30 | ) { 31 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 32 | inDigest.assertEquals(input); 33 | 34 | const lines_digest = ArrayListHasher.hash(lines_hashes); 35 | acc.state.lines_hashes_digest.assertEquals(lines_digest); 36 | 37 | const a_cache = new AffineCache(acc.proof.A); 38 | const b_cache = new AffineCache(acc.proof.negB); 39 | 40 | let idx = 0; 41 | let line_cnt = 0; 42 | 43 | let g; 44 | for (let i = ATE_LOOP_COUNT.length - 26; i < ATE_LOOP_COUNT.length - 6; i++) { 45 | idx = i - 1; 46 | 47 | let g_line = g2_lines[line_cnt]; 48 | let tau_line = tau_lines[line_cnt]; 49 | 
line_cnt += 1; 50 | 51 | g = g_line.psi(a_cache); 52 | g = g.sparse_mul(tau_line.psi(b_cache)) 53 | 54 | if (ATE_LOOP_COUNT[i] === 1 || ATE_LOOP_COUNT[i] === -1) { 55 | let g_line = g2_lines[line_cnt]; 56 | let tau_line = tau_lines[line_cnt]; 57 | line_cnt += 1; 58 | 59 | g = g.sparse_mul(g_line.psi(a_cache)); 60 | g = g.sparse_mul(tau_line.psi(b_cache)); 61 | } 62 | 63 | lines_hashes[idx] = Poseidon.hashPacked(Fp12, g); 64 | } 65 | 66 | let new_lines_hashes_digest = ArrayListHasher.hash(lines_hashes); 67 | acc.state.lines_hashes_digest = new_lines_hashes_digest; 68 | 69 | return Poseidon.hashPacked(KzgAccumulator, acc); 70 | }, 71 | }, 72 | }, 73 | }); 74 | 75 | 76 | const ZKP15Proof = ZkProgram.Proof(zkp15); 77 | export { ZKP15Proof, zkp15 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp16.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | import { AffineCache } from '../../lines/precompute.js'; 11 | import { LineParser } from './line_parser.js'; 12 | 13 | const lineParser = LineParser.init() 14 | const g2_lines = lineParser.parse_g2(ATE_LOOP_COUNT.length - 6, ATE_LOOP_COUNT.length); 15 | const tau_lines = lineParser.parse_tau(ATE_LOOP_COUNT.length - 6, ATE_LOOP_COUNT.length); 16 | 17 | const zkp16 = ZkProgram({ 18 | name: 'zkp16', 19 | publicInput: Field, 20 | publicOutput: Field, 21 | methods: { 22 | compute: { 23 | privateInputs: [KzgAccumulator, Provable.Array(Field, ATE_LOOP_COUNT.length)], 24 | async method( 25 | input: Field, 26 | acc: KzgAccumulator, 27 | lines_hashes: Array 28 | ) { 29 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 30 | inDigest.assertEquals(input); 
31 | 32 | const lines_digest = ArrayListHasher.hash(lines_hashes); 33 | acc.state.lines_hashes_digest.assertEquals(lines_digest); 34 | 35 | const a_cache = new AffineCache(acc.proof.A); 36 | const b_cache = new AffineCache(acc.proof.negB); 37 | 38 | let idx = 0; 39 | let line_cnt = 0; 40 | 41 | let g; 42 | for (let i = ATE_LOOP_COUNT.length - 6; i < ATE_LOOP_COUNT.length; i++) { 43 | idx = i - 1; 44 | 45 | let g_line = g2_lines[line_cnt]; 46 | let tau_line = tau_lines[line_cnt]; 47 | line_cnt += 1; 48 | 49 | g = g_line.psi(a_cache); 50 | g = g.sparse_mul(tau_line.psi(b_cache)) 51 | 52 | if (ATE_LOOP_COUNT[i] === 1 || ATE_LOOP_COUNT[i] === -1) { 53 | let g_line = g2_lines[line_cnt]; 54 | let tau_line = tau_lines[line_cnt]; 55 | line_cnt += 1; 56 | 57 | g = g.sparse_mul(g_line.psi(a_cache)); 58 | g = g.sparse_mul(tau_line.psi(b_cache)); 59 | } 60 | 61 | lines_hashes[idx] = Poseidon.hashPacked(Fp12, g); 62 | } 63 | 64 | let [g2_frob_0, g2_frob_1] = lineParser.frobenius_g2_lines() 65 | let [tau_frob_0, tau_frob_1] = lineParser.frobenius_tau_lines() 66 | 67 | g = g2_frob_0.psi(a_cache) 68 | g = g.sparse_mul(tau_frob_0.psi(b_cache)) 69 | 70 | g = g.sparse_mul(g2_frob_1.psi(a_cache)) 71 | g = g.sparse_mul(tau_frob_1.psi(b_cache)) 72 | 73 | lines_hashes[ATE_LOOP_COUNT.length - 1] = Poseidon.hashPacked(Fp12, g); 74 | 75 | let new_lines_hashes_digest = ArrayListHasher.hash(lines_hashes); 76 | acc.state.lines_hashes_digest = new_lines_hashes_digest; 77 | 78 | return Poseidon.hashPacked(KzgAccumulator, acc); 79 | }, 80 | }, 81 | }, 82 | }); 83 | 84 | 85 | const ZKP16Proof = ZkProgram.Proof(zkp16); 86 | export { ZKP16Proof, zkp16 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp17.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from 
'../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | 11 | const zkp17 = ZkProgram({ 12 | name: 'zkp17', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Fp12, 9), Provable.Array(Field, ATE_LOOP_COUNT.length - 9)], 18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | g_chunk: Array, 22 | rhs_lines_hashes: Array 23 | ) { 24 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 25 | inDigest.assertEquals(input); 26 | 27 | const opening = ArrayListHasher.open([], g_chunk, rhs_lines_hashes) 28 | acc.state.lines_hashes_digest.assertEquals(opening) 29 | 30 | let f = acc.state.f; 31 | 32 | let idx = 0; 33 | for (let i = 1; i < 10; i++) { 34 | f = f.square().mul(g_chunk[idx]); 35 | 36 | if (ATE_LOOP_COUNT[i] == 1) { 37 | f = f.mul(acc.proof.c_inv); 38 | } 39 | 40 | if (ATE_LOOP_COUNT[i] == -1) { 41 | f = f.mul(acc.proof.c); 42 | } 43 | 44 | idx += 1 45 | } 46 | 47 | acc.state.f = f; 48 | return Poseidon.hashPacked(KzgAccumulator, acc); 49 | }, 50 | }, 51 | }, 52 | }); 53 | 54 | 55 | const ZKP17Proof = ZkProgram.Proof(zkp17); 56 | export { ZKP17Proof, zkp17 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp18.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | 11 | const zkp18 = ZkProgram({ 12 | name: 'zkp18', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Field, 9), Provable.Array(Fp12, 11), Provable.Array(Field, 
ATE_LOOP_COUNT.length - 9 - 11)], 18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | lhs_line_hashes: Array, 22 | g_chunk: Array, 23 | rhs_lines_hashes: Array 24 | ) { 25 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 26 | inDigest.assertEquals(input); 27 | 28 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes) 29 | acc.state.lines_hashes_digest.assertEquals(opening) 30 | 31 | let f = acc.state.f; 32 | 33 | let idx = 0; 34 | for (let i = 10; i < 21; i++) { 35 | f = f.square().mul(g_chunk[idx]); 36 | 37 | if (ATE_LOOP_COUNT[i] == 1) { 38 | f = f.mul(acc.proof.c_inv); 39 | } 40 | 41 | if (ATE_LOOP_COUNT[i] == -1) { 42 | f = f.mul(acc.proof.c); 43 | } 44 | 45 | idx += 1; 46 | } 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(KzgAccumulator, acc); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP18Proof = ZkProgram.Proof(zkp18); 57 | export { ZKP18Proof, zkp18 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp19.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | 11 | const zkp19 = ZkProgram({ 12 | name: 'zkp19', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Field, 20), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 20 - 11)], 18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | lhs_line_hashes: Array, 22 | g_chunk: Array, 23 | rhs_lines_hashes: Array 24 | ) { 25 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 26 | inDigest.assertEquals(input); 27 | 28 | const opening = 
ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes) 29 | acc.state.lines_hashes_digest.assertEquals(opening) 30 | 31 | let f = acc.state.f; 32 | 33 | let idx = 0; 34 | for (let i = 21; i < 32; i++) { 35 | f = f.square().mul(g_chunk[idx]); 36 | 37 | if (ATE_LOOP_COUNT[i] == 1) { 38 | f = f.mul(acc.proof.c_inv); 39 | } 40 | 41 | if (ATE_LOOP_COUNT[i] == -1) { 42 | f = f.mul(acc.proof.c); 43 | } 44 | 45 | idx += 1; 46 | } 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(KzgAccumulator, acc); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP19Proof = ZkProgram.Proof(zkp19); 57 | export { ZKP19Proof, zkp19 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp2.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { fold_quotient } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp2 = ZkProgram({ 11 | name: 'zkp2', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [hx, hy] = fold_quotient( 25 | acc.proof.h0_x, 26 | acc.proof.h0_y, 27 | acc.proof.h1_x, 28 | acc.proof.h1_y, 29 | acc.proof.h2_x, 30 | acc.proof.h2_y, 31 | acc.fs.zeta, 32 | acc.state.zeta_pow_n, 33 | acc.state.zh_eval 34 | ) 35 | 36 | acc.state.hx = hx; 37 | acc.state.hy = hy; 38 | 39 | return Poseidon.hashPacked(Accumulator, acc); 40 | }, 41 | }, 42 | }, 43 | }); 44 | 45 | 46 | const ZKP2Proof = ZkProgram.Proof(zkp2); 47 | export { ZKP2Proof, zkp2 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp20.ts: 
-------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | 11 | const zkp20 = ZkProgram({ 12 | name: 'zkp20', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Field, 31), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 31 - 11)], 18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | lhs_line_hashes: Array, 22 | g_chunk: Array, 23 | rhs_lines_hashes: Array 24 | ) { 25 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 26 | inDigest.assertEquals(input); 27 | 28 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes) 29 | acc.state.lines_hashes_digest.assertEquals(opening) 30 | 31 | let f = acc.state.f; 32 | 33 | let idx = 0; 34 | for (let i = 32; i < 43; i++) { 35 | f = f.square().mul(g_chunk[idx]); 36 | 37 | if (ATE_LOOP_COUNT[i] == 1) { 38 | f = f.mul(acc.proof.c_inv); 39 | } 40 | 41 | if (ATE_LOOP_COUNT[i] == -1) { 42 | f = f.mul(acc.proof.c); 43 | } 44 | 45 | idx += 1; 46 | } 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(KzgAccumulator, acc); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP20Proof = ZkProgram.Proof(zkp20); 57 | export { ZKP20Proof, zkp20 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp21.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT 
} from '../../towers/consts.js'; 10 | 11 | const zkp21 = ZkProgram({ 12 | name: 'zkp21', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Field, 42), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 42 - 11)], 18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | lhs_line_hashes: Array, 22 | g_chunk: Array, 23 | rhs_lines_hashes: Array 24 | ) { 25 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 26 | inDigest.assertEquals(input); 27 | 28 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes) 29 | acc.state.lines_hashes_digest.assertEquals(opening) 30 | 31 | let f = acc.state.f; 32 | 33 | let idx = 0; 34 | for (let i = 43; i < 54; i++) { 35 | f = f.square().mul(g_chunk[idx]); 36 | 37 | if (ATE_LOOP_COUNT[i] == 1) { 38 | f = f.mul(acc.proof.c_inv); 39 | } 40 | 41 | if (ATE_LOOP_COUNT[i] == -1) { 42 | f = f.mul(acc.proof.c); 43 | } 44 | 45 | idx += 1; 46 | } 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(KzgAccumulator, acc); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP21Proof = ZkProgram.Proof(zkp21); 57 | export { ZKP21Proof, zkp21 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp22.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | 11 | const zkp22 = ZkProgram({ 12 | name: 'zkp22', 13 | publicInput: Field, 14 | publicOutput: Field, 15 | methods: { 16 | compute: { 17 | privateInputs: [KzgAccumulator, Provable.Array(Field, 53), Provable.Array(Fp12, 11), Provable.Array(Field, ATE_LOOP_COUNT.length - 53 - 11)], 
18 | async method( 19 | input: Field, 20 | acc: KzgAccumulator, 21 | lhs_line_hashes: Array, 22 | g_chunk: Array, 23 | rhs_lines_hashes: Array 24 | ) { 25 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 26 | inDigest.assertEquals(input); 27 | 28 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, rhs_lines_hashes) 29 | acc.state.lines_hashes_digest.assertEquals(opening) 30 | 31 | let f = acc.state.f; 32 | 33 | let idx = 0; 34 | for (let i = 54; i < 65; i++) { 35 | f = f.square().mul(g_chunk[idx]); 36 | 37 | if (ATE_LOOP_COUNT[i] == 1) { 38 | f = f.mul(acc.proof.c_inv); 39 | } 40 | 41 | if (ATE_LOOP_COUNT[i] == -1) { 42 | f = f.mul(acc.proof.c); 43 | } 44 | 45 | idx += 1; 46 | } 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(KzgAccumulator, acc); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP22Proof = ZkProgram.Proof(zkp22); 57 | export { ZKP22Proof, zkp22 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp23.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | Provable, 6 | } from 'o1js'; 7 | import { ArrayListHasher, KzgAccumulator } from '../../kzg/structs.js'; 8 | import { Fp12 } from '../../towers/fp12.js'; 9 | import { ATE_LOOP_COUNT } from '../../towers/consts.js'; 10 | import { make_w27 } from '../helpers.js'; 11 | import { FrC } from '../../towers/fr.js'; 12 | 13 | const w27 = make_w27() 14 | const w27_sq = w27.square(); 15 | 16 | const zkp23 = ZkProgram({ 17 | name: 'zkp23', 18 | publicInput: Field, 19 | publicOutput: Field, 20 | methods: { 21 | compute: { 22 | privateInputs: [KzgAccumulator, Provable.Array(Field, 64), Provable.Array(Fp12, 1)], 23 | async method( 24 | input: Field, 25 | acc: KzgAccumulator, 26 | lhs_line_hashes: Array, 27 | g_chunk: Array, 28 | ) { 29 | const inDigest = Poseidon.hashPacked(KzgAccumulator, acc); 30 | 
inDigest.assertEquals(input); 31 | 32 | const opening = ArrayListHasher.open(lhs_line_hashes, g_chunk, []) 33 | acc.state.lines_hashes_digest.assertEquals(opening) 34 | 35 | let f = acc.state.f; 36 | f = f.mul(g_chunk[0]); 37 | 38 | f = f 39 | .mul(acc.proof.c_inv.frobenius_pow_p()) 40 | .mul(acc.proof.c.frobenius_pow_p_squared()) 41 | .mul(acc.proof.c_inv.frobenius_pow_p_cubed()); 42 | 43 | const shift = Provable.switch([acc.proof.shift_power.equals(Field(0)), acc.proof.shift_power.equals(Field(1)), acc.proof.shift_power.equals(Field(2))], Fp12, [Fp12.one(), w27, w27_sq]); 44 | f = f.mul(shift); 45 | 46 | f.assert_equals(Fp12.one()); 47 | 48 | acc.state.f = f; 49 | return Poseidon.hashPacked(Provable.Array(FrC.provable, 2), [acc.proof.pi0, acc.proof.pi1]); 50 | }, 51 | }, 52 | }, 53 | }); 54 | 55 | 56 | const ZKP23Proof = ZkProgram.Proof(zkp23); 57 | export { ZKP23Proof, zkp23 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp3.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { customPiLagrange, opening_of_linearized_polynomial, pi_contribution } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp3 = ZkProgram({ 11 | name: 'zkp3', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const pis = pi_contribution([acc.state.pi0, acc.state.pi1], acc.fs.zeta, acc.state.zh_eval, VK.inv_domain_size, VK.omega) 25 | 26 | // ~32k 27 | const l_pi_commit = customPiLagrange(acc.fs.zeta, acc.state.zh_eval, acc.proof.qcp_0_wire_x, acc.proof.qcp_0_wire_y, VK) 28 | const pi = 
pis.add(l_pi_commit).assertCanonical() 29 | 30 | // very cheap 31 | const linearized_opening = opening_of_linearized_polynomial(acc.proof, acc.fs.alpha, acc.fs.beta, acc.fs.gamma, pi, acc.state.alpha_2_l0) 32 | 33 | acc.state.pi = pi; 34 | acc.state.linearized_opening = linearized_opening; 35 | 36 | return Poseidon.hashPacked(Accumulator, acc); 37 | }, 38 | }, 39 | }, 40 | }); 41 | 42 | 43 | const ZKP3Proof = ZkProgram.Proof(zkp3); 44 | export { ZKP3Proof, zkp3 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp4.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { compute_commitment_linearized_polynomial_split_0, customPiLagrange, opening_of_linearized_polynomial, pi_contribution } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp4 = ZkProgram({ 11 | name: 'zkp4', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [lcm_x, lcm_y] = compute_commitment_linearized_polynomial_split_0(acc.proof, VK); 25 | 26 | acc.state.lcm_x = lcm_x; 27 | acc.state.lcm_y = lcm_y; 28 | 29 | return Poseidon.hashPacked(Accumulator, acc); 30 | }, 31 | }, 32 | }, 33 | }); 34 | 35 | 36 | const ZKP4Proof = ZkProgram.Proof(zkp4); 37 | export { ZKP4Proof, zkp4 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp5.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 
| import { compute_commitment_linearized_polynomial_split_0, compute_commitment_linearized_polynomial_split_1, customPiLagrange, opening_of_linearized_polynomial, pi_contribution } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp5 = ZkProgram({ 11 | name: 'zkp5', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [lcm_x, lcm_y] = compute_commitment_linearized_polynomial_split_1( 25 | acc.state.lcm_x, 26 | acc.state.lcm_y, 27 | acc.proof, 28 | VK, 29 | acc.fs.beta, 30 | acc.fs.gamma, 31 | acc.fs.alpha 32 | ); 33 | 34 | acc.state.lcm_x = lcm_x; 35 | acc.state.lcm_y = lcm_y; 36 | 37 | return Poseidon.hashPacked(Accumulator, acc); 38 | }, 39 | }, 40 | }, 41 | }); 42 | 43 | 44 | const ZKP5Proof = ZkProgram.Proof(zkp5); 45 | export { ZKP5Proof, zkp5 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp6.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { compute_commitment_linearized_polynomial_split_0, compute_commitment_linearized_polynomial_split_1, compute_commitment_linearized_polynomial_split_2, customPiLagrange, opening_of_linearized_polynomial, pi_contribution } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp6 = ZkProgram({ 11 | name: 'zkp6', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | 
inDigest.assertEquals(input); 23 | 24 | const [lcm_x, lcm_y] = compute_commitment_linearized_polynomial_split_2( 25 | acc.state.lcm_x, 26 | acc.state.lcm_y, 27 | acc.proof, 28 | VK, 29 | acc.fs.beta, 30 | acc.fs.gamma, 31 | acc.fs.alpha, 32 | acc.fs.zeta, 33 | acc.state.alpha_2_l0, 34 | acc.state.hx, 35 | acc.state.hy 36 | ); 37 | 38 | acc.state.lcm_x = lcm_x; 39 | acc.state.lcm_y = lcm_y; 40 | 41 | return Poseidon.hashPacked(Accumulator, acc); 42 | }, 43 | }, 44 | }, 45 | }); 46 | 47 | 48 | const ZKP6Proof = ZkProgram.Proof(zkp6); 49 | export { ZKP6Proof, zkp6 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp7.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { VK } from '../vk.js'; 8 | 9 | const zkp7 = ZkProgram({ 10 | name: 'zkp7', 11 | publicInput: Field, 12 | publicOutput: Field, 13 | methods: { 14 | compute: { 15 | privateInputs: [Accumulator], 16 | async method( 17 | input: Field, 18 | acc: Accumulator 19 | ) { 20 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 21 | inDigest.assertEquals(input); 22 | 23 | let H = acc.fs.gammaKzgDigest_part0(acc.proof, VK, acc.state.lcm_x, acc.state.lcm_y, acc.state.linearized_opening); 24 | acc.state.H = H; 25 | 26 | return Poseidon.hashPacked(Accumulator, acc); 27 | }, 28 | }, 29 | }, 30 | }); 31 | 32 | 33 | const ZKP7Proof = ZkProgram.Proof(zkp7); 34 | export { ZKP7Proof, zkp7 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp8.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { fold_state_0 } from '../piop/plonk_utils.js'; 8 | 9 | const zkp8 = 
ZkProgram({ 10 | name: 'zkp8', 11 | publicInput: Field, 12 | publicOutput: Field, 13 | methods: { 14 | compute: { 15 | privateInputs: [Accumulator], 16 | async method( 17 | input: Field, 18 | acc: Accumulator 19 | ) { 20 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 21 | inDigest.assertEquals(input); 22 | 23 | acc.fs.gammaKzgDigest_part1(acc.proof, acc.state.H); 24 | acc.fs.squeezeGammaKzgFromDigest(); 25 | 26 | const [cm_x, cm_y, cm_opening] = fold_state_0(acc.proof, acc.state.lcm_x, acc.state.lcm_y, acc.state.linearized_opening, acc.fs.gamma_kzg); 27 | 28 | acc.state.cm_x = cm_x; 29 | acc.state.cm_y = cm_y; 30 | acc.state.cm_opening = cm_opening; 31 | 32 | return Poseidon.hashPacked(Accumulator, acc); 33 | }, 34 | }, 35 | }, 36 | }); 37 | 38 | 39 | const ZKP8Proof = ZkProgram.Proof(zkp8); 40 | export { ZKP8Proof, zkp8 } -------------------------------------------------------------------------------- /contracts/src/plonk/recursion/zkp9.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ZkProgram, 3 | Field, 4 | Poseidon, 5 | } from 'o1js'; 6 | import { Accumulator } from '../accumulator.js'; 7 | import { fold_state_0, fold_state_1 } from '../piop/plonk_utils.js'; 8 | import { VK } from '../vk.js'; 9 | 10 | const zkp9 = ZkProgram({ 11 | name: 'zkp9', 12 | publicInput: Field, 13 | publicOutput: Field, 14 | methods: { 15 | compute: { 16 | privateInputs: [Accumulator], 17 | async method( 18 | input: Field, 19 | acc: Accumulator 20 | ) { 21 | const inDigest = Poseidon.hashPacked(Accumulator, acc); 22 | inDigest.assertEquals(input); 23 | 24 | const [cm_x, cm_y] = fold_state_1(VK, acc.proof, acc.state.cm_x, acc.state.cm_y, acc.fs.gamma_kzg); 25 | 26 | acc.state.cm_x = cm_x; 27 | acc.state.cm_y = cm_y; 28 | 29 | return Poseidon.hashPacked(Accumulator, acc); 30 | }, 31 | }, 32 | }, 33 | }); 34 | 35 | 36 | const ZKP9Proof = ZkProgram.Proof(zkp9); 37 | export { ZKP9Proof, zkp9 } 
-------------------------------------------------------------------------------- /contracts/src/plonk/serialize_mlo.ts: -------------------------------------------------------------------------------- 1 | import { getMlo } from "./get_mlo.js"; 2 | import fs from "fs" 3 | 4 | const args = process.argv; 5 | // assert(args.length >= 6) 6 | 7 | const mloPath = args[2] 8 | const hexProof = args[3] 9 | const programVk = args[4] 10 | const hexPi = args[5] 11 | 12 | const mlo = getMlo(hexProof, programVk, hexPi) 13 | fs.writeFileSync(mloPath, mlo.toJSON(), 'utf-8'); 14 | console.log(`JSON data has been written to ${mloPath}`) -------------------------------------------------------------------------------- /contracts/src/plonk/utils.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert" 2 | 3 | function assertPointOnBn(x: bigint, y: bigint) { 4 | const q = 21888242871839275222246405745257275088696311157297823662689037894645226208583n 5 | 6 | assert(x < q) 7 | assert(y < q) 8 | 9 | const v = y * y - x*x*x - 3n 10 | assert(v % q === 0n) 11 | } 12 | 13 | function assertInBnField(x: bigint) { 14 | const r = 21888242871839275222246405745257275088548364400416034343698204186575808495617n 15 | assert(x < r) 16 | } 17 | 18 | // assumes that string is valid hex 19 | function unsafeHex2Bytes(hex: string): Uint8Array { 20 | const key = '0123456789abcdef' 21 | 22 | const bytes = [] 23 | for (let i = 2; i < hex.length; i+= 2) { 24 | let byte = key.indexOf(hex[i]) << 4 25 | byte += key.indexOf(hex[i + 1]) 26 | 27 | bytes.push(byte) 28 | } 29 | 30 | return Uint8Array.from(bytes) 31 | } 32 | 33 | // counts amount of uin256 numbers in hex string 34 | function numOfUin256s(hex: string) { 35 | // each hex char is 4 bits so 64 chars is 1 uint256 36 | // skip "0x" and skip first 4 bytes that Sp1.Verifier skips 37 | 38 | assert((hex.length - 10) % 64 == 0) 39 | return (hex.length - 10) / 64 40 | } 41 | 42 | export { assertPointOnBn, 
assertInBnField, unsafeHex2Bytes, numOfUin256s } -------------------------------------------------------------------------------- /contracts/src/plonk/verifier.ts: -------------------------------------------------------------------------------- 1 | import { G1Affine } from "../ec/index.js"; 2 | import { Fp12, FrC } from "../towers/index.js"; 3 | import { AuXWitness } from "./aux_witness.js"; 4 | import { make_w27 } from "./helpers.js"; 5 | import { KZGPairing } from "./mm_loop/multi_miller.js"; 6 | import { PlonkVerifierPIOP } from "./piop/piop.js"; 7 | import { Sp1PlonkProof } from "./proof.js"; 8 | import { Sp1PlonkVk } from "./vk.js"; 9 | 10 | class Sp1PlonkVerifier { 11 | piopV: PlonkVerifierPIOP 12 | kzgP: KZGPairing 13 | 14 | constructor( 15 | VK: Sp1PlonkVk, 16 | g2_lines: string, 17 | tau_lines: string, 18 | ) { 19 | let w27 = make_w27() 20 | this.piopV = new PlonkVerifierPIOP(VK) 21 | this.kzgP = new KZGPairing(g2_lines, tau_lines, w27) 22 | } 23 | 24 | verify(proof: Sp1PlonkProof, pi0: FrC, pi1: FrC, auxWitness: AuXWitness) { 25 | const [kzg_cm_x, kzg_cm_y, neg_fq_x, neg_fq_y] = this.piopV.piop(proof, pi0, pi1) 26 | 27 | const A = new G1Affine({ x: kzg_cm_x, y: kzg_cm_y }); 28 | const negB = new G1Affine({ x: neg_fq_x, y: neg_fq_y }); 29 | 30 | this.kzgP.proveEqual(A, negB, auxWitness.shift_power, auxWitness.c); 31 | } 32 | 33 | computeMlo(proof: Sp1PlonkProof, pi0: FrC, pi1: FrC): Fp12 { 34 | const [kzg_cm_x, kzg_cm_y, neg_fq_x, neg_fq_y] = this.piopV.piop(proof, pi0, pi1); 35 | const A = new G1Affine({ x: kzg_cm_x, y: kzg_cm_y }); 36 | const negB = new G1Affine({ x: neg_fq_x, y: neg_fq_y }); 37 | 38 | return this.kzgP.multiMillerLoop(A, negB) 39 | } 40 | } 41 | 42 | export { Sp1PlonkVerifier } -------------------------------------------------------------------------------- /contracts/src/risc_zero/prove_zkps.ts: -------------------------------------------------------------------------------- 1 | import { Provable, Poseidon, UInt64, verify, MerkleTree, 
Mina, AccountUpdate, Cache } from "o1js"; 2 | import { riscZeroExampleVerifier, RiscZeroExampleProof } from "./verify_risc_zero.js"; 3 | import { NodeProofLeft } from "../structs.js"; 4 | import fs from "fs"; 5 | import { ethers } from "ethers"; 6 | import { Proof } from "../groth/proof.js"; 7 | import { FrC } from "../towers/fr.js"; 8 | import { VK } from "../groth/vk_from_env.js"; 9 | 10 | const args = process.argv; 11 | 12 | async function prove_risc_zero_example() { 13 | const riscZeroExampleTreeProofPath = args[3]; 14 | const riscZeroExampleGroth16ProofPath = args[4]; 15 | const riscZeroExampleOutputProofPath = args[5]; 16 | const cacheDir = args[6]; 17 | 18 | const riscZeroGroth16Proof = Proof.parse(VK, riscZeroExampleGroth16ProofPath); 19 | 20 | const riscZeroPlonkProof = await NodeProofLeft.fromJSON(JSON.parse(fs.readFileSync(riscZeroExampleTreeProofPath, 'utf8'))); 21 | 22 | const vk = (await riscZeroExampleVerifier.compile({cache: Cache.FileSystem(cacheDir)})).verificationKey; 23 | 24 | const proof = await riscZeroExampleVerifier.compute(Poseidon.hashPacked(Provable.Array(FrC.provable, 5), riscZeroGroth16Proof.pis), riscZeroPlonkProof, riscZeroGroth16Proof.pis); 25 | 26 | const valid = await verify(proof, vk); 27 | 28 | fs.writeFileSync(riscZeroExampleOutputProofPath, JSON.stringify(proof), 'utf8'); 29 | console.log("valid risc zero proof?: ", valid); 30 | } 31 | 32 | switch(args[2]) { 33 | case 'risc_zero': 34 | await prove_risc_zero_example(); 35 | break; 36 | } -------------------------------------------------------------------------------- /contracts/src/risc_zero/verify_risc_zero.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Field, 3 | PrivateKey, 4 | Provable, 5 | SmartContract, 6 | State, 7 | VerificationKey, 8 | method, 9 | state, 10 | Poseidon, 11 | UInt8, 12 | Bytes, 13 | Gadgets, 14 | ZkProgram, 15 | Struct, 16 | UInt64, 17 | Undefined, 18 | } from 'o1js'; 19 | import { FrC } from 
def bit_string_8(x):
    """Return the binary representation of a non-negative int, left-padded
    with zeros so its length is a whole number of bytes (multiple of 8)."""
    bn = bin(x)[2:]
    # (-len) % 8 is exactly the number of zeros needed to reach the next
    # byte boundary (0 when already aligned).
    return "0" * ((-len(bn)) % 8) + bn

def bits_to_bytes(bits):
    """Pack a string of '0'/'1' characters (whole bytes only) into bytes."""
    assert len(bits) % 8 == 0
    return bytes(int(bits[i:i + 8], 2) for i in range(0, len(bits), 8))
q = 21888242871839275222246405745257275088548364400416034343698204186575808495617

# we want to compute x % q where x is a 256-bit number while only working with
# values below 2^254: reduce the low 254 bits directly and add precomputed
# residues for the two top bits.

sh254 = (2**254) % q
sh255 = (2**255) % q

print(sh254)
print(sh255)

def bin_256(x):
    # 256-char big-endian bit string of x
    assert x < 2**256
    return bin(x)[2:].rjust(256, '0')


def modulus(x):
    bits = bin_256(x)

    # low 254 bits, reduced mod q
    low = int(bits[2:], 2)
    low_mod = low % q

    top_bit = int(bits[0])   # weight 2^255
    next_bit = int(bits[1])  # weight 2^254

    mid = sh254 if next_bit == 1 else 0
    hi = sh255 if top_bit == 1 else 0

    # sanity: the decomposition reconstructs x exactly
    assert x == low + next_bit * 2**254 + top_bit * 2**255

    return (low_mod + mid + hi) % q


x = 41934624648789633692325845435058116789193741866735454986050232623745889205117
assert x % q == modulus(x)
print(x % q)
-------------------------------------------------------------------------------- 1 | import { Bytes, Hash, Gadgets, UInt8, Field, UInt32, Provable } from "o1js"; 2 | 3 | /* 4 | we have 741 bytes to hash: 5 | 6 | l = 741 * 8 = 5928 7 | l + 1 = 5928 8 | l + 1 % 512 = 297 9 | 10 | k = 448 - 297 = 151 11 | 12 | bin_len(l) = ceil(log2(l * 8)) = 13 13 | 14 | 15 | so padding should be: 16 | 1 + 0*(k + 64 - 13) + bin(l) 17 | */ 18 | 19 | const padding = [ 20 | UInt8.from(0x80n), 21 | UInt8.from(0), 22 | UInt8.from(0), 23 | UInt8.from(0), 24 | UInt8.from(0), 25 | UInt8.from(0), 26 | UInt8.from(0), 27 | UInt8.from(0), 28 | UInt8.from(0), 29 | UInt8.from(0), 30 | UInt8.from(0), 31 | UInt8.from(0), 32 | UInt8.from(0), 33 | UInt8.from(0), 34 | UInt8.from(0), 35 | UInt8.from(0), 36 | UInt8.from(0), 37 | UInt8.from(0), 38 | UInt8.from(0), 39 | UInt8.from(0), 40 | UInt8.from(0), 41 | UInt8.from(0), 42 | UInt8.from(0), 43 | UInt8.from(0), 44 | UInt8.from(0), 45 | UInt8.from(0x17n), 46 | UInt8.from(0x28n), 47 | ] 48 | 49 | let s = "a".repeat(741) 50 | 51 | class Bytes741 extends Bytes(741) {} 52 | let preimageBytes = Bytes741.fromString(s); 53 | let hash = Gadgets.SHA256.hash(preimageBytes); 54 | console.log(hash.toHex()); 55 | //96505839157e4f0984258b89bda90c3661bfce8505d34120f2989236f4c576a2 56 | 57 | let preimage: UInt8[] = preimageBytes.bytes.concat(padding); 58 | 59 | 60 | function bytesToWord(wordBytes: UInt8[]): Field { 61 | return wordBytes.reduce((acc, byte, idx) => { 62 | const shift = 1n << BigInt(8 * idx); 63 | return acc.add(byte.value.mul(shift)); 64 | }, Field.from(0)); 65 | } 66 | 67 | function wordToBytes(word: Field, bytesPerWord = 8): UInt8[] { 68 | let bytes = Provable.witness(Provable.Array(UInt8, bytesPerWord), () => { 69 | let w = word.toBigInt(); 70 | return Array.from({ length: bytesPerWord }, (_, k) => 71 | UInt8.from((w >> BigInt(8 * k)) & 0xffn) 72 | ); 73 | }); 74 | 75 | // check decomposition 76 | bytesToWord(bytes).assertEquals(word); 77 | 78 | return 
bytes; 79 | } 80 | 81 | 82 | const chunks: UInt32[] = []; 83 | 84 | let H = Gadgets.SHA256.initialState 85 | 86 | for (let i = 0; i < preimage.length; i += 4) { 87 | const chunk = UInt32.Unsafe.fromField( 88 | bytesToWord(preimage.slice(i, i + 4).reverse()) 89 | ) 90 | chunks.push(chunk); 91 | } 92 | 93 | const n = 12; 94 | 95 | for (let i = 0; i < n; i++) { 96 | const messageBlock = chunks.slice(16*i, 16*(i+1)) 97 | let W = Gadgets.SHA256.createMessageSchedule(messageBlock) 98 | H = Gadgets.SHA256.compression(H, W) 99 | } 100 | 101 | const digest_bytes = Bytes.from(H.map((x) => wordToBytes(x.value, 4).reverse()).flat()); 102 | console.log(digest_bytes.toHex()) -------------------------------------------------------------------------------- /contracts/src/sha/update.ts: -------------------------------------------------------------------------------- 1 | import { Bytes, Hash, Gadgets, UInt32 } from "o1js"; 2 | 3 | let preimage = 'aaaabbbb'; 4 | class Bytes8 extends Bytes(8) {} 5 | let preimageBytes = Bytes8.fromString(preimage); 6 | 7 | let hash = Gadgets.SHA256.hash(preimageBytes); 8 | console.log(hash.toHex()); // e5c1edb50ff8b4fcc3ead3a845ffbe1ad51c9dae5d44335a5c333b57ac8df062 9 | 10 | // let's now split sha into two runs 11 | class Bytes4 extends Bytes(4) {} 12 | 13 | const hash_1 = Gadgets.SHA256.hash(Bytes4.fromString("aaaa")); 14 | console.log(hash_1.toHex()); 15 | 16 | const h2 = Gadgets.SHA256.initialState 17 | 18 | // const H: UInt32[] = []; 19 | // for (let i = 0; i < 32; i+= 4) { 20 | // H.push(hash_1.bytes[i].toUInt32()) 21 | // } 22 | 23 | // const hash_2 = Gadgets.SHA256.update(Bytes4.fromString("bbbb"), H) 24 | // console.log(hash_2.toHex()); 25 | // const W = Gadgets.SHA256.createMessageSchedule() -------------------------------------------------------------------------------- /contracts/src/sha/utils.ts: -------------------------------------------------------------------------------- 1 | import { Bool, Field, UInt8, Provable } from "o1js"; 2 | 
import { FpC, FrC } from "../towers/index.js"; 3 | 4 | const provableBn254BaseFieldToBytes = (x: FpC) => { 5 | // append 2 zero bits to make it a multiple of 8 (256 bits) 6 | let bits = [Bool(false), Bool(false)]; 7 | bits = x.toBits().concat(bits) 8 | 9 | const chunks: UInt8[] = []; 10 | 11 | for (let i = 0; i < bits.length; i += 8) { 12 | let chunk = Field.fromBits(bits.slice(i, i + 8)); 13 | chunks.push(UInt8.Unsafe.fromField(chunk)); 14 | } 15 | 16 | return chunks.reverse(); 17 | } 18 | 19 | const provableBn254ScalarFieldToBytes = (x: FrC) => { 20 | // append 2 zero bits to make it a multiple of 8 (256 bits) 21 | let bits = [Bool(false), Bool(false)]; 22 | bits = x.toBits().concat(bits) 23 | 24 | const chunks: UInt8[] = []; 25 | 26 | for (let i = 0; i < bits.length; i += 8) { 27 | let chunk = Field.fromBits(bits.slice(i, i + 8)); 28 | chunks.push(UInt8.Unsafe.fromField(chunk)); 29 | } 30 | 31 | return chunks.reverse(); 32 | } 33 | 34 | function bytesToWord(wordBytes: UInt8[]): Field { 35 | return wordBytes.reduce((acc, byte, idx) => { 36 | const shift = 1n << BigInt(8 * idx); 37 | return acc.add(byte.value.mul(shift)); 38 | }, Field.from(0)); 39 | } 40 | 41 | function wordToBytes(word: Field, bytesPerWord = 8): UInt8[] { 42 | let bytes = Provable.witness(Provable.Array(UInt8, bytesPerWord), () => { 43 | let w = word.toBigInt(); 44 | return Array.from({ length: bytesPerWord }, (_, k) => 45 | UInt8.from((w >> BigInt(8 * k)) & 0xffn) 46 | ); 47 | }); 48 | 49 | // check decomposition 50 | bytesToWord(bytes).assertEquals(word); 51 | 52 | return bytes; 53 | } 54 | 55 | 56 | 57 | export { provableBn254BaseFieldToBytes, provableBn254ScalarFieldToBytes, wordToBytes, bytesToWord } 58 | 59 | 60 | -------------------------------------------------------------------------------- /contracts/src/structs.ts: -------------------------------------------------------------------------------- 1 | import { DynamicProof, Field, Struct, Undefined, FeatureFlags } from "o1js" 2 | 3 | 
class ZkpProofLeft extends DynamicProof { 4 | static publicInputType = Field; 5 | static publicOutputType = Field; 6 | static maxProofsVerified = 0 as const; 7 | 8 | static featureFlags = FeatureFlags.allMaybe; 9 | } 10 | 11 | class ZkpProofRight extends DynamicProof { 12 | static publicInputType = Field; 13 | static publicOutputType = Field; 14 | static maxProofsVerified = 0 as const; 15 | 16 | static featureFlags = FeatureFlags.allMaybe; 17 | } 18 | 19 | class SubtreeCarry extends Struct({ 20 | leftIn: Field, 21 | rightOut: Field, 22 | subtreeVkDigest: Field 23 | }) {}; 24 | 25 | class NodeProofLeft extends DynamicProof { 26 | static publicInputType = Undefined; 27 | static publicOutputType = SubtreeCarry; 28 | static maxProofsVerified = 2 as const; 29 | 30 | static featureFlags = FeatureFlags.allMaybe; 31 | } 32 | 33 | class NodeProofRight extends DynamicProof { 34 | static publicInputType = Undefined; 35 | static publicOutputType = SubtreeCarry; 36 | static maxProofsVerified = 2 as const; 37 | 38 | static featureFlags = FeatureFlags.allMaybe; 39 | } 40 | 41 | const NOTHING_UP_MY_SLEEVE = Field(0); 42 | 43 | export { ZkpProofLeft, ZkpProofRight, NodeProofLeft, NodeProofRight, SubtreeCarry, NOTHING_UP_MY_SLEEVE } 44 | -------------------------------------------------------------------------------- /contracts/src/towers/assert-mul.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * More efficient multiplication of sums 3 | */ 4 | import { 5 | AlmostForeignField, 6 | Field, 7 | ForeignField, 8 | Gadgets, 9 | Provable, 10 | assert, 11 | } from 'o1js'; 12 | 13 | export { assertMul, UnreducedSum, AlmostReducedSum }; 14 | 15 | type ForeignFieldSum = ReturnType; 16 | 17 | // typed version of `Gadgets.ForeignField.assertMul()` 18 | 19 | function assertMul( 20 | a: AlmostReducedSum | AlmostForeignField, 21 | b: AlmostReducedSum | AlmostForeignField, 22 | c: UnreducedSum | ForeignField 23 | ) { 24 | assert(a.modulus === 
b.modulus && a.modulus === c.modulus); 25 | a = AlmostReducedSum.from(a); 26 | b = AlmostReducedSum.from(b); 27 | c = UnreducedSum.from(c); 28 | 29 | // finish the b and c sums with a zero gate 30 | let bF = b.value.finishForMulInput(a.modulus); 31 | let cF = c.value.finish(a.modulus); 32 | 33 | // if not all inputs are constant, but either b or c are, 34 | // move them into variables first to prevent them from breaking the gate chain 35 | let allConstant = 36 | a.value.isConstant() && 37 | Gadgets.Field3.isConstant(bF) && 38 | Gadgets.Field3.isConstant(cF); 39 | 40 | if (!allConstant) { 41 | let [b0, b1, b2] = bF.map(toVariable); 42 | bF = [b0, b1, b2]; 43 | let [c0, c1, c2] = cF.map(toVariable); 44 | cF = [c0, c1, c2]; 45 | } 46 | 47 | Gadgets.ForeignField.assertMul(a.value, bF, cF, a.modulus); 48 | } 49 | 50 | // typed wrappers around `Gadgets.ForeignField.Sum` 51 | 52 | class UnreducedSum { 53 | value: ForeignFieldSum; 54 | modulus: bigint; 55 | type: typeof ForeignField; 56 | 57 | constructor(input: ForeignField) { 58 | this.value = Gadgets.ForeignField.Sum(input.value); 59 | this.modulus = input.modulus; 60 | this.type = input.Constructor; 61 | } 62 | 63 | add(input: ForeignField) { 64 | this.value = this.value.add(input.value); 65 | return this; 66 | } 67 | sub(input: ForeignField) { 68 | this.value = this.value.sub(input.value); 69 | return this; 70 | } 71 | 72 | toBigint() { 73 | return Gadgets.Field3.toBigint(this.value.finish(this.modulus)); 74 | } 75 | 76 | static from(sum: S | ForeignField): S { 77 | if (sum instanceof UnreducedSum) return sum; 78 | return new (this as any)(sum); 79 | } 80 | } 81 | 82 | class AlmostReducedSum extends UnreducedSum { 83 | constructor(input: AlmostForeignField) { 84 | super(input); 85 | } 86 | 87 | add(input: AlmostForeignField) { 88 | this.value = this.value.add(input.value); 89 | return this; 90 | } 91 | sub(input: AlmostForeignField) { 92 | this.value = this.value.sub(input.value); 93 | return this; 94 | } 95 | } 96 | 
97 | function toVariable(x: Field) { 98 | let xv = Provable.witness(Field, () => x); 99 | xv.assertEquals(x); 100 | return xv; 101 | } -------------------------------------------------------------------------------- /contracts/src/towers/consts.ts: -------------------------------------------------------------------------------- 1 | const P = 2 | 21888242871839275222246405745257275088696311157297823662689037894645226208583n; 3 | const BETA = -1n; 4 | 5 | const P_MINUS_1_DIV_6 = 6 | 3648040478639879203707734290876212514782718526216303943781506315774204368097n; 7 | 8 | // BE naf representation of 6x + 2 9 | const ATE_LOOP_COUNT = [ 10 | 1, 1, 0, 1, 0, 0, -1, 0, 1, 1, 0, 0, 0, -1, 0, 0, 1, 1, 0, 0, -1, 0, 0, 0, 0, 11 | 0, 1, 0, 0, -1, 0, 0, 1, 1, 1, 0, 0, 0, 0, -1, 0, 1, 0, 0, -1, 0, 1, 1, 0, 0, 12 | 1, 0, 0, -1, 1, 0, 0, -1, 0, 1, 0, 1, 0, 0, 0, 13 | ]; 14 | 15 | const atc = [ 16 | 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 17 | 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 18 | 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 19 | ]; 20 | 21 | export { P, P_MINUS_1_DIV_6, BETA, ATE_LOOP_COUNT, atc }; 22 | -------------------------------------------------------------------------------- /contracts/src/towers/fp.ts: -------------------------------------------------------------------------------- 1 | import { Crypto, Provable, Struct, createForeignField, provable } from 'o1js'; 2 | import { P } from './consts.js'; 3 | // import { AnyTuple } from 'o1js/dist/node/lib/util/types'; 4 | // const P = 21888242871839275222246405745257275088696311157297823662689037894645226208583n 5 | class Fp extends createForeignField(P) {} 6 | 7 | class FpU extends Fp.Unreduced {} 8 | class FpA extends Fp.AlmostReduced {} 9 | class FpC extends Fp.Canonical {} 10 | 11 | // function main() { 12 | // let s = Provable.witness( 13 | // FpC.provable, 14 | // () => 
FpC.from(21888242871839275222246405745257275088696357297823662689037894645226208581n) 15 | // ); 16 | // let t = Provable.witness( 17 | // FpC.provable, 18 | // () => FpC.from(21888242871839275246405745257275088696311157297823662689037894645226208581n) 19 | // ); 20 | // let a = Provable.witness( 21 | // FpC.provable, 22 | // () => FpC.from(218882428718392752464745257275088696311157297823662689037894645226208581n) 23 | // ); 24 | // let b = Provable.witness( 25 | // FpC.provable, 26 | // () => FpC.from(21888242871839275246474525727508396311157297823662689037894645226208581n) 27 | // ); 28 | // s.mul(t).assertCanonical(); 29 | // a.mul(b).assertCanonical(); 30 | // } 31 | 32 | // (async () => { 33 | // console.time('running Fp constant version'); 34 | // await main(); 35 | // console.timeEnd('running Fp constant version'); 36 | 37 | // console.time('running Fp witness generation & checks'); 38 | // await Provable.runAndCheck(main); 39 | // console.timeEnd('running Fp witness generation & checks'); 40 | 41 | // console.time('creating Fp constraint system'); 42 | // let cs = await Provable.constraintSystem(main); 43 | // console.timeEnd('creating Fp constraint system'); 44 | 45 | // console.log(cs.summary()); 46 | // })(); 47 | 48 | export { Fp, FpU, FpA, FpC }; 49 | -------------------------------------------------------------------------------- /contracts/src/towers/fp12-benchmark.ts: -------------------------------------------------------------------------------- 1 | import { Provable } from 'o1js'; 2 | import { Fp12 } from './fp12.js'; 3 | 4 | function main() { 5 | let a = Provable.witness(Fp12, () => Fp12.one()); 6 | let b = Provable.witness(Fp12, () => Fp12.one()); 7 | a.mul(b); 8 | } 9 | 10 | let cs = await Provable.constraintSystem(main); 11 | console.log(cs.summary()); 12 | -------------------------------------------------------------------------------- /contracts/src/towers/fr.ts: 
-------------------------------------------------------------------------------- 1 | import { Gadgets, createForeignField } from 'o1js'; 2 | class Fr extends createForeignField(21888242871839275222246405745257275088548364400416034343698204186575808495617n) {} 3 | 4 | class FrU extends Fr.Unreduced {} 5 | class FrA extends Fr.AlmostReduced {} 6 | class FrC extends Fr.Canonical {} 7 | 8 | function powFr(x: FrC, exp: Array) { 9 | let r = Fr.from(x).assertCanonical() 10 | 11 | const n = exp.length; 12 | for (let i = 1; i < n; i++) { 13 | r = r.mul(r).assertCanonical(); 14 | 15 | if (exp[i] == 1) { 16 | r = r.mul(x).assertCanonical(); 17 | } 18 | 19 | } 20 | 21 | return r; 22 | } 23 | 24 | function xorFr(x: FrC, y: FrC) { 25 | let fieldsX = x.toFields() 26 | let fieldsY = y.toFields() 27 | 28 | let xoredFields = [] 29 | for (let i = 0; i < 3; i++) { 30 | xoredFields.push(Gadgets.xor(fieldsX[i], fieldsY[i], 96)) 31 | } 32 | 33 | return FrC.provable.fromFields(xoredFields) 34 | } 35 | 36 | export { Fr, FrU, FrA, FrC, powFr, xorFr}; 37 | -------------------------------------------------------------------------------- /contracts/src/towers/gamma_1s.json: -------------------------------------------------------------------------------- 1 | 
[{"c0":"8376118865763821496583973867626364092589906065868298776909617916018768340080","c1":"16469823323077808223889137241176536799009286646108169935659301613961712198316"},{"c0":"21575463638280843010398324269430826099269044274347216827212613867836435027261","c1":"10307601595873709700152284273816112264069230130616436755625194854815875713954"},{"c0":"2821565182194536844548159561693502659359617185244120367078079554186484126554","c1":"3505843767911556378687030309984248845540243509899259641013678093033130930403"},{"c0":"2581911344467009335267311115468803099551665605076196740867805258568234346338","c1":"19937756971775647987995932169929341994314640652964949448313374472400716661030"},{"c0":"685108087231508774477564247770172212460312782337200605669322048753928464687","c1":"8447204650696766136447902020341177575205426561248465145919723016860428151883"}] -------------------------------------------------------------------------------- /contracts/src/towers/gamma_2s.json: -------------------------------------------------------------------------------- 1 | [{"c0":"21888242871839275220042445260109153167277707414472061641714758635765020556617","c1":"0"},{"c0":"21888242871839275220042445260109153167277707414472061641714758635765020556616","c1":"0"},{"c0":"21888242871839275222246405745257275088696311157297823662689037894645226208582","c1":"0"},{"c0":"2203960485148121921418603742825762020974279258880205651966","c1":"0"},{"c0":"2203960485148121921418603742825762020974279258880205651967","c1":"0"}] -------------------------------------------------------------------------------- /contracts/src/towers/gamma_3s.json: -------------------------------------------------------------------------------- 1 | 
[{"c0":"11697423496358154304825782922584725312912383441159505038794027105778954184319","c1":"303847389135065887422783454877609941456349188919719272345083954437860409601"},{"c0":"3772000881919853776433695186713858239009073593817195771773381919316419345261","c1":"2236595495967245188281701248203181795121068902605861227855261137820944008926"},{"c0":"19066677689644738377698246183563772429336693972053703295610958340458742082029","c1":"18382399103927718843559375435273026243156067647398564021675359801612095278180"},{"c0":"5324479202449903542726783395506214481928257762400643279780343368557297135718","c1":"16208900380737693084919495127334387981393726419856888799917914180988844123039"},{"c0":"8941241848238582420466759817324047081148088512956452953208002715982955420483","c1":"10338197737521362862238855242243140895517409139741313354160881284257516364953"}] -------------------------------------------------------------------------------- /contracts/src/towers/index.ts: -------------------------------------------------------------------------------- 1 | export { FpC } from './fp.js'; 2 | export { Fp2 } from './fp2.js'; 3 | export { Fp6 } from './fp6.js'; 4 | export { Fp12 } from './fp12.js'; 5 | export { Fr, FrU, FrA, FrC } from "./fr.js" 6 | export { ATE_LOOP_COUNT } from './consts.js'; 7 | -------------------------------------------------------------------------------- /contracts/src/towers/neg_gamma.json: -------------------------------------------------------------------------------- 1 | {"c0":"19066677689644738377698246183563772429336693972053703295610958340458742082029","c1":"18382399103927718843559375435273026243156067647398564021675359801612095278180"} -------------------------------------------------------------------------------- /contracts/src/towers/precompute.ts: -------------------------------------------------------------------------------- 1 | import { FpC } from "./fp.js"; 2 | import { Fp2 } from "./fp2.js"; 3 | import fs from 'fs'; 4 | import { GAMMA_1S, 
GAMMA_2S, GAMMA_3S, NEG_GAMMA_13 } from "./precomputed.js"; 5 | 6 | fs.writeFile( 7 | './src/towers/gamma_1s.json', 8 | JSON.stringify(GAMMA_1S.map((g: Fp2) => Fp2.toJSON(g))), 9 | 'utf8', 10 | (err: any) => { 11 | if (err) { 12 | console.error('Error writing to file:', err); 13 | return; 14 | } 15 | console.log('Data has been written to gamma_1s.json'); 16 | } 17 | ); 18 | 19 | fs.writeFile( 20 | './src/towers/gamma_2s.json', 21 | JSON.stringify(GAMMA_2S.map((g: Fp2) => Fp2.toJSON(g))), 22 | 'utf8', 23 | (err: any) => { 24 | if (err) { 25 | console.error('Error writing to file:', err); 26 | return; 27 | } 28 | console.log('Data has been written to gamma_2s.json'); 29 | } 30 | ); 31 | 32 | fs.writeFile( 33 | './src/towers/gamma_3s.json', 34 | JSON.stringify(GAMMA_3S.map((g: Fp2) => Fp2.toJSON(g))), 35 | 'utf8', 36 | (err: any) => { 37 | if (err) { 38 | console.error('Error writing to file:', err); 39 | return; 40 | } 41 | console.log('Data has been written to gamma_3s.json'); 42 | } 43 | ); 44 | 45 | fs.writeFile( 46 | './src/towers/neg_gamma.json', 47 | JSON.stringify(Fp2.toJSON(NEG_GAMMA_13)), 48 | 'utf8', 49 | (err: any) => { 50 | if (err) { 51 | console.error('Error writing to file:', err); 52 | return; 53 | } 54 | console.log('Data has been written to neg_gamma.json'); 55 | } 56 | ); 57 | 58 | 59 | // let gamma_1s_input = fs.readFileSync('./src/towers/gamma_1s.json', 'utf8'); 60 | // let parsed_gamma_1s: any[] = JSON.parse(gamma_1s_input); 61 | // let gamma_1s = parsed_gamma_1s.map( 62 | // (g: any): Fp2 => Fp2.fromJSON(g) 63 | // ); 64 | 65 | // gamma_1s[0].assert_equals(GAMMA_1S[0]); -------------------------------------------------------------------------------- /contracts/src/towers/precomputed.ts: -------------------------------------------------------------------------------- 1 | import { FpC } from './fp.js'; 2 | import { Fp2 } from './fp2.js'; 3 | 4 | const fp2_non_residue = new Fp2({ c0: FpC.from(9n), c1: FpC.from(1n) }); 5 | 6 | // (9 + u)^(i * (p - 
1) / 6) for i in 1..5 7 | const GAMMA_1S = [ 8 | new Fp2({ 9 | c0: FpC.from( 10 | 8376118865763821496583973867626364092589906065868298776909617916018768340080n 11 | ), 12 | c1: FpC.from( 13 | 16469823323077808223889137241176536799009286646108169935659301613961712198316n 14 | ), 15 | }), 16 | new Fp2({ 17 | c0: FpC.from( 18 | 21575463638280843010398324269430826099269044274347216827212613867836435027261n 19 | ), 20 | c1: FpC.from( 21 | 10307601595873709700152284273816112264069230130616436755625194854815875713954n 22 | ), 23 | }), 24 | new Fp2({ 25 | c0: FpC.from( 26 | 2821565182194536844548159561693502659359617185244120367078079554186484126554n 27 | ), 28 | c1: FpC.from( 29 | 3505843767911556378687030309984248845540243509899259641013678093033130930403n 30 | ), 31 | }), 32 | new Fp2({ 33 | c0: FpC.from( 34 | 2581911344467009335267311115468803099551665605076196740867805258568234346338n 35 | ), 36 | c1: FpC.from( 37 | 19937756971775647987995932169929341994314640652964949448313374472400716661030n 38 | ), 39 | }), 40 | new Fp2({ 41 | c0: FpC.from( 42 | 685108087231508774477564247770172212460312782337200605669322048753928464687n 43 | ), 44 | c1: FpC.from( 45 | 8447204650696766136447902020341177575205426561248465145919723016860428151883n 46 | ), 47 | }), 48 | ]; 49 | 50 | // TODO: hardcode this 51 | const NEG_GAMMA_13 = GAMMA_1S[2].neg(); 52 | 53 | // TODO: hardcode these 54 | // γ_2i = γ_1i * γ_1i_conjugate 55 | const GAMMA_2S = [ 56 | GAMMA_1S[0].mul(GAMMA_1S[0].conjugate()), 57 | GAMMA_1S[1].mul(GAMMA_1S[1].conjugate()), 58 | GAMMA_1S[2].mul(GAMMA_1S[2].conjugate()), 59 | GAMMA_1S[3].mul(GAMMA_1S[3].conjugate()), 60 | GAMMA_1S[4].mul(GAMMA_1S[4].conjugate()), 61 | ]; 62 | 63 | // γ_3i = γ_1i * γ_2i 64 | const GAMMA_3S = [ 65 | GAMMA_1S[0].mul(GAMMA_2S[0]), 66 | GAMMA_1S[1].mul(GAMMA_2S[1]), 67 | GAMMA_1S[2].mul(GAMMA_2S[2]), 68 | GAMMA_1S[3].mul(GAMMA_2S[3]), 69 | GAMMA_1S[4].mul(GAMMA_2S[4]), 70 | ]; 71 | 72 | export { fp2_non_residue, GAMMA_1S, GAMMA_2S, GAMMA_3S, 
NEG_GAMMA_13 }; 73 | -------------------------------------------------------------------------------- /contracts/src/tree_of_vks.ts: -------------------------------------------------------------------------------- 1 | import { Field, Poseidon } from "o1js"; 2 | import { NOTHING_UP_MY_SLEEVE } from "./structs.js"; 3 | 4 | const isPowerOf2 = (n: number) => { 5 | return n > 0 && (n & (n - 1)) === 0; 6 | } 7 | 8 | const nextPowerOf2 = (n: number) => { 9 | const is = isPowerOf2(n); 10 | if (is) return n; 11 | 12 | const log = Math.ceil(Math.log2(n)); 13 | return 1 << log; 14 | } 15 | 16 | const appendToMakePowerOf2 = (baseVksHashes: Array) => { 17 | const n = baseVksHashes.length; 18 | const nn = nextPowerOf2(n); 19 | 20 | const dummyVks = new Array(nn - n).fill(Field(NOTHING_UP_MY_SLEEVE)); 21 | return baseVksHashes.concat(dummyVks) 22 | } 23 | 24 | 25 | const buildTreeOfVks = (baseVksHashes: Array, layer1VkHash: Field, nodeVkHash: Field) => { 26 | const leaves = appendToMakePowerOf2(baseVksHashes); 27 | 28 | const baseLayerHashes: Array = []; 29 | for (let i = 0; i < leaves.length; i += 2) { 30 | const digest = Poseidon.hash([leaves[i], leaves[i+1], Field(1)]); 31 | baseLayerHashes.push(digest); 32 | } 33 | 34 | let layerHashes: Array = baseLayerHashes.map(x => Field.from(x)); 35 | let layer = Field(1); 36 | while (layerHashes.length > 1) { 37 | let runningLayer: Array = []; 38 | layer = layer.add(Field(1)); 39 | 40 | let [vkLeft, vkRight] = layer.equals(Field(2)).toBoolean() ? 
[layer1VkHash, layer1VkHash] : [nodeVkHash, nodeVkHash]; 41 | 42 | for (let i = 0; i < layerHashes.length; i += 2) { 43 | const digest = Poseidon.hash([vkLeft, vkRight, layerHashes[i], layerHashes[i + 1], layer]); 44 | runningLayer.push(digest); 45 | } 46 | 47 | console.log('---------------') 48 | runningLayer.forEach(h => { 49 | console.log(h.toBigInt()); 50 | }); 51 | console.log('---------------') 52 | 53 | layerHashes = runningLayer.map(x=>Field.from(x)); 54 | } 55 | 56 | return layerHashes[0] 57 | } 58 | 59 | export { buildTreeOfVks } -------------------------------------------------------------------------------- /contracts/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "module": "es2022", 5 | "lib": ["dom", "esnext"], 6 | "outDir": "./build", 7 | "rootDir": ".", 8 | "strict": true, 9 | "strictPropertyInitialization": false, // to enable generic constructors, e.g. on CircuitValue 10 | "skipLibCheck": true, 11 | "forceConsistentCasingInFileNames": true, 12 | "esModuleInterop": true, 13 | "moduleResolution": "node", 14 | "experimentalDecorators": true, 15 | "emitDecoratorMetadata": true, 16 | "allowJs": true, 17 | "declaration": true, 18 | "sourceMap": true, 19 | "noFallthroughCasesInSwitch": true, 20 | "allowSyntheticDefaultImports": true, 21 | "useDefineForClassFields": false, 22 | }, 23 | "include": ["./src"], 24 | } 25 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "pairing", 3 | "lockfileVersion": 3, 4 | "requires": true, 5 | "packages": {} 6 | } 7 | -------------------------------------------------------------------------------- /pairing-utils/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /Cargo.lock 
-------------------------------------------------------------------------------- /pairing-utils/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "pairing-utils" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | name = "pairing_utils" 8 | path = "src/lib.rs" 9 | 10 | [dependencies] 11 | ark-ff = "0.4.2" 12 | ark-ff-macros = "0.4.2" 13 | ark-bn254 = "0.4.0" 14 | ark-std = "0.4.0" 15 | ark-ec = "0.4.2" 16 | ark-poly = "0.4.0" 17 | ark-serialize = "0.4.2" 18 | serde = { version = "1.0", features = ["derive"] } 19 | serde_json = "1.0" 20 | num-bigint = "0.4" 21 | -------------------------------------------------------------------------------- /pairing-utils/src/bin/alphabeta.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::process; 3 | 4 | use pairing_utils::make_alpha_beta; 5 | 6 | fn main() { 7 | let args: Vec = env::args().collect(); 8 | 9 | if args.len() != 3 { 10 | eprintln!( 11 | "Usage: cargo run --bin alphabeta -- path_to_risc_zero_vk path_to_save_alphabeta" 12 | ); 13 | process::exit(1); 14 | } 15 | 16 | make_alpha_beta(&args[1], &args[2]); 17 | } 18 | -------------------------------------------------------------------------------- /pairing-utils/src/bin/aux_witness.rs: -------------------------------------------------------------------------------- 1 | use pairing_utils::compute_and_serialize_aux_witness; 2 | use std::env; 3 | use std::process; 4 | 5 | fn main() { 6 | let args: Vec = env::args().collect(); 7 | 8 | if args.len() != 3 { 9 | eprintln!("Usage: cargo run --bin aux_witness -- path_to_mlo_o1js_output path_where_to_save_aux_witness"); 10 | process::exit(1); 11 | } 12 | 13 | compute_and_serialize_aux_witness(&args[1], &args[2]); 14 | } 15 | -------------------------------------------------------------------------------- /pairing-utils/src/eth_root.rs: 
-------------------------------------------------------------------------------- 1 | use ark_bn254::Fq12; 2 | use ark_std::One; 3 | 4 | use crate::{ 5 | constants::{H, H_PRIME, R, S_PRIME, U, U_PRIME}, 6 | tonelli_shanks::TS, 7 | utils::exp, 8 | }; 9 | 10 | // e = 6x + 2 + p - p^2 + p^3 11 | pub(crate) fn eth_root(y: Fq12, ts: TS) -> Fq12 { 12 | let c = exp(y, &U); 13 | assert_eq!(exp(c, &R), y); 14 | assert_eq!(exp(c, &H), Fq12::one()); 15 | 16 | assert_ne!(c, Fq12::one()); 17 | 18 | let a = exp(c, &U_PRIME); 19 | assert_eq!(exp(a, &S_PRIME), c); 20 | assert_eq!(exp(a, &H_PRIME), Fq12::one()); 21 | 22 | assert_ne!(a, Fq12::one()); 23 | 24 | ts.cube_root(a) 25 | } 26 | 27 | #[cfg(test)] 28 | mod eth_root_tests { 29 | use crate::{constants::E, utils::sample_27th_root_of_unity}; 30 | use ark_std::{ 31 | rand::{rngs::StdRng, SeedableRng}, 32 | UniformRand, 33 | }; 34 | 35 | use super::*; 36 | 37 | #[test] 38 | fn test_eth_root() { 39 | let rng = &mut StdRng::seed_from_u64(9837981739u64); 40 | let w = sample_27th_root_of_unity(rng); 41 | 42 | let ts = TS { w }; 43 | 44 | for _ in 0..5 { 45 | let x = Fq12::rand(rng); 46 | let y = exp(x, &E); 47 | 48 | let root = eth_root(y, ts); 49 | assert_eq!(exp(root, &E), y); 50 | } 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /pairing-utils/src/risc0_vk.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use crate::display_fq12; 4 | use ark_bn254::{Bn254, Fq, Fq2, G1Affine, G2Affine}; 5 | use ark_ec::pairing::Pairing; 6 | use ark_ff::MontFp; 7 | 8 | #[test] 9 | fn make_alpha_beta() { 10 | let alpha_x: Fq = MontFp!( 11 | "20491192805390485299153009773594534940189261866228447918068658471970481763042" 12 | ); 13 | let alpha_y: Fq = 14 | MontFp!("9383485363053290200918347156157836566562967994039712273449902621266178545958"); 15 | 16 | let beta_x_c0: Fq = 17 | 
MontFp!("6375614351688725206403948262868962793625744043794305715222011528459656738731"); 18 | let beta_x_c1: Fq = 19 | MontFp!("4252822878758300859123897981450591353533073413197771768651442665752259397132"); 20 | 21 | let beta_y_c0: Fq = MontFp!( 22 | "10505242626370262277552901082094356697409835680220590971873171140371331206856" 23 | ); 24 | let beta_y_c1: Fq = MontFp!( 25 | "21847035105528745403288232691147584728191162732299865338377159692350059136679" 26 | ); 27 | 28 | let beta_x = Fq2::new(beta_x_c0, beta_x_c1); 29 | let beta_y = Fq2::new(beta_y_c0, beta_y_c1); 30 | 31 | let alpha = G1Affine::new(alpha_x, alpha_y); 32 | let beta = G2Affine::new(beta_x, beta_y); 33 | 34 | let alpha_beta = Bn254::multi_miller_loop(&[alpha], &[beta]).0; 35 | display_fq12(alpha_beta, "alpha_beta"); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /pairing-utils/src/serialize.rs: -------------------------------------------------------------------------------- 1 | use ark_bn254::{Fq, Fq12, Fq2, Fq6}; 2 | use ark_std::Zero; 3 | use serde::{Deserialize, Serialize}; 4 | use std::str::FromStr; 5 | 6 | #[derive(Serialize, Deserialize, Debug)] 7 | pub struct Field12 { 8 | pub g00: String, 9 | pub g01: String, 10 | 11 | pub g10: String, 12 | pub g11: String, 13 | 14 | pub g20: String, 15 | pub g21: String, 16 | 17 | pub h00: String, 18 | pub h01: String, 19 | 20 | pub h10: String, 21 | pub h11: String, 22 | 23 | pub h20: String, 24 | pub h21: String, 25 | } 26 | 27 | #[derive(Serialize, Deserialize, Debug)] 28 | pub struct AuxWitness { 29 | c: Field12, 30 | shift_power: String, 31 | } 32 | 33 | pub fn serialize_aux_witness(c: Fq12, shift_pow: u8, path: &str) { 34 | let c_serialized = serialize_fq12(c); 35 | let aux_witness = AuxWitness { 36 | c: c_serialized, 37 | shift_power: shift_pow.to_string(), 38 | }; 39 | 40 | let json = serde_json::to_string(&aux_witness).unwrap(); 41 | std::fs::write(path, &json).unwrap(); 42 | } 43 | 44 | pub fn 
serialize_fq12(f: Fq12) -> Field12 { 45 | let to_string = |x: Fq| -> String { 46 | if x == Fq::zero() { 47 | "0".to_string() 48 | } else { 49 | x.to_string() 50 | } 51 | }; 52 | 53 | Field12 { 54 | g00: to_string(f.c0.c0.c0), 55 | g01: to_string(f.c0.c0.c1), 56 | 57 | g10: to_string(f.c0.c1.c0), 58 | g11: to_string(f.c0.c1.c1), 59 | 60 | g20: to_string(f.c0.c2.c0), 61 | g21: to_string(f.c0.c2.c1), 62 | 63 | h00: to_string(f.c1.c0.c0), 64 | h01: to_string(f.c1.c0.c1), 65 | 66 | h10: to_string(f.c1.c1.c0), 67 | h11: to_string(f.c1.c1.c1), 68 | 69 | h20: to_string(f.c1.c2.c0), 70 | h21: to_string(f.c1.c2.c1), 71 | } 72 | } 73 | 74 | pub fn deserialize_fq12(path: &str) -> Fq12 { 75 | let json = std::fs::read_to_string(path).unwrap(); 76 | let f12: Field12 = serde_json::from_str(&json).unwrap(); 77 | 78 | let g00: Fq = Fq::from_str(&f12.g00).unwrap(); 79 | let g01: Fq = Fq::from_str(&f12.g01).unwrap(); 80 | let g0 = Fq2::new(g00, g01); 81 | 82 | let g10: Fq = Fq::from_str(&f12.g10).unwrap(); 83 | let g11: Fq = Fq::from_str(&f12.g11).unwrap(); 84 | let g1 = Fq2::new(g10, g11); 85 | 86 | let g20: Fq = Fq::from_str(&f12.g20).unwrap(); 87 | let g21: Fq = Fq::from_str(&f12.g21).unwrap(); 88 | let g2 = Fq2::new(g20, g21); 89 | 90 | let g: Fq6 = Fq6::new(g0, g1, g2); 91 | 92 | let h00: Fq = Fq::from_str(&f12.h00).unwrap(); 93 | let h01: Fq = Fq::from_str(&f12.h01).unwrap(); 94 | let h0 = Fq2::new(h00, h01); 95 | 96 | let h10: Fq = Fq::from_str(&f12.h10).unwrap(); 97 | let h11: Fq = Fq::from_str(&f12.h11).unwrap(); 98 | let h1 = Fq2::new(h10, h11); 99 | 100 | let h20: Fq = Fq::from_str(&f12.h20).unwrap(); 101 | let h21: Fq = Fq::from_str(&f12.h21).unwrap(); 102 | let h2 = Fq2::new(h20, h21); 103 | 104 | let h: Fq6 = Fq6::new(h0, h1, h2); 105 | 106 | Fq12::new(g, h) 107 | } 108 | -------------------------------------------------------------------------------- /pairing-utils/src/utils.rs: -------------------------------------------------------------------------------- 1 | use 
crate::constants::K; 2 | use ark_bn254::Fq12; 3 | use ark_ff::Field; 4 | use ark_std::{rand::RngCore, One, UniformRand}; 5 | 6 | // bits are in big endian 7 | pub(crate) fn exp(x: F, bits: &[u8]) -> F { 8 | let mut r = x; 9 | 10 | let n = bits.len(); 11 | for i in 1..n { 12 | r *= r; 13 | if bits[i] == 1 { 14 | r = r * x; 15 | } 16 | } 17 | 18 | return r; 19 | } 20 | 21 | // K = (p^12 - 1)/27 22 | pub(crate) fn sample_27th_root_of_unity(rng: &mut R) -> Fq12 { 23 | let one = Fq12::one(); 24 | 25 | let pow_9 = |x: Fq12| -> bool { x.pow(&[9, 0, 0, 0]) != one }; 26 | 27 | loop { 28 | let x = Fq12::rand(rng); 29 | let w = exp(x, &K); 30 | if w != one && pow_9(w) { 31 | return w; 32 | } 33 | } 34 | } 35 | 36 | #[cfg(test)] 37 | mod utils_tests { 38 | use ark_ff::Field; 39 | use ark_std::rand::{rngs::StdRng, SeedableRng}; 40 | 41 | use super::*; 42 | 43 | #[test] 44 | fn test_exp() { 45 | let rng = &mut StdRng::seed_from_u64(0u64); 46 | 47 | let x = Fq12::rand(rng); 48 | 49 | let p: u64 = 8904238409183123512; 50 | let p_bits: [u8; 63] = [ 51 | 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 52 | 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 0, 1, 53 | 1, 1, 0, 0, 0, 54 | ]; 55 | let x_pow = x.pow(&[p]); 56 | let x_pow_hand = exp(x, &p_bits); 57 | assert_eq!(x_pow, x_pow_hand); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /pairing-utils/src/write.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use ark_ff::MontFp; 4 | 5 | use ark_bn254::{Fq, Fq12}; 6 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 7 | use ark_std::One; 8 | 9 | #[test] 10 | fn write_fp12() { 11 | // Create or open the file 12 | // let mut file = File::create("output").unwrap(); 13 | let mut buffer = Vec::::new(); 14 | 15 | let f = Fq12::one(); 16 | f.serialize_compressed(&mut 
buffer).unwrap(); 17 | 18 | let f_read = Fq12::deserialize_compressed(buffer.as_slice()).unwrap(); 19 | println!("{}", f_read); 20 | 21 | // Ok(()) 22 | } 23 | 24 | use serde::{Deserialize, Serialize}; 25 | 26 | #[derive(Serialize, Deserialize, Debug)] 27 | struct Person { 28 | name: String, 29 | age: u8, 30 | email: String, 31 | } 32 | 33 | #[test] 34 | fn write_person() { 35 | let person = Person { 36 | name: String::from("Alice"), 37 | age: 30, 38 | email: String::from("alice@example.com"), 39 | }; 40 | 41 | // Serialize the struct to a JSON string 42 | let json = serde_json::to_string(&person).unwrap(); 43 | println!("Serialized JSON: {}", json); 44 | 45 | // Write the JSON string to a file 46 | // std::fs::write("person.json", &json).unwrap(); 47 | } 48 | 49 | #[derive(Serialize, Deserialize, Debug)] 50 | struct FieldElement { 51 | x: String, 52 | } 53 | 54 | #[test] 55 | fn fq_to_json() { 56 | let x: Fq = MontFp!( 57 | "15616337568370127376524227028151073256580278759114373848263446467695063344960" 58 | ); 59 | 60 | let f = FieldElement { x: x.to_string() }; 61 | 62 | let json = serde_json::to_string(&f).unwrap(); 63 | println!("Serialized JSON: {}", json); 64 | 65 | // Write the JSON string to a file 66 | // std::fs::write("person.json", &json).unwrap(); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /scripts/.env.example: -------------------------------------------------------------------------------- 1 | WORK_DIR=e2e_plonk 2 | CACHE_DIR=plonk_cache 3 | 
HEX_PROOF="0x000000002a0a43cf6baf67b9d40e1cb4cc816d8680aa624f4d1d6764c98625e4058aabe32cfae4e2bd16f3db6a711e639e3713bb6a3bb34cfea43a2aa79088b8978fc3642b1f5606964ce47bc07ac3759d983dfdb889b3f1b685608ad304e4e9655b0e0c1aee8b949a7cea5c368dbf6f4b4a7dc125d3d5bab0fcef0e756eae77e5267ae91c057832bccbc57f245ac5a17f099d7ae4658f8f805470b533a8dd52df3894170ce88fd926f2b1235bb3f41fb16b14572bc451e0bae1e0d6fb8b9544440e34d519e0d4fd5bfb02296477a6d6c9e38490025ec9ca4384a53911cf834294e5a0c417db11ab57fc768c0461b212bcdc46188a5fefc9a45d63be88139c7536579c970092ee1cf046b49a1a50fdf93a31fc320e13f630a10c62e616b9d5e4676af2a51e6b9dbb792ef6b95ca0bec2dca6dea46b4c0c67cb9b51b08f43ad71a39a0044018d65ea2c1dcc7a10d328793c82530790c018879c34f2ca02a3be77807584c60f7c43f1b1d3384c337932f059f38d980476fbc30a428673d7a978cf40e39a502290f4e605abdd07346cf52785b4f3bcba42a6a32404f302e9915092d75a611518d80a4754ddc4743c6c7583b86d1916bea533fdd4d1ba7a5bbb939a40f9b66305f6154933550dbf8076d683c3c7d56717a0587d12274e3ebdb020276c735f491e687fcee4b6169c3f77a06b604ededce72861eaf5ce0b0d9dd041e5d76f9e28086888969a0eb3310bddf4215c0f421fd8989cb9dfc0d699d37c6571bd7912dc0e73e2d383ff4a066c20cb34038a2bf5f104c5a0dddf418f17145f73bde8dea628d8107ee55c352d0c4a4d2c8caada5645996fd2d031f5c723c366f7a9c66b1a17727ecfe131cb7d0e31a00b1f0826d19cd6e9b63426d32425b35e2c93be32bf2c3fdb87898c549069c13218729cef0ea3745760c1665f4bdc4991db042fe50c0b08a6c13f3508005bc344ef7ef25185fb2a4f0be1833e653d722fdeacfb79ef0eb96ddc4500bee6aa9b368def766dd1447c18e2f4aeb3478778f347d460fe0f1905f2a262d0a4c88dab019c83345289e40d6636d3da4de847b8480b8421a57c12be6ce901d4d636cecdf7ec4a8e36bbfbc7ffb750a5e3e4ba94547e056d8532094c559b50294e6865725ba29d0f2d39fd2c857135011b25b12cca2453c0b0cb010b3fa2d3a6f4417121f42c890dd0e7d1ae507e94f201f7ef43a8b139113817" 4 | PROGRAM_VK="205396879370976310672062186070586146996586893314981151421601444555690613634" 5 | 
HEX_PI="0x17deda8a1add5b686434afd29af7d7f762aeaa69c04584519b014f6b71c7153b54c29754d3f1c3612a1b42c741b484a767427e24d015ade74663c0948858f5549388fe7538e3ee24a72d29fe0731783ad1aafbfd2351fb9b13f589dc955ad95900000000000000000000000000000000000000000000000000000000001d3cda00000000000000000000000000000000000000000000000000000000001d3cee000000000000000000000000000000000000000fffffffffffffffffffffffff" -------------------------------------------------------------------------------- /scripts/e2e_groth16.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit if any of scripts exit 4 | set -e 5 | 6 | args=("$@") 7 | ENV=$(realpath ${args[0]}) 8 | source ${ENV} 9 | SCRIPT_DIR=$(dirname -- $(realpath $0)) 10 | cd $SCRIPT_DIR/.. 11 | 12 | pushd ./contracts 13 | 14 | node_version=$(node -v) 15 | node_version=${node_version:1} 16 | node_version=${node_version%\.*} 17 | node_version=${node_version%\.*} 18 | node_version=$(($node_version)) 19 | if [ $node_version -lt 22 ] 20 | then 21 | echo "Node version is too low - $node_version. Please upgrade to NodeJS 22 or higher." 
22 | exit 1 23 | fi 24 | 25 | npm install 26 | npm run build 27 | 28 | popd 29 | 30 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 31 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS} 32 | 33 | CACHE_DIR_RELATIVE_TO_SCRIPTS="./scripts/${CACHE_DIR}" 34 | mkdir -p ${CACHE_DIR_RELATIVE_TO_SCRIPTS} 35 | 36 | # get alphabeta 37 | ./scripts/get_alphabeta_groth16.sh ${ENV} 38 | 39 | # get aux pairing witness 40 | ./scripts/get_aux_witness_groth16.sh ${ENV} 41 | 42 | # test e2e proof 43 | ./scripts/groth16_tree.sh ${ENV} -------------------------------------------------------------------------------- /scripts/e2e_plonk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit if any of scripts exit 4 | set -e 5 | 6 | args=("$@") 7 | ENV=$(realpath ${args[0]}) 8 | source ${ENV} 9 | SCRIPT_DIR=$(dirname -- $(realpath $0)) 10 | cd $SCRIPT_DIR/.. 11 | 12 | pushd ./contracts 13 | 14 | node_version=$(node -v) 15 | node_version=${node_version:1} 16 | node_version=${node_version%\.*} 17 | node_version=${node_version%\.*} 18 | node_version=$(($node_version)) 19 | if [ $node_version -lt 22 ] 20 | then 21 | echo "Node version is too low - $node_version. Please upgrade to NodeJS 22 or higher." 
22 | exit 1 23 | fi 24 | 25 | npm install 26 | npm run build 27 | 28 | popd 29 | 30 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 31 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS} 32 | 33 | CACHE_DIR_RELATIVE_TO_SCRIPTS="./scripts/${CACHE_DIR}" 34 | mkdir -p ${CACHE_DIR_RELATIVE_TO_SCRIPTS} 35 | 36 | # get aux pairing witness 37 | ./scripts/get_aux_witness_plonk.sh ${ENV} 38 | 39 | # test e2e proof 40 | ./scripts/plonk_tree.sh ${ENV} -------------------------------------------------------------------------------- /scripts/e2e_verify_plonk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | source ./scripts/.env 6 | cd ./contracts 7 | NODE_SCRIPT="./build/src/plonk/e2e_verify.js" 8 | 9 | AUX_WITNESS_RELATIVE_PATH="../$AUX_WITNESS_PATH" 10 | node --max-old-space-size=16384 $NODE_SCRIPT $HEX_PROOF $PROGRAM_VK $HEX_PI $AUX_WITNESS_RELATIVE_PATH & 11 | 12 | node_pid=$! 13 | wait $node_pid 14 | exit_status=$? 15 | 16 | if [ $exit_status -eq 0 ]; then 17 | echo "Verification successfully proven" 18 | else 19 | echo "Verification failed" 20 | exit 1 21 | fi 22 | 23 | echo "Success" -------------------------------------------------------------------------------- /scripts/get_alphabeta_groth16.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | args=("$@") 6 | ENV=${args[0]} 7 | source ${ENV} 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 10 | 11 | # reposition 12 | cd ./pairing-utils 13 | 14 | cargo run --bin alphabeta -- $RAW_VK_PATH $VK_PATH & 15 | cargo_pid=$! 16 | wait $cargo_pid 17 | exit_status=$?
18 | 19 | if [ $exit_status -eq 0 ]; then 20 | echo "alpha*beta successfully generated" 21 | else 22 | echo "alpha*beta computation failed" 23 | exit 1 24 | fi 25 | 26 | echo "Success" -------------------------------------------------------------------------------- /scripts/get_aux_witness_groth16.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | args=("$@") 6 | ENV=${args[0]} 7 | source ${ENV} 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 10 | MLO_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/mlo.json" 11 | rm -f $MLO_RELATIVE_PATH 12 | AUX_WITNESS_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/aux_wtns.json" 13 | rm -f $AUX_WITNESS_RELATIVE_PATH 14 | 15 | cd ./contracts 16 | NODE_SCRIPT="./build/src/groth/serialize_mlo.js" 17 | 18 | # obtain mlo result 19 | 20 | export GROTH16_VK_PATH=${VK_PATH} 21 | node $NODE_SCRIPT $VK_PATH $PROOF_PATH $MLO_RELATIVE_PATH & 22 | 23 | node_pid=$! 24 | wait $node_pid 25 | exit_status=$? 26 | 27 | if [ $exit_status -eq 0 ]; then 28 | echo "Miller loop output successfully written" 29 | else 30 | echo "Miller loop output computation failed" 31 | exit 1 32 | fi 33 | 34 | # reposition 35 | cd ../pairing-utils 36 | 37 | cargo run --bin aux_witness -- $MLO_RELATIVE_PATH $AUX_WITNESS_RELATIVE_PATH & 38 | cargo_pid=$! 39 | wait $cargo_pid 40 | exit_status=$?
41 | 42 | if [ $exit_status -eq 0 ]; then 43 | echo "Auxiliary witness successfully computed" 44 | else 45 | echo "Auxiliary witness computation failed" 46 | exit 1 47 | fi 48 | 49 | echo "Success" -------------------------------------------------------------------------------- /scripts/get_aux_witness_plonk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | args=("$@") 6 | ENV=${args[0]} 7 | source ${ENV} 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 10 | MLO_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/mlo.json" 11 | rm -f $MLO_RELATIVE_PATH 12 | AUX_WITNESS_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/aux_wtns.json" 13 | rm -f $AUX_WITNESS_RELATIVE_PATH 14 | 15 | cd ./contracts 16 | NODE_SCRIPT="./build/src/plonk/serialize_mlo.js" 17 | 18 | # obtain mlo result 19 | 20 | node $NODE_SCRIPT $MLO_RELATIVE_PATH $HEX_PROOF $PROGRAM_VK $HEX_PI & 21 | 22 | node_pid=$! 23 | wait $node_pid 24 | exit_status=$? 25 | 26 | if [ $exit_status -eq 0 ]; then 27 | echo "Miller loop output successfully written" 28 | else 29 | echo "Miller loop output computation failed" 30 | exit 1 31 | fi 32 | 33 | # reposition 34 | cd ../pairing-utils 35 | 36 | cargo run --bin aux_witness -- $MLO_RELATIVE_PATH $AUX_WITNESS_RELATIVE_PATH & 37 | cargo_pid=$! 38 | wait $cargo_pid 39 | exit_status=$?
40 | 41 | if [ $exit_status -eq 0 ]; then 42 | echo "Auxiliary witness successfully computed" 43 | else 44 | echo "Auxiliary witness computation failed" 45 | exit 1 46 | fi 47 | 48 | echo "Success" -------------------------------------------------------------------------------- /scripts/groth16_tree.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | args=("$@") 4 | ENV=${args[0]} 5 | source ${ENV} 6 | 7 | cd ./contracts 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="../scripts/${WORK_DIR}" 10 | CACHE_DIR_RELATIVE_TO_SCRIPTS="../scripts/${CACHE_DIR}" 11 | AUX_WITNESS_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/aux_wtns.json" 12 | 13 | 14 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/vks/ 15 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/ 16 | 17 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer0 18 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer1 19 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer2 20 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer3 21 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer4 22 | 23 | echo "Compiling recursion vks..." 24 | node ./build/src/compile_recursion_vks.js ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 25 | 26 | node_pid=$! 27 | wait $node_pid 28 | exit_status=$? 29 | 30 | if [ $exit_status -eq 0 ]; then 31 | echo "Recursion vks compiled successfully" 32 | else 33 | echo "Recursion vks compilation failed" 34 | exit 1 35 | fi 36 | 37 | MAX_THREADS=${MAX_THREADS:-16} 38 | echo "MAX THREADS: $MAX_THREADS" 39 | 40 | MAX_ITERATIONS=$(( (16 + $MAX_THREADS - 1)/$MAX_THREADS )) 41 | TOTAL_IN_LOOP=16 42 | SHOULD_BREAK=false 43 | 44 | export GROTH16_VK_PATH=${VK_PATH} 45 | 46 | echo "Computing ZKPs 0-15..."
47 | for i in `seq 0 $MAX_ITERATIONS`; do 48 | for j in `seq 0 $(( $MAX_THREADS - 1 ))`; do 49 | ZKP_I=$(( $i * $MAX_THREADS + $j )) 50 | if (( $ZKP_I >= $TOTAL_IN_LOOP )); then 51 | SHOULD_BREAK=true 52 | fi 53 | if $SHOULD_BREAK; then 54 | break 55 | fi 56 | # echo "Computing ZKP ${ZKP_I}..." 57 | node ./build/src/groth/recursion/prove_zkps.js zkp${ZKP_I} $PROOF_PATH $AUX_WITNESS_RELATIVE_PATH ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 58 | done 59 | wait 60 | if $SHOULD_BREAK; then 61 | break 62 | fi 63 | done 64 | 65 | echo "Computed ZKPs 0-15..." 66 | 67 | for i in `seq 1 4`; do 68 | echo "Compressing layer ${i}..." 69 | upper_limit=$(( 2 ** (4 - i) - 1 )) 70 | MAX_ITERATIONS=$(( ($upper_limit + $MAX_THREADS - 1) / $MAX_THREADS )) 71 | SHOULD_BREAK=false 72 | for j in `seq 0 $MAX_ITERATIONS`; do 73 | # echo "${i}, ${j}" 74 | for k in `seq 0 $(( $MAX_THREADS - 1 ))`; do 75 | ZKP_J=$(( $j * $MAX_THREADS + $k )) 76 | if (( $ZKP_J > $upper_limit )); then 77 | SHOULD_BREAK=true 78 | fi 79 | if $SHOULD_BREAK; then 80 | break 81 | fi 82 | # echo "${i}, ${j}, ${k}, ${ZKP_J}" 83 | node ./build/src/node_resolver.js $TOTAL_IN_LOOP ${i} ${ZKP_J} ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 84 | done 85 | wait 86 | if $SHOULD_BREAK; then 87 | break 88 | fi 89 | done 90 | echo "Compressed layer ${i}..." 91 | done 92 | 93 | echo "Done!" 
-------------------------------------------------------------------------------- /scripts/plonk_tree.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | args=("$@") 4 | ENV=${args[0]} 5 | source ${ENV} 6 | 7 | cd ./contracts 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="../scripts/${WORK_DIR}" 10 | CACHE_DIR_RELATIVE_TO_SCRIPTS="../scripts/${CACHE_DIR}" 11 | AUX_WITNESS_RELATIVE_PATH="$(realpath ${WORK_DIR_RELATIVE_TO_SCRIPTS})/aux_wtns.json" 12 | 13 | 14 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/vks/ 15 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/ 16 | 17 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer0 18 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer1 19 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer2 20 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer3 21 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer4 22 | mkdir -p ${WORK_DIR_RELATIVE_TO_SCRIPTS}/proofs/layer5 23 | 24 | echo "Compiling recursion vks..." 25 | node ./build/src/compile_recursion_vks.js ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 26 | 27 | node_pid=$! 28 | wait $node_pid 29 | exit_status=$? 30 | 31 | if [ $exit_status -eq 0 ]; then 32 | echo "Recursion vks compiled successfully" 33 | else 34 | echo "Recursion vks compilation failed" 35 | exit 1 36 | fi 37 | 38 | MAX_THREADS=${MAX_THREADS:-32} 39 | echo "MAX THREADS: $MAX_THREADS" 40 | 41 | MAX_ITERATIONS=$(( (32 + $MAX_THREADS - 1)/$MAX_THREADS )) 42 | TOTAL_IN_LOOP=24 43 | SHOULD_BREAK=false 44 | 45 | echo "Computing ZKPs 0-23..." 46 | for i in `seq 0 $MAX_ITERATIONS`; do 47 | for j in `seq 0 $(( $MAX_THREADS - 1 ))`; do 48 | ZKP_I=$(( $i * $MAX_THREADS + $j )) 49 | if (( $ZKP_I >= $TOTAL_IN_LOOP )); then 50 | SHOULD_BREAK=true 51 | fi 52 | if $SHOULD_BREAK; then 53 | break 54 | fi 55 | # echo "Computing ZKP ${ZKP_I}..." 
56 | node ./build/src/plonk/recursion/prove_zkps.js zkp${ZKP_I} $HEX_PROOF $PROGRAM_VK $HEX_PI $AUX_WITNESS_RELATIVE_PATH ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 57 | done 58 | wait 59 | if $SHOULD_BREAK; then 60 | break 61 | fi 62 | done 63 | 64 | echo "Computed ZKPs 0-23..." 65 | 66 | for i in `seq 1 5`; do 67 | echo "Compressing layer ${i}..." 68 | upper_limit=$(( 2 ** (5 - i) - 1 )) 69 | MAX_ITERATIONS=$(( ($upper_limit + $MAX_THREADS - 1) / $MAX_THREADS )) 70 | SHOULD_BREAK=false 71 | for j in `seq 0 $MAX_ITERATIONS`; do 72 | # echo "${i}, ${j}" 73 | for k in `seq 0 $(( $MAX_THREADS - 1 ))`; do 74 | ZKP_J=$(( $j * $MAX_THREADS + $k )) 75 | if (( $ZKP_J > $upper_limit )); then 76 | SHOULD_BREAK=true 77 | fi 78 | if $SHOULD_BREAK; then 79 | break 80 | fi 81 | # echo "${i}, ${j}, ${k}, ${ZKP_J}" 82 | node ./build/src/node_resolver.js $TOTAL_IN_LOOP ${i} ${ZKP_J} ${WORK_DIR_RELATIVE_TO_SCRIPTS} ${CACHE_DIR_RELATIVE_TO_SCRIPTS} & 83 | done 84 | wait 85 | if $SHOULD_BREAK; then 86 | break 87 | fi 88 | done 89 | echo "Compressed layer ${i}..." 90 | done 91 | 92 | echo "Done!" -------------------------------------------------------------------------------- /scripts/prepare_vk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | args=("$@") 6 | ENV=${args[0]} 7 | source ${ENV} 8 | 9 | WORK_DIR_RELATIVE_TO_SCRIPTS="./scripts/${WORK_DIR}" 10 | rm -f $AUX_WITNESS_RELATIVE_PATH 11 | 12 | # reposition 13 | cd ./pairing-utils 14 | 15 | cargo run --bin alphabeta -- $RAW_VK_PATH $VK_PATH & 16 | cargo_pid=$! 17 | wait $cargo_pid 18 | exit_status=$? 
19 | 20 | if [ $exit_status -eq 0 ]; then 21 | echo "VK successfully prepared" 22 | else 23 | echo "VK preparation failed" 24 | exit 1 25 | fi 26 | 27 | echo "Success" -------------------------------------------------------------------------------- /scripts/risc_zero_example/e2e_risc_zero.sh: -------------------------------------------------------------------------------- 1 | set -e 2 | 3 | SCRIPT_DIR=$(dirname -- $(realpath $0)) 4 | cd $SCRIPT_DIR/../.. 5 | 6 | RUN_DIR_RELATIVE_TO_SCRIPTS=$(basename $SCRIPT_DIR)/run 7 | RUN_DIR=$(pwd)/scripts/$RUN_DIR_RELATIVE_TO_SCRIPTS 8 | 9 | mkdir -p $RUN_DIR 10 | 11 | pushd ./contracts 12 | 13 | node_version=$(node -v) 14 | node_version=${node_version:1} 15 | node_version=${node_version%\.*} 16 | node_version=${node_version%\.*} 17 | node_version=$(($node_version)) 18 | if [ $node_version -lt 22 ] 19 | then 20 | echo "Node version is too low - $node_version. Please upgrade to NodeJS 22 or higher." 21 | exit 1 22 | fi 23 | 24 | npm install 25 | npm run build 26 | 27 | node "./build/src/groth/proof_to_env.js" $SCRIPT_DIR/risc_zero_proof.json $SCRIPT_DIR/risc_zero_raw_vk.json $RUN_DIR/risc_zero_vk.json $RUN_DIR $RUN_DIR_RELATIVE_TO_SCRIPTS risc_zero & 28 | 29 | node_pid=$! 30 | wait $node_pid 31 | exit_status=$? 32 | 33 | if [ $exit_status -eq 0 ]; then 34 | echo "Risc Zero env successfully written" 35 | else 36 | echo "Risc Zero env failed" 37 | exit 1 38 | fi 39 | 40 | popd 41 | 42 | pushd ./scripts 43 | 44 | ./e2e_groth16.sh $RUN_DIR/env.risc_zero 45 | 46 | source $RUN_DIR/env.risc_zero 47 | export GROTH16_VK_PATH=${VK_PATH} 48 | export RISC_ZERO_EXAMPLE_WORK_DIR=$WORK_DIR 49 | export RISC_ZERO_EXAMPLE_PROOF_PATH=$PROOF_PATH 50 | node "../contracts/build/src/risc_zero/prove_zkps.js" risc_zero ${WORK_DIR}/proofs/layer4/p0.json $PROOF_PATH ${RUN_DIR}/riscZeroProof.json ${CACHE_DIR} & 51 | 52 | node_pid=$! 53 | wait $node_pid 54 | exit_status=$? 
55 | 56 | if [ $exit_status -eq 0 ]; then 57 | echo "Risc Zero example proof successfully written" 58 | else 59 | echo "Risc Zero example proof failed" 60 | exit 1 61 | fi 62 | 63 | popd -------------------------------------------------------------------------------- /scripts/risc_zero_example/risc_zero_proof.json: -------------------------------------------------------------------------------- 1 | { 2 | "negA": { 3 | "x": "16465199099708604290698553000024942000051030364759839088954586362243760185403", 4 | "y": "17690359568564825813235988832215237195831246832056909949821693820324615702030" 5 | }, 6 | "B": { 7 | "x_c0": "20076621026680381759767634077167710158570125456580059736389163589252903861997", 8 | "x_c1": "8939596936503745624468413156089285325774309438533412822132357238045755689387", 9 | "y_c0": "111879341840300391556371653123940615424189534247854096298516707026393487948", 10 | "y_c1": "7719123513247232282802073769919057076444760653726977674407327771459600820251" 11 | }, 12 | "C": { 13 | "x": "10184405014965771627427034456113644883671783030647002464387923536809721376302", 14 | "y": "3331725942843591742687069662771052794291623321043694393039994836222102831251" 15 | }, 16 | "pi1": "19350802088444617183621339156085479077", 17 | "pi2": "61803236023146647725736150410140474743", 18 | "pi3": "224573707671822082550326687066026912541", 19 | "pi4": "215629468736039306773365103130456808007", 20 | "pi5": "6655704183316983190945468237220041514376883004657559498672647785620383118673" 21 | } -------------------------------------------------------------------------------- /scripts/risc_zero_example/risc_zero_raw_vk.json: -------------------------------------------------------------------------------- 1 | { 2 | "delta": { 3 | "x_c0": "12043754404802191763554326994664886008979042643626290185762540825416902247219", 4 | "x_c1": "1668323501672964604911431804142266013250380587483576094566949227275849579036", 5 | "y_c0": 
"13740680757317479711909903993315946540841369848973133181051452051592786724563", 6 | "y_c1": "7710631539206257456743780535472368339139328733484942210876916214502466455394" 7 | }, 8 | "gamma": { 9 | "x_c0": "10857046999023057135944570762232829481370756359578518086990519993285655852781", 10 | "x_c1": "11559732032986387107991004021392285783925812861821192530917403151452391805634", 11 | "y_c0": "8495653923123431417604973247489272438418190587263600148770280649306958101930", 12 | "y_c1": "4082367875863433681332203403145435568316851327593401208105741076214120093531" 13 | }, 14 | "alpha": { 15 | "x": "20491192805390485299153009773594534940189261866228447918068658471970481763042", 16 | "y": "9383485363053290200918347156157836566562967994039712273449902621266178545958" 17 | }, 18 | "beta": { 19 | "x_c0": "6375614351688725206403948262868962793625744043794305715222011528459656738731", 20 | "x_c1": "4252822878758300859123897981450591353533073413197771768651442665752259397132", 21 | "y_c0": "10505242626370262277552901082094356697409835680220590971873171140371331206856", 22 | "y_c1": "21847035105528745403288232691147584728191162732299865338377159692350059136679" 23 | }, 24 | "ic0": { 25 | "x": "8446592859352799428420270221449902464741693648963397251242447530457567083492", 26 | "y": "1064796367193003797175961162477173481551615790032213185848276823815288302804" 27 | }, 28 | "ic1": { 29 | "x": "3179835575189816632597428042194253779818690147323192973511715175294048485951", 30 | "y": "20895841676865356752879376687052266198216014795822152491318012491767775979074" 31 | }, 32 | "ic2": { 33 | "x": "5332723250224941161709478398807683311971555792614491788690328996478511465287", 34 | "y": "21199491073419440416471372042641226693637837098357067793586556692319371762571" 35 | }, 36 | "ic3": { 37 | "x": "12457994489566736295787256452575216703923664299075106359829199968023158780583", 38 | "y": "19706766271952591897761291684837117091856807401404423804318744964752784280790" 39 | }, 40 | "ic4": { 41 
| "x": "19617808913178163826953378459323299110911217259216006187355745713323154132237", 42 | "y": "21663537384585072695701846972542344484111393047775983928357046779215877070466" 43 | }, 44 | "ic5": { 45 | "x": "6834578911681792552110317589222010969491336870276623105249474534788043166867", 46 | "y": "15060583660288623605191393599883223885678013570733629274538391874953353488393" 47 | }, 48 | "w27": { 49 | "g00": "0", 50 | "g01": "0", 51 | "g10": "0", 52 | "g11": "0", 53 | "g20": "8204864362109909869166472767738877274689483185363591877943943203703805152849", 54 | "g21": "17912368812864921115467448876996876278487602260484145953989158612875588124088", 55 | "h00": "0", 56 | "h01": "0", 57 | "h10": "0", 58 | "h11": "0", 59 | "h20": "0", 60 | "h21": "0" 61 | } 62 | } -------------------------------------------------------------------------------- /scripts/verify_non_recursive.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # exit if any of scripts exit 4 | set -e 5 | 6 | # get aux pairing witness 7 | ./scripts/get_aux_witness_plonk.sh 8 | 9 | # test e2e proof 10 | ./scripts/e2e_verify_plonk.sh --------------------------------------------------------------------------------