├── __init__.py ├── proofs ├── .gitkeep └── fib.bin ├── src ├── stark_verifier │ ├── air │ │ ├── proof │ │ │ └── commitments.cairo │ │ ├── trace_info.cairo │ │ ├── table.cairo │ │ ├── pub_inputs.cairo │ │ ├── stark_proof.cairo │ │ └── air_instance.cairo │ ├── utils.cairo │ ├── utils.py │ ├── fri │ │ ├── utils.cairo │ │ ├── polynomials.cairo │ │ └── fri_verifier.cairo │ ├── composer.cairo │ ├── stark_verifier.cairo │ ├── evaluator.cairo │ ├── crypto │ │ └── random.cairo │ └── channel.cairo ├── utils │ ├── hex_utils.py │ ├── endianness.cairo │ ├── python_utils.cairo │ ├── math_goldilocks.cairo │ ├── utxo_dummy_generator.py │ ├── benchmark_block.py │ └── pow2.cairo └── crypto │ └── hash_utils.cairo ├── aero-sdk ├── .gitignore ├── src │ ├── webpack.d.ts │ ├── constraints_worker.ts │ ├── hashing_worker.ts │ ├── proving_worker.ts │ ├── sdk.ts │ └── demo │ │ └── index.ts ├── miden-wasm │ ├── src │ │ ├── convert │ │ │ ├── mod.rs │ │ │ ├── sdk.rs │ │ │ ├── convert_inputs.rs │ │ │ └── convert_proof.rs │ │ ├── hashing_worker.rs │ │ ├── constraints_worker.rs │ │ ├── pool.rs │ │ └── lib.rs │ ├── build.rs │ └── Cargo.toml ├── proto │ ├── commitments.proto │ ├── ood_frame.proto │ ├── common.proto │ ├── fri_proof.proto │ ├── miden_vm.proto │ ├── miden_prover.proto │ ├── queries.proto │ ├── service.proto │ ├── stark_proof.proto │ └── context.proto ├── tsconfig.json ├── README.md ├── webpack.config.js ├── package.json └── webpack.config.demo.js ├── rust-toolchain.toml ├── Cargo.toml ├── .vscode ├── settings.json └── ltex.dictionary.en-US.txt ├── .gitmodules ├── miden-proof-generator ├── src │ ├── lib.rs │ └── main.rs └── Cargo.toml ├── protostar.toml ├── miden-to-cairo-parser ├── README.md ├── Cargo.toml └── src │ ├── main.rs │ └── memory.rs ├── .gitignore ├── tests ├── integration │ ├── utils.py │ └── test_verifier.cairo └── unit │ └── test_math_g.cairo ├── Makefile ├── LICENSE └── README.md /__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /proofs/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/stark_verifier/air/proof/commitments.cairo: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /aero-sdk/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/* 2 | dist/* 3 | src/proto-ts/* 4 | build/* -------------------------------------------------------------------------------- /aero-sdk/src/webpack.d.ts: -------------------------------------------------------------------------------- 1 | declare let __webpack_public_path__: string; 2 | -------------------------------------------------------------------------------- /proofs/fib.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/starkoracles/Aero/HEAD/proofs/fib.bin -------------------------------------------------------------------------------- /src/stark_verifier/air/trace_info.cairo: -------------------------------------------------------------------------------- 1 | struct TraceInfo { 2 | trace_length: felt, 3 | } 4 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "nightly-2023-02-17" 3 | target = "wasm32-unknown-unknown" -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/convert/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod convert_inputs; 2 | pub mod 
convert_proof; 3 | pub mod sdk; 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["miden-proof-generator", "miden-to-cairo-parser", "aero-sdk/miden-wasm"] 3 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/convert/sdk.rs: -------------------------------------------------------------------------------- 1 | pub mod sdk { 2 | include!(concat!(env!("OUT_DIR"), "/sdk.rs")); 3 | } 4 | -------------------------------------------------------------------------------- /src/stark_verifier/air/table.cairo: -------------------------------------------------------------------------------- 1 | struct Table { 2 | n_rows: felt, 3 | n_cols: felt, 4 | elements: felt*, 5 | } 6 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "protoc": { 3 | "options": [ 4 | "--proto_path=sdk-api/proto" 5 | ] 6 | } 7 | } -------------------------------------------------------------------------------- /.vscode/ltex.dictionary.en-US.txt: -------------------------------------------------------------------------------- 1 | Composability 2 | Miden 3 | Starknet 4 | StarkVM 5 | zkVMs 6 | keccak 7 | composability 8 | prover 9 | Metamask 10 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "miden"] 2 | path = miden 3 | url = git@github.com:starkoracles/miden-vm.git 4 | [submodule "winterfell"] 5 | path = winterfell 6 | url = git@github.com:starkoracles/winterfell.git 7 | -------------------------------------------------------------------------------- /miden-proof-generator/src/lib.rs: 
-------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | #[derive(Serialize, Deserialize)] 3 | pub struct ProofData { 4 | pub input_bytes: Vec, 5 | pub proof_bytes: Vec, 6 | } 7 | -------------------------------------------------------------------------------- /aero-sdk/proto/commitments.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | 7 | message Commitments { 8 | repeated Digest trace_roots = 1; 9 | Digest constraint_root = 2; 10 | repeated Digest fri_roots = 3; 11 | } -------------------------------------------------------------------------------- /protostar.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | protostar-version = "0.9.0" 3 | lib-path = "src" 4 | cairo_path = ["src"] 5 | 6 | [profile.integration.test] 7 | target = ["tests/integration"] 8 | cairo-path = ["src"] 9 | 10 | [profile.unit.test] 11 | target = ["tests/unit"] 12 | cairo-path = ["src"] -------------------------------------------------------------------------------- /src/stark_verifier/utils.cairo: -------------------------------------------------------------------------------- 1 | struct Vec { 2 | n_elements: felt, 3 | elements: felt*, 4 | } 5 | 6 | struct Digest { 7 | element_0: felt, 8 | element_1: felt, 9 | element_2: felt, 10 | element_3: felt, 11 | element_4: felt, 12 | element_5: felt, 13 | element_6: felt, 14 | element_7: felt, 15 | } 16 | -------------------------------------------------------------------------------- /miden-to-cairo-parser/README.md: -------------------------------------------------------------------------------- 1 | # Proof parser 2 | 3 | This is a library to write Rust objects into a Cairo memory such that we can then simply cast it into a high-level Cairo object. 
In particular, this implements parsing of STARKs for proof recursion. 4 | 5 | 6 | ## Example usage 7 | 8 | ``` 9 | cargo +nightly run -- src/proof.bin 10 | ``` 11 | -------------------------------------------------------------------------------- /aero-sdk/proto/ood_frame.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | 7 | message EvaluationFrame { 8 | repeated FieldElement current = 1; 9 | repeated FieldElement next = 2; 10 | } 11 | 12 | message OodFrame { 13 | EvaluationFrame main_frame = 1; 14 | EvaluationFrame aux_frame = 2; 15 | repeated FieldElement evaluations = 3; 16 | } 17 | -------------------------------------------------------------------------------- /src/utils/hex_utils.py: -------------------------------------------------------------------------------- 1 | def chunks(lst, n): 2 | for i in range(0, len(lst), n): 3 | yield lst[i:i + n] 4 | 5 | 6 | def get_hex(memory, ptr): 7 | hex_str = "" 8 | for i in range(8): 9 | word = hex(memory[ptr+i])[2:].zfill(8) 10 | for nibs in list(chunks(word, 2))[::-1]: 11 | for nib in nibs: 12 | hex_str += nib 13 | return hex_str 14 | -------------------------------------------------------------------------------- /aero-sdk/proto/common.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | message FieldElement { 6 | // element le bytes 7 | bytes element = 2; 8 | } 9 | 10 | message Table { 11 | uint32 n_rows = 1; 12 | uint32 n_cols = 2; 13 | // stored as a single dim vector each row at a time 14 | repeated FieldElement elements = 3; 15 | } 16 | 17 | message Digest { 18 | // digest bytes 19 | bytes data = 2; 20 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will 
have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | 13 | # Added by cargo 14 | 15 | /target 16 | 17 | bin/stark_parser -------------------------------------------------------------------------------- /aero-sdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "noImplicitAny": true, 4 | "outDir": "build/types", 5 | "module": "esnext", 6 | "target": "es2017", 7 | "allowJs": true, 8 | "sourceMap": true, 9 | "declaration": true, 10 | "esModuleInterop": true, 11 | "allowSyntheticDefaultImports": true, 12 | "lib": [ 13 | "es2018", 14 | "dom" 15 | ], 16 | "moduleResolution": "node", 17 | }, 18 | "include": [ 19 | "src/**/*.ts", 20 | ], 21 | } -------------------------------------------------------------------------------- /aero-sdk/proto/fri_proof.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | import "queries.proto"; 7 | 8 | message FriProofLayer { 9 | repeated FieldElement values = 1; 10 | BatchMerkleProof proofs = 2; 11 | } 12 | 13 | message FriProof { 14 | // TODO - convert to prover agnostic format, now it's miden specific 15 | repeated FriProofLayer layers = 1; 16 | repeated FieldElement remainder = 2; 17 | uint32 num_partitions = 3; // stored as power of 2 18 | } -------------------------------------------------------------------------------- /aero-sdk/proto/miden_vm.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | 7 | message MidenProgramOutputs { 8 | /// The elements on the 
stack at the end of execution. 9 | repeated FieldElement stack = 1; 10 | /// The overflow table row addresse required to reconstruct the final state of the table. 11 | repeated FieldElement overflow_addrs = 2; 12 | } 13 | 14 | message MidenPublicInputs { 15 | Digest program_hash = 1; 16 | repeated FieldElement stack_inputs = 2; 17 | MidenProgramOutputs outputs = 3; 18 | } -------------------------------------------------------------------------------- /miden-proof-generator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "miden_proof_generator" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | bincode = "1.3.3" 8 | env_logger = "0.10.0" 9 | log = "0.4.17" 10 | miden = { version = "0.3.0", path = "../miden/miden" } 11 | miden-core = { version = "0.3.0", path = "../miden/core" } 12 | miden-stdlib = { version = "0.2.0", path = "../miden/stdlib" } 13 | miden-air = {version = "0.3.0", path = "../miden/air"} 14 | miden-verifier = {version = "0.3.0", path = "../miden/verifier"} 15 | serde = { version = "1.0.152", features = ["derive"] } 16 | -------------------------------------------------------------------------------- /aero-sdk/proto/miden_prover.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | // We will use a WASM module as the prover, so these data types will be used to interact with it 6 | message MidenProgram { 7 | string program = 1; 8 | } 9 | 10 | message MidenProgramInputs { 11 | repeated uint64 stack_init = 1; 12 | repeated uint64 advice_tape = 2; 13 | // TODO do we need these? 
14 | // advice_map: BTreeMap<[u8; 32], Vec>, 15 | // advice_sets: BTreeMap<[u8; 32], AdviceSet>, 16 | } 17 | 18 | // prove(MidenProgram, MidenProgramInputs, ProofOptions) -> (MidenProgramOutputs , StarkProof) -------------------------------------------------------------------------------- /aero-sdk/proto/queries.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | 7 | message BatchMerkleProofLayer { 8 | repeated Digest nodes = 1; 9 | } 10 | 11 | message BatchMerkleProof { 12 | repeated Digest leaves = 1; 13 | repeated BatchMerkleProofLayer nodes = 2; 14 | uint32 depth = 3; 15 | } 16 | 17 | message TraceQueries { 18 | Table main_states = 1; 19 | Table aux_states = 2; 20 | repeated BatchMerkleProof query_proofs = 3; 21 | } 22 | 23 | message ConstraintQueries { 24 | Table evaluations = 1; 25 | BatchMerkleProof query_proof = 2; 26 | } -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/build.rs: -------------------------------------------------------------------------------- 1 | use std::io::Result; 2 | 3 | fn main() -> Result<()> { 4 | let mut config = prost_build::Config::new(); 5 | config.btree_map(&["."]); 6 | config.compile_protos( 7 | &[ 8 | "service.proto", 9 | "stark_proof.proto", 10 | "miden_vm.proto", 11 | "commitments.proto", 12 | "common.proto", 13 | "context.proto", 14 | "fri_proof.proto", 15 | "miden_prover.proto", 16 | "ood_frame.proto", 17 | "queries.proto", 18 | ], 19 | &["../proto"], 20 | )?; 21 | Ok(()) 22 | } 23 | -------------------------------------------------------------------------------- /aero-sdk/README.md: -------------------------------------------------------------------------------- 1 | # AERO sdk 2 | Users can leverage the npm api library to generate proofs in the browser. 
The library leverages protobuf/grpc to create 3 | a consistent user interaction across the sdk, prover, and grpc backend service. 4 | 5 | * [Prover interface](https://github.com/starkoracles/starknet-miden-verifier/blob/proto-api/sdk-api/src/sdk.ts) 6 | * [GRPC service](https://github.com/starkoracles/starknet-miden-verifier/blob/proto-api/sdk-api/proto/service.proto) 7 | * [Usage example](https://github.com/starkoracles/starknet-miden-verifier/blob/proto-api/sdk-api/src/demo/index.ts#L21) 8 | 9 | # Demo 10 | ``` 11 | npm run build 12 | npm run serve:demo 13 | ``` -------------------------------------------------------------------------------- /tests/integration/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | 4 | 5 | def parse_proof(program_name): 6 | completed_process = subprocess.run([ 7 | 'bin/stark_parser', 8 | f'proofs/{program_name}.bin', 9 | 'proof'], 10 | capture_output=True) 11 | return completed_process.stdout 12 | 13 | 14 | def parse_public_inputs(program_name): 15 | pwd = subprocess.run(['pwd'], capture_output=True).stdout[:-1] 16 | completed_process = subprocess.run([ 17 | 'bin/stark_parser', 18 | f'proofs/{program_name}.bin', 19 | 'public-inputs'], 20 | capture_output=True) 21 | return completed_process.stdout 22 | -------------------------------------------------------------------------------- /aero-sdk/proto/service.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "stark_proof.proto"; 6 | import "miden_vm.proto"; 7 | 8 | enum TargetChain { 9 | STARKNET = 0; 10 | } 11 | 12 | enum SourceProofSystem { 13 | MIDEN = 0; 14 | } 15 | 16 | message ProofSubmissionRequest { 17 | StarkProof proof = 1; 18 | MidenPublicInputs public_inputs = 2; 19 | SourceProofSystem source_proof_system = 3; 20 | TargetChain target_chain = 4; 21 | } 22 | 23 | message ProofSubmissionResponse { 24 | 
string receipt = 1; 25 | } 26 | 27 | service ProofSubmissionService { 28 | rpc SubmitProof(ProofSubmissionRequest) returns (ProofSubmissionResponse); 29 | } -------------------------------------------------------------------------------- /aero-sdk/src/constraints_worker.ts: -------------------------------------------------------------------------------- 1 | import init from "miden-wasm"; 2 | import { constraint_entry_point } from "miden-wasm"; 3 | 4 | self.onmessage = event => { 5 | let initialised = init().catch(err => { 6 | // Propagate to main `onerror`: 7 | setTimeout(() => { 8 | throw err; 9 | }); 10 | // Rethrow to keep promise rejected and prevent execution of further commands: 11 | throw err; 12 | }); 13 | 14 | self.onmessage = async event => { 15 | // This will queue further commands up until the module is fully initialised: 16 | await initialised; 17 | constraint_entry_point(event); 18 | }; 19 | }; 20 | -------------------------------------------------------------------------------- /aero-sdk/src/hashing_worker.ts: -------------------------------------------------------------------------------- 1 | import init from "miden-wasm"; 2 | import { hashing_entry_point } from "miden-wasm"; 3 | 4 | self.onmessage = event => { 5 | console.debug("Hashing received init:", event.data); 6 | let initialised = init().catch(err => { 7 | // Propagate to main `onerror`: 8 | setTimeout(() => { 9 | throw err; 10 | }); 11 | // Rethrow to keep promise rejected and prevent execution of further commands: 12 | throw err; 13 | }); 14 | 15 | self.onmessage = async event => { 16 | // This will queue further commands up until the module is fully initialised: 17 | await initialised; 18 | hashing_entry_point(event); 19 | }; 20 | }; 21 | -------------------------------------------------------------------------------- /aero-sdk/src/proving_worker.ts: -------------------------------------------------------------------------------- 1 | import init, { MidenProverAsyncWorker } from 
"miden-wasm"; 2 | import { proving_entry_point } from "miden-wasm"; 3 | 4 | async function initialize(): Promise { 5 | await init(); 6 | return new MidenProverAsyncWorker(); 7 | } 8 | 9 | let full_init = initialize(); 10 | let prover: MidenProverAsyncWorker = null; 11 | 12 | self.onmessage = async event => { 13 | console.trace("Proving worker received init:", event.data); 14 | prover = await full_init; 15 | self.onmessage = async event => { 16 | // maintain the reference to the worker pool 17 | let new_prover = prover.reset(); 18 | await proving_entry_point(new_prover, event); 19 | }; 20 | }; 21 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean 2 | 3 | BIN_DIR = ./bin 4 | STARK_PARSER = $(BIN_DIR)/stark_parser 5 | 6 | clean: 7 | rm -rf $(BIN_DIR) 8 | 9 | $(STARK_PARSER): 10 | cargo build; 11 | mkdir -p bin; 12 | cp target/debug/miden_to_cairo_parser bin/stark_parser 13 | 14 | generate_proof: 15 | cargo run -p miden_proof_generator 16 | 17 | integration_test: $(STARK_PARSER) 18 | @echo "Running integration tests..." 19 | PYTHONPATH=$$(echo pwd)/tests:$$(python3.9 -c "import site; print(site.getsitepackages()[0])"):$$PYTHONPATH protostar -p integration test --max-steps 100000000 20 | 21 | unit_test: 22 | @echo "Running unit tests..." 
23 | PYTHONPATH=$$(echo pwd)/tests:$$(python3.9 -c "import site; print(site.getsitepackages()[0])"):$$PYTHONPATH protostar -p unit test 24 | -------------------------------------------------------------------------------- /src/utils/endianness.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.cairo_builtins import BitwiseBuiltin 2 | 3 | // Swap the endianness of an uint32 4 | func byteswap32{bitwise_ptr: BitwiseBuiltin*}(uint32) -> felt { 5 | alloc_locals; 6 | assert bitwise_ptr[0].x = uint32; 7 | assert bitwise_ptr[0].y = 0xFF00FF00; 8 | assert bitwise_ptr[1].x = bitwise_ptr[0].x_and_y / 2 ** 8 + (uint32 - bitwise_ptr[0].x_and_y) * 2 ** 8; 9 | assert bitwise_ptr[1].y = 0xFFFF0000; 10 | let uint32_endian = bitwise_ptr[1].x_and_y / 2 ** 16 + (bitwise_ptr[1].x - bitwise_ptr[1].x_and_y) * 2 ** 16; 11 | let bitwise_ptr = bitwise_ptr + BitwiseBuiltin.SIZE * 2; 12 | return uint32_endian; 13 | } 14 | 15 | // Swap the endianness of an uint16 16 | func byteswap16{bitwise_ptr: BitwiseBuiltin*}(uint16) -> felt { 17 | alloc_locals; 18 | assert [bitwise_ptr].x = uint16; 19 | assert [bitwise_ptr].y = 0xFF00; 20 | let uint16_endian = [bitwise_ptr].x_and_y / 2 ** 8 + (uint16 - [bitwise_ptr].x_and_y) * 2 ** 8; 21 | let bitwise_ptr = bitwise_ptr + BitwiseBuiltin.SIZE; 22 | return uint16_endian; 23 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 starkoracles 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software 
is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /aero-sdk/webpack.config.js: -------------------------------------------------------------------------------- 1 | 2 | const path = require('path'); 3 | 4 | module.exports = { 5 | mode: "development", 6 | devtool: 'source-map', 7 | entry: './src/sdk.ts', 8 | output: { 9 | filename: 'sdk.js', 10 | path: path.resolve(__dirname, 'build'), 11 | library: "MyLibrary", 12 | libraryTarget: 'umd', 13 | clean: true 14 | }, 15 | module: { 16 | rules: [ 17 | { 18 | test: /\.(js)x?$/, 19 | exclude: /node_modules/, 20 | use: "babel-loader", 21 | }, 22 | { 23 | test: /\.(ts)x?$/, 24 | exclude: /node_modules|\.d\.ts$/, // this line as well 25 | use: { 26 | loader: "ts-loader", 27 | options: { 28 | compilerOptions: { 29 | noEmit: false, // this option will solve the issue 30 | }, 31 | }, 32 | }, 33 | }, 34 | ] 35 | }, 36 | resolve: { 37 | extensions: ['.ts', '.js', '.json'] 38 | }, 39 | }; 40 | -------------------------------------------------------------------------------- /aero-sdk/proto/stark_proof.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "context.proto"; 6 | import "commitments.proto"; 7 | import 
"queries.proto"; 8 | import "ood_frame.proto"; 9 | import "fri_proof.proto"; 10 | 11 | 12 | // A STARK proof for a computation. 13 | message StarkProof { 14 | // Basic metadata about the execution of the computation described by this proof. 15 | Context context = 1; 16 | // Commitments made by the prover during the commit phase of the protocol. 17 | Commitments commitments = 2; 18 | // Decommitments of extended execution trace values (for all trace segments) at position 19 | // queried by the verifier. 20 | TraceQueries trace_queries = 3; 21 | // Decommitments of constraint composition polynomial evaluations at positions queried by 22 | // the verifier. 23 | ConstraintQueries constraint_queries = 4; 24 | // Trace and constraint polynomial evaluations at an out-of-domain point. 25 | OodFrame ood_frame = 5; 26 | // Low-degree proof for a DEEP composition polynomial. 27 | FriProof fri_proof = 6; 28 | // Proof-of-work nonce for query seed grinding. 29 | uint64 pow_nonce = 7; 30 | } -------------------------------------------------------------------------------- /src/stark_verifier/air/pub_inputs.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | 3 | from stark_verifier.utils import Vec 4 | 5 | struct MemEntry { 6 | address: felt, 7 | value: felt, 8 | } 9 | 10 | struct ProgramOutputs { 11 | stack_len: felt, 12 | stack: felt*, 13 | overflow_addrs_len: felt, 14 | overflow_addrs: felt*, 15 | } 16 | 17 | struct PublicInputs { 18 | program_hash_len: felt, 19 | program_hash: felt*, 20 | stack_inputs_len: felt, 21 | stack_inputs: felt*, 22 | outputs: ProgramOutputs, 23 | } 24 | 25 | func read_public_inputs() -> PublicInputs* { 26 | let (pub_inputs_ptr: PublicInputs*) = alloc(); 27 | %{ 28 | from src.stark_verifier.utils import write_into_memory 29 | write_into_memory(ids.pub_inputs_ptr, json_data, segments) 30 | %} 31 | return pub_inputs_ptr; 32 | } 33 | 34 | func read_mem_values(mem: 
MemEntry*, address: felt, length: felt, output: felt*) { 35 | if (length == 0) { 36 | return (); 37 | } 38 | assert mem.address = address; 39 | assert output[0] = mem.value; 40 | return read_mem_values(mem=&mem[1], address=address + 1, length=length - 1, output=&output[1]); 41 | } 42 | -------------------------------------------------------------------------------- /aero-sdk/proto/context.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package sdk; 4 | 5 | import "common.proto"; 6 | 7 | enum HashFunction { 8 | // we only support blake2s for now 9 | BLAKE2S = 0; 10 | } 11 | 12 | enum FieldExtension { 13 | // we do not support field extension yet 14 | NONE = 0; 15 | // QUADRATIC = 2; 16 | // CUBIC = 3; 17 | } 18 | 19 | enum PrimeField { 20 | // goldilocks field represented by 64 bit field elements 21 | GOLDILOCKS = 0; 22 | } 23 | 24 | message ProofOptions { 25 | uint32 num_queries = 1; 26 | uint32 blowup_factor = 2; 27 | uint32 grinding_factor = 3; 28 | HashFunction hash_fn = 4; 29 | FieldExtension field_extension = 5; 30 | uint32 fri_folding_factor = 6; 31 | uint32 fri_max_remainder_size = 7; 32 | PrimeField prime_field = 8; 33 | } 34 | 35 | message TraceLayout { 36 | uint64 main_segment_width = 1; 37 | repeated uint64 aux_segment_widths = 2 [packed=true]; 38 | repeated uint64 aux_segment_rands = 3 [packed=true]; 39 | uint64 num_aux_segments = 4; 40 | } 41 | 42 | message Context { 43 | TraceLayout trace_layout = 1; 44 | uint64 trace_length = 2; 45 | bytes trace_meta = 3; 46 | FieldElement field_modulus = 4; 47 | ProofOptions options = 5; 48 | } -------------------------------------------------------------------------------- /miden-to-cairo-parser/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "miden_to_cairo_parser" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | hex = "0.4" 8 | bincode = "1.3.3" 9 | 
serde = { version = "1.0.152", features = ["derive"] } 10 | serde_json = "1.0.86" 11 | clap = { version = "4.0.18", features = ["derive"] } 12 | winterfell = { package = "winter-verifier", path = "../winterfell/verifier", version = "0.4", default-features = false } 13 | winter_math = { package = "winter-math", path = "../winterfell/math", version = "0.4", default-features = false } 14 | winter_utils = { package = "winter-utils", path = "../winterfell/utils/core", version = "0.4", default-features = false } 15 | winter_crypto = { package = "winter-crypto", path = "../winterfell/crypto", version = "0.4", default-features = false } 16 | winter_air = { package = "winter-air", path = "../winterfell/air", version = "0.4", default-features = false } 17 | winter_fri = { package = "winter-fri", path = "../winterfell/fri", version = "0.4", default-features = false } 18 | miden-air = { path = "../miden/air", version = "0.3.0" } 19 | miden-core = { path = "../miden/core", version = "0.3.0" } 20 | winter-crypto = { version = "0.4.0", path = "../winterfell/crypto" } 21 | miden_proof_generator = { path = "../miden-proof-generator", version = "0.1.0" } 22 | -------------------------------------------------------------------------------- /tests/unit/test_math_g.cairo: -------------------------------------------------------------------------------- 1 | %lang starknet 2 | 3 | from utils.math_goldilocks import sub_g, PG, add_g, mul_g, inv_g, pow_g 4 | 5 | @external 6 | func test_sub_g{range_check_ptr}() { 7 | let res = sub_g(2, 1); 8 | assert res = 1; 9 | 10 | let reverse = sub_g(1, 2); 11 | assert reverse = PG - 1; 12 | 13 | return (); 14 | } 15 | 16 | @external 17 | func test_add_g{range_check_ptr}() { 18 | let res = add_g(2, 1); 19 | assert res = 3; 20 | 21 | let reverse = add_g(PG - 1, 2); 22 | assert reverse = 1; 23 | 24 | return (); 25 | } 26 | 27 | @external 28 | func test_mul_g{range_check_ptr}() { 29 | let res = mul_g(2, 5); 30 | assert res = 10; 31 | 32 | let t = PG - 1; 33 
| let overflow = mul_g(t, 2); 34 | assert overflow = PG - 2; 35 | 36 | let overflow2 = mul_g(t, 4); 37 | assert overflow2 = PG - 4; 38 | 39 | return (); 40 | } 41 | 42 | @external 43 | func test_inv_g{range_check_ptr}() { 44 | let l1 = 25; 45 | let l2 = inv_g(l1); 46 | 47 | assert mul_g(l1, l2) = 1; 48 | 49 | let l3 = 55; 50 | let l4 = inv_g(l3); 51 | 52 | assert mul_g(l3, l4) = 1; 53 | 54 | return (); 55 | } 56 | 57 | @external 58 | func test_pow_g{range_check_ptr}() { 59 | let b1 = 5; 60 | let e1 = 3; 61 | 62 | let r1 = pow_g(b1, e1); 63 | 64 | assert r1 = 125; 65 | 66 | let b2 = PG - 5; 67 | let e2 = 2; 68 | 69 | let r2 = pow_g(b2, e2); 70 | let expected = mul_g(b2, b2); 71 | 72 | assert r2 = expected; 73 | 74 | return (); 75 | } 76 | -------------------------------------------------------------------------------- /src/crypto/hash_utils.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.memcpy import memcpy 2 | 3 | from utils.serialize import UINT32_SIZE 4 | 5 | const HASH_FELT_SIZE = 8; 6 | 7 | // Copy a hash represented as an array of 8 x Uint32. 8 | // It reads from `source` and writes to `destination` 9 | func copy_hash(source: felt*, destination: felt*) { 10 | memcpy(destination, source, HASH_FELT_SIZE); 11 | return (); 12 | } 13 | 14 | // Assert equality of two hashes represented as an array of 8 x Uint32 15 | // 16 | func assert_hashes_equal(hash1: felt*, hash2: felt*) { 17 | // We're doing some odd gymnastics here, 18 | // because in Cairo it isn't straight-forward to determine if a variable is uninitialized. 19 | // The hack `assert 0 = a - b` ensures that both `a` and `b` are initialized. 
20 | let h1 = hash1[0]; 21 | let h2 = hash2[0]; 22 | assert 0 = hash1[0] - hash2[0]; 23 | assert 0 = hash1[1] - hash2[1]; 24 | assert 0 = hash1[2] - hash2[2]; 25 | assert 0 = hash1[3] - hash2[3]; 26 | assert 0 = hash1[4] - hash2[4]; 27 | assert 0 = hash1[5] - hash2[5]; 28 | assert 0 = hash1[6] - hash2[6]; 29 | assert 0 = hash1[7] - hash2[7]; 30 | return (); 31 | } 32 | 33 | func assert_hashes_not_equal(hash1: felt*, hash2: felt*) { 34 | return _assert_hashes_not_equal_loop(hash1, hash2, HASH_FELT_SIZE); 35 | } 36 | 37 | func _assert_hashes_not_equal_loop(hash1_ptr: felt*, hash2_ptr: felt*, length) { 38 | if (length == 0) { 39 | assert 1 = 0; 40 | return (); 41 | } 42 | if ([hash1_ptr] != [hash2_ptr]) { 43 | return (); 44 | } 45 | return _assert_hashes_not_equal_loop(hash1_ptr + 1, hash2_ptr + 1, length - 1); 46 | } 47 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/hashing_worker.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::{ 2 | from_uint8array, set_once_logger, to_uint8array, HashingResult, HashingWorkItem, 3 | }; 4 | use js_sys::Uint8Array; 5 | use log::debug; 6 | use miden_core::Felt; 7 | use wasm_bindgen::prelude::*; 8 | use web_sys::{DedicatedWorkerGlobalScope, MessageEvent}; 9 | use winter_crypto::hashers::Blake2s_256; 10 | use winter_crypto::ElementHasher; 11 | 12 | pub fn blake2_hash_elements(work_item: &HashingWorkItem) -> Result { 13 | let mut hashes = vec![]; 14 | for row in work_item.data.iter() { 15 | let converted_row: Vec = row.iter().map(|f| f.clone().into()).collect(); 16 | let r = Blake2s_256::hash_elements(&converted_row[..]); 17 | hashes.push(r.0); 18 | } 19 | debug!("done processing hashes for batch {}", work_item.batch_idx); 20 | 21 | let response = HashingResult { 22 | batch_idx: work_item.batch_idx, 23 | hashes, 24 | }; 25 | Ok(to_uint8array(&response)) 26 | } 27 | 28 | #[wasm_bindgen] 29 | pub fn 
hashing_entry_point(msg: MessageEvent) -> Result<(), JsValue> { 30 | set_once_logger(); 31 | if let Ok(work_item) = from_uint8array::(&Uint8Array::new(&msg.data())) { 32 | debug!( 33 | "Hashing worker received work item: {:?}", 34 | work_item.batch_idx 35 | ); 36 | let response = blake2_hash_elements(&work_item)?; 37 | let global_scope = js_sys::global().unchecked_into::(); 38 | global_scope.post_message(&response)?; 39 | } else { 40 | debug!("Hashing worker received invalid work item"); 41 | } 42 | Ok(()) 43 | } 44 | -------------------------------------------------------------------------------- /src/stark_verifier/utils.py: -------------------------------------------------------------------------------- 1 | import json 2 | import subprocess 3 | 4 | PWD = subprocess.run(['pwd'], capture_output=True).stdout[:-1].decode("utf-8") 5 | PROOF_PATH = f'{PWD}/proofs/fib.bin' 6 | PARSER_PATH = f'{PWD}/bin/stark_parser' 7 | CAIRO_PRIME = 2**251 + 17 * 2**192 + 1 8 | 9 | 10 | def write_into_memory(ptr, json_data, segments): 11 | addr = ptr 12 | if hasattr(ptr, 'address_'): 13 | addr = ptr.address_ 14 | 15 | my_array = json.loads(json_data) 16 | # Note the following: 17 | # - Addresses are stored as `Relocatable` values in the Cairo VM. 18 | # - The "+" operator is overloaded to perform pointer arithmetics. 19 | # - Felts are hex encoded starting with "0x". The virtual addresses are encoded as decimals. 
20 | my_memory = [(int(x, 16) if x.startswith('0x') else addr + int(x)) 21 | for x in my_array] 22 | segments.write_arg(addr, my_memory) 23 | # print(addr, my_memory) 24 | 25 | 26 | def index_of(elements_ptr, n_elements, element, memory): 27 | for i in range(n_elements): 28 | if (memory[elements_ptr + i] == element): 29 | return i 30 | return CAIRO_PRIME-1 31 | 32 | 33 | def read_fri_queries_proofs(positions_ptr, fri_queries_proof_ptr, num_queries, memory, segments): 34 | positions = to_json_array(positions_ptr, num_queries, memory) 35 | 36 | completed_process = subprocess.run( 37 | [PARSER_PATH, PROOF_PATH, 'fri-queries', positions], 38 | capture_output=True) 39 | 40 | json_data = completed_process.stdout 41 | write_into_memory(fri_queries_proof_ptr, json_data, segments) 42 | 43 | 44 | def to_json_array(arr_ptr, arr_length, memory): 45 | arr = [] 46 | for i in range(arr_length): 47 | arr.append(memory[arr_ptr + i]) 48 | return json.dumps(arr) 49 | -------------------------------------------------------------------------------- /aero-sdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aero-sdk", 3 | "version": "1.0.0", 4 | "description": "Allow users to run proofs in the browser and submit on-chain", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "jest", 8 | "build:proto": "rm -rf src/proto-ts && mkdir -p src/proto-ts && protoc --plugin=node_modules/ts-proto/protoc-gen-ts_proto --ts_proto_opt=esModuleInterop=true proto/miden_prover.proto proto/miden_vm.proto proto/stark_proof.proto --ts_proto_out=./src/proto-ts -Iproto", 9 | "build:wasm": "rm -rf node_modules/miden-wasm && cd miden-wasm && wasm-pack build --release --target web --out-name miden-wasm --out-dir pkg && cd .. && npm install", 10 | "build:wasm:debug": "rm -rf node_modules/miden-wasm && cd miden-wasm && wasm-pack build --debug --target web --out-name miden-wasm --out-dir pkg && cd .. 
&& npm install", 11 | "build": "npm run build:wasm && npm run build:proto && tsc", 12 | "build:debug": "npm run build:wasm:debug && npm run build:proto && tsc", 13 | "serve:demo": "webpack serve --config webpack.config.demo.js --mode development --open" 14 | }, 15 | "keywords": [ 16 | "zkp", 17 | "miden" 18 | ], 19 | "author": "starkoracles", 20 | "license": "MIT", 21 | "dependencies": { 22 | "miden-wasm": "file:./miden-wasm/pkg", 23 | "protobufjs": "^7.2.3", 24 | "ts-proto": "^1.146.0" 25 | }, 26 | "devDependencies": { 27 | "@types/jest": "^29.5.0", 28 | "@types/node": "^18.15.11", 29 | "babel-loader": "^9.1.2", 30 | "html-webpack-plugin": "^5.5.0", 31 | "jest": "^29.5.0", 32 | "ts-jest": "^29.1.0", 33 | "ts-loader": "^9.4.2", 34 | "ts-node": "^10.9.1", 35 | "typescript": "^5.0.4", 36 | "webpack-cli": "^5.0.1", 37 | "webpack-dev-server": "^4.13.2", 38 | "worker-loader": "^3.0.8" 39 | } 40 | } -------------------------------------------------------------------------------- /src/stark_verifier/fri/utils.cairo: -------------------------------------------------------------------------------- 1 | // Evaluate l_j(x) for a fixed j and a specified x. Should loop for 0 <= m <= x_i_len. 
2 | func lagrange_basis_eval(x_i: felt*, x_i_len, x, j, m) -> felt { 3 | if (m + 1 == x_i_len) { 4 | if (j == m) { 5 | return 1; 6 | } 7 | let numerator = x - x_i[m]; 8 | let denominator = x_i[j] - x_i[m]; 9 | 10 | return (numerator / denominator); 11 | } 12 | 13 | if (j == m) { 14 | return lagrange_basis_eval(x_i, x_i_len, x, j, m + 1); 15 | } 16 | 17 | let numerator = x - x_i[m]; 18 | let denominator = x_i[j] - x_i[m]; 19 | 20 | // Reduce to the product of all numerator / denominator 21 | let old_product = lagrange_basis_eval(x_i, x_i_len, x, j, m + 1); 22 | return (numerator / denominator) * old_product; 23 | } 24 | 25 | 26 | // Evaluates L(x) 27 | func lagrange_sum_eval(evaluations_y: felt*, evaluations_x: felt*, evaluations_len, x, j) -> felt { 28 | alloc_locals; 29 | if (j + 1 == evaluations_len) { 30 | let l_j = lagrange_basis_eval(evaluations_x, evaluations_len, x, j, 0); 31 | return evaluations_y[j] * l_j; 32 | } 33 | 34 | // Reduce to the sum of all l_j * y_j 35 | let old_sum = lagrange_sum_eval(evaluations_y, evaluations_x, evaluations_len, x, j + 1); 36 | let l_j = lagrange_basis_eval(evaluations_x, evaluations_len, x, j, 0); 37 | return evaluations_y[j] * l_j + old_sum; 38 | } 39 | 40 | // Evaluate with input x using Lagrange interpolation over evaluations. 
41 | func lagrange_eval(evaluations_y: felt*, evaluations_x: felt*, evaluations_len, x) -> felt { 42 | return lagrange_sum_eval(evaluations_y, evaluations_x, evaluations_len, x, 0); 43 | } 44 | 45 | // Evaluate the split and fold step of FRI commit for folding factor 2 46 | // https://aszepieniec.github.io/stark-anatomy/fri 47 | func evaluate_polynomial(evaluations_x : felt *, evaluations_len, x, alpha) -> felt { 48 | return (1 / 2) * (evaluations_x[0] * (1 + alpha / x) + evaluations_x[1] * (1 - alpha / x)); 49 | } 50 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/convert/convert_inputs.rs: -------------------------------------------------------------------------------- 1 | use std::convert::TryInto; 2 | 3 | use crate::sdk; 4 | use crate::sdk::{MidenProgram, MidenProgramInputs}; 5 | use miden::{Assembler, FieldExtension, HashFunction, Program, ProgramInputs, ProofOptions}; 6 | use miden_stdlib::StdLibrary; 7 | 8 | impl Into for MidenProgramInputs { 9 | fn into(self) -> ProgramInputs { 10 | ProgramInputs::new(&self.stack_init, &self.advice_tape, vec![]) 11 | .expect("cannot parse miden program inputs") 12 | } 13 | } 14 | 15 | impl From for sdk::FieldElement { 16 | fn from(value: u64) -> Self { 17 | Self { 18 | element: value.to_le_bytes().to_vec(), 19 | } 20 | } 21 | } 22 | 23 | impl Into for &sdk::FieldElement { 24 | fn into(self) -> u64 { 25 | u64::from_le_bytes(self.element.clone().try_into().unwrap()) 26 | } 27 | } 28 | 29 | impl Into for MidenProgram { 30 | fn into(self) -> Program { 31 | Assembler::new() 32 | .with_module_provider(StdLibrary::default()) 33 | .compile(&self.program) 34 | .expect("cannot assemble miden program") 35 | } 36 | } 37 | 38 | impl Into for sdk::HashFunction { 39 | fn into(self) -> HashFunction { 40 | match self { 41 | sdk::HashFunction::Blake2s => HashFunction::Blake2s_256, 42 | } 43 | } 44 | } 45 | 46 | impl Into for sdk::FieldExtension { 47 | fn into(self) -> FieldExtension 
{ 48 | match self { 49 | sdk::FieldExtension::None => FieldExtension::None, 50 | } 51 | } 52 | } 53 | 54 | impl Into for sdk::ProofOptions { 55 | fn into(self) -> ProofOptions { 56 | ProofOptions::new( 57 | self.num_queries as usize, 58 | self.blowup_factor as usize, 59 | self.grinding_factor, 60 | self.hash_fn().into(), 61 | self.field_extension().into(), 62 | self.fri_folding_factor as usize, 63 | self.fri_max_remainder_size as usize, 64 | ) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /aero-sdk/webpack.config.demo.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | const HtmlWebpackPlugin = require('html-webpack-plugin'); 3 | 4 | module.exports = { 5 | mode: "development", 6 | devtool: 'cheap-module-source-map', 7 | entry: './src/demo/index.ts', 8 | output: { 9 | filename: 'index.js' 10 | }, 11 | optimization: { 12 | minimize: false, 13 | }, 14 | module: { 15 | rules: [ 16 | { 17 | test: /\.(m|j|t)s$/, 18 | exclude: /(node_modules|bower_components)/, 19 | use: { 20 | loader: 'babel-loader' 21 | } 22 | }, 23 | { 24 | test: /\.(ts)x?$/, 25 | exclude: /node_modules|\.d\.ts$/, // this line as well 26 | use: { 27 | loader: "ts-loader" 28 | }, 29 | }, 30 | { 31 | test: /src\/hashing_worker\.ts$/, 32 | use: [{ 33 | loader: 'worker-loader', 34 | options: { 35 | filename: 'hashing_worker.js', 36 | } 37 | }, { loader: 'ts-loader', }], 38 | }, 39 | { 40 | test: /src\/proving_worker\.ts$/, 41 | use: [{ 42 | loader: 'worker-loader', 43 | options: { 44 | filename: 'proving_worker.js', 45 | } 46 | }, { loader: 'ts-loader', }], 47 | }, 48 | { 49 | test: /src\/constraints_worker\.ts$/, 50 | use: [{ 51 | loader: 'worker-loader', 52 | options: { 53 | filename: 'constraints_worker.js', 54 | } 55 | }, { loader: 'ts-loader', }], 56 | }, 57 | ] 58 | }, 59 | plugins: [ 60 | new HtmlWebpackPlugin(), 61 | ], 62 | resolve: { 63 | extensions: ['.ts', '.js', '.json'] 
64 | }, 65 | experiments: { 66 | topLevelAwait: true 67 | } 68 | }; -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "miden-wasm" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | prost = "0.11.8" 8 | wasm-bindgen = "0.2.84" 9 | wasm-bindgen-console-logger = "0.1.1" 10 | wasm-bindgen-futures = "0.4" 11 | futures = "0.3" 12 | miden = { version = "0.3.0", path = "../../miden/miden", default-features = false } 13 | miden-core = { version = "0.3.0", path = "../../miden/core", default-features = false } 14 | miden-stdlib = { version = "0.2.0", path = "../../miden/stdlib", default-features = false } 15 | miden-air = {version = "0.3.0", path = "../../miden/air", default-features = false} 16 | miden-verifier = {version = "0.3.0", path = "../../miden/verifier", default-features = false} 17 | miden-processor = {version = "0.3.0", path = "../../miden/processor", default-features = false} 18 | miden-prover = {version = "0.3.0", path = "../../miden/prover", default-features = false} 19 | winter_fri = { package = "winter-fri", path = "../../winterfell/fri", version = "0.4", default-features = false } 20 | winter_air = { package = "winter-air", path = "../../winterfell/air", version = "0.4", default-features = false } 21 | winter_crypto = { package = "winter-crypto", path = "../../winterfell/crypto", version = "0.4", features = ["wasm"] } 22 | winter_verifier = { package = "winter-verifier", path = "../../winterfell/verifier", version = "0.4", default-features = false } 23 | winter_prover = { package = "winter-prover", path = "../../winterfell/prover", version = "0.4", features = ["wasm"], default-features = false } 24 | winter_utils = { package = "winter-utils", path = "../../winterfell/utils/core", version = "0.4", default-features = false } 25 | log = "0.4.17" 26 | web-sys = { version = 
"0.3.61", features = ["console", "DedicatedWorkerGlobalScope", "ErrorEvent", "Window", "Navigator", "WorkerNavigator"] } 27 | js-sys = "0.3.61" 28 | serde = { version = "1", features = ["derive"] } 29 | serde_json = "1.0.64" 30 | bincode = "1.3.1" 31 | console_error_panic_hook = "0.1.7" 32 | serde_bytes = "0.11.9" 33 | 34 | [build-dependencies] 35 | prost-build = {version = "0.11.8"} 36 | 37 | [dev-dependencies] 38 | hex = "0.4.3" 39 | wasm-bindgen-test = "0.3.34" 40 | 41 | [profile.release] 42 | codegen-units = 1 43 | incremental = false 44 | lto = "fat" 45 | opt-level = "s" 46 | panic = "abort" 47 | 48 | [package.metadata.wasm-pack.profile.release] 49 | wasm-opt = ["-O3"] 50 | 51 | [lib] 52 | crate-type = ["cdylib", "rlib"] 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Aero 2 | A Blazingly fast toolkit for running proofs in the browser and verifying them on-chain. 3 | 4 | ## Miden verifier on Starknet 5 | [MidenVM](https://github.com/0xPolygonMiden/miden-vm) is an MIT licensed StarkVM which can now be verified on Starknet. 6 | 7 | [![Twitter URL](https://img.shields.io/twitter/follow/stark_oracle?style=social)](https://twitter.com/stark_oracle) 8 | 9 | **The code in the project is incomplete, DO NOT USE IN PRODUCTION!!!** 10 | 11 | ## Why should you care? 12 | 13 | * Composability - Different zkVMs take different design trade-offs. Making Miden proofs verifiable on Starknet increases the design space 14 | application can be built. For example, certain VMs might make trade-offs that make it cheaper to compute keccak, by allowing composability 15 | we can offload those computations to the appropriate VMs but leverage network effects to achieve cost-effectiveness. 16 | 17 | * Privacy - Since Miden prover is fully open-source, you can generate proofs locally and verify them on Starknet. 
The proof itself 18 | will divulge a lot less about the activity than the current approach of running your contracts directly on Starknet. 19 | 20 | * Mobile - Miden is an extremely efficient prover which can be run with much smaller RAM configurations. Furthermore, Miden prover can 21 | be compiled down to WASM which allows easy integration with web applications, including Metamask. 22 | 23 | ## Requirements 24 | - Python 3.9 (Activate environment: `source ~/cairo_venv/bin/activate`) 25 | - Cairo. [Installation Guide](https://www.cairo-lang.org/docs/quickstart.html) (Programming language for provable programs) 26 | - [Protostar](https://docs.swmansion.com/protostar/docs/tutorials/installation) (Automated testing) 27 | - Rustup 28 | 29 | ## Commands 30 | 31 | ### Generate proof 32 | ``` 33 | make generate_proof 34 | ``` 35 | 36 | ### Verify in Cairo 37 | ``` 38 | make integration_test 39 | ``` 40 | 41 | ## Roadmap 42 | 43 | * Add AIR verification - Use AirScript to generate constraints in Cairo 44 | * Enable extension field for security 45 | * Eliminate hard-coded parameters 46 | * Deploy on Starknet 47 | 48 | ## Changelog 49 | * Modify ZeroSync to match Miden's trace layout 50 | * Change all field operations to goldilocks (this can be configured to be any field smaller than Cairo's field) 51 | * Change Winterfell to work with blake2s (match Cairo's implementation) 52 | * Remove ZeroSync dependencies that require Python<>Rust integration 53 | 54 | ## Acknowledgements 55 | 56 | This code is heavily reliant on the work done by [ZeroSync](https://github.com/ZeroSync/ZeroSync) and [Max Gillet](https://github.com/maxgillett), please give them a star for their great work! 
57 | -------------------------------------------------------------------------------- /src/stark_verifier/air/stark_proof.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from starkware.cairo.common.uint256 import Uint256 3 | 4 | from stark_verifier.air.pub_inputs import PublicInputs 5 | from stark_verifier.air.table import Table 6 | from stark_verifier.air.transitions.frame import EvaluationFrame 7 | from stark_verifier.utils import Vec, Digest 8 | 9 | struct TraceLayout { 10 | main_segment_width: felt, 11 | num_aux_segments: felt, 12 | aux_segment_widths: felt*, 13 | aux_segment_rands: felt*, 14 | } 15 | 16 | struct ProofOptions { 17 | num_queries: felt, 18 | blowup_factor: felt, 19 | log_blowup_factor: felt, 20 | grinding_factor: felt, 21 | hash_fn: felt, 22 | field_extension: felt, 23 | fri_folding_factor: felt, 24 | fri_max_remainder_size: felt, // stored as power of 2 25 | } 26 | 27 | struct ProofContext { 28 | trace_layout: TraceLayout, 29 | trace_length: felt, 30 | log_trace_length: felt, 31 | trace_meta_len: felt, 32 | trace_meta: felt*, 33 | field_modulus_bytes_len: felt, 34 | field_modulus_bytes: felt*, 35 | options: ProofOptions, 36 | lde_domain_size: felt, 37 | } 38 | 39 | struct ParsedCommitments { 40 | trace_roots: felt*, 41 | constraint_root: felt*, 42 | fri_roots_len: felt, 43 | fri_roots: felt*, 44 | } 45 | 46 | struct ParsedOodFrame { 47 | main_frame: EvaluationFrame, 48 | aux_frame: EvaluationFrame, 49 | evaluations: Vec, 50 | } 51 | 52 | struct TraceQueries { 53 | main_states: Table, 54 | aux_states: Table, 55 | } 56 | 57 | struct ConstraintQueries { 58 | evaluations: Table, 59 | } 60 | 61 | // Definition of a STARK proof 62 | // 63 | // See also: 64 | // https://github.com/novifinancial/winterfell/blob/ecea359802538692c4e967b083107c6b08f3302e/air/src/proof/mod.rs#L51 65 | // 66 | struct StarkProof { 67 | // Basic metadata about the execution of the 
computation described by this proof. 68 | context: ProofContext, 69 | // Commitments made by the prover during the commit phase of the protocol. 70 | commitments: ParsedCommitments, 71 | // Trace evaluation frames and out-of-domain constraint evaluations 72 | ood_frame: ParsedOodFrame, 73 | // Proof-of-work nonce for query seed grinding. 74 | pow_nonce: felt, 75 | // Queried states for all trace segments (no authentication paths) 76 | trace_queries: TraceQueries, 77 | // Queried constraint evaluations (no authentication paths) 78 | constraint_queries: ConstraintQueries, 79 | // A proof consists of zero or more layers and a remainder, which is an array of Felts 80 | remainder: Vec, 81 | } 82 | 83 | func read_stark_proof() -> StarkProof* { 84 | let (proof_ptr: StarkProof*) = alloc(); 85 | %{ 86 | from src.stark_verifier.utils import write_into_memory 87 | write_into_memory(ids.proof_ptr, json_data, segments) 88 | %} 89 | return proof_ptr; 90 | } 91 | -------------------------------------------------------------------------------- /aero-sdk/src/sdk.ts: -------------------------------------------------------------------------------- 1 | import init, { MidenProver, start } from "miden-wasm"; 2 | import { MidenProgram, MidenProgramInputs } from "./proto-ts/miden_prover"; 3 | import { StarkProof } from "./proto-ts/stark_proof"; 4 | import { MidenProgramOutputs, MidenPublicInputs } from "./proto-ts/miden_vm"; 5 | import { FieldExtension, HashFunction, PrimeField, ProofOptions } from "./proto-ts/context"; 6 | import "./hashing_worker"; 7 | import "./proving_worker"; 8 | import "./constraints_worker"; 9 | 10 | var miden_prover: MidenProver = null; 11 | init().then(() => { 12 | start(); 13 | miden_prover = new MidenProver(); 14 | console.log("finished sdk init"); 15 | }); 16 | 17 | export async function prove(program: MidenProgram, inputs: MidenProgramInputs, options: ProofOptions = ProofOptions.fromJSON({ 18 | numQueries: 27, 19 | blowupFactor: 8, 20 | grindingFactor: 16, 21 
| hashFn: HashFunction.BLAKE2S, 22 | fieldExtension: FieldExtension.NONE, 23 | friFoldingFactor: 8, 24 | friMaxRemainderSize: 256, 25 | primeField: PrimeField.GOLDILOCKS, 26 | })): Promise<[StarkProof, MidenProgramOutputs, MidenPublicInputs]> { 27 | let program_bytes = MidenProgram.encode(program).finish(); 28 | let input_bytes = MidenProgramInputs.encode(inputs).finish(); 29 | let option_bytes = ProofOptions.encode(options).finish(); 30 | let proof_outputs = await miden_prover.prove(program_bytes, input_bytes, option_bytes, 1024, true); 31 | 32 | let proof = StarkProof.decode(proof_outputs.proof); 33 | let outputs = MidenProgramOutputs.decode(proof_outputs.program_outputs); 34 | let pub_inputs = MidenPublicInputs.decode(proof_outputs.public_inputs); 35 | 36 | return [proof, outputs, pub_inputs]; 37 | } 38 | 39 | export async function prove_sequential(program: MidenProgram, inputs: MidenProgramInputs, options: ProofOptions = ProofOptions.fromJSON({ 40 | numQueries: 27, 41 | blowupFactor: 8, 42 | grindingFactor: 16, 43 | hashFn: HashFunction.BLAKE2S, 44 | fieldExtension: FieldExtension.NONE, 45 | friFoldingFactor: 8, 46 | friMaxRemainderSize: 256, 47 | primeField: PrimeField.GOLDILOCKS, 48 | })): Promise<[StarkProof, MidenProgramOutputs, MidenPublicInputs]> { 49 | let program_bytes = MidenProgram.encode(program).finish(); 50 | let input_bytes = MidenProgramInputs.encode(inputs).finish(); 51 | let option_bytes = ProofOptions.encode(options).finish(); 52 | let proof_outputs = await miden_prover.prove_sequential(program_bytes, input_bytes, option_bytes, true); 53 | 54 | let proof = StarkProof.decode(proof_outputs.proof); 55 | let outputs = MidenProgramOutputs.decode(proof_outputs.program_outputs); 56 | let pub_inputs = MidenPublicInputs.decode(proof_outputs.public_inputs); 57 | 58 | return [proof, outputs, pub_inputs]; 59 | } 60 | 61 | export function uint8ArrayToU64LE(arr: Uint8Array): BigInt { 62 | if (arr.length !== 8) { 63 | throw new Error('Uint8Array must have 
exactly 8 elements to be converted to u64.'); 64 | } 65 | 66 | let result = BigInt(0); 67 | for (let i = 0; i < arr.length; i++) { 68 | result |= BigInt(arr[i]) << BigInt(i * 8); 69 | } 70 | 71 | return result; 72 | } 73 | -------------------------------------------------------------------------------- /miden-proof-generator/src/main.rs: -------------------------------------------------------------------------------- 1 | use miden::{prove, Assembler, Program, ProgramInputs, ProofOptions}; 2 | use miden_air::PublicInputs; 3 | use miden_core::{Felt, FieldElement, StarkField}; 4 | use miden_proof_generator::ProofData; 5 | use miden_stdlib::StdLibrary; 6 | use std::fs::File; 7 | use std::io::Write; 8 | 9 | fn main() { 10 | println!("============================================================"); 11 | println!("Prove program"); 12 | println!("============================================================"); 13 | 14 | // configure logging 15 | env_logger::Builder::new() 16 | .format(|buf, record| writeln!(buf, "{}", record.args())) 17 | .filter_level(log::LevelFilter::Debug) 18 | .init(); 19 | 20 | let n = 10; 21 | let program = generate_fibonacci_program(n); 22 | let expected_result = vec![compute_fibonacci(n).as_int()]; 23 | let proof_security = ProofOptions::with_96_bit_security(); 24 | let input_data = ProgramInputs::new(&[0, 1], &[], vec![]).unwrap(); 25 | println!( 26 | "Generated a program to compute {}-th Fibonacci term; expected result: {}", 27 | n, expected_result[0] 28 | ); 29 | 30 | // execute program and generate proof 31 | let (outputs, proof) = prove(&program, &input_data, &proof_security) 32 | .map_err(|err| format!("Failed to prove program - {:?}", err)) 33 | .unwrap(); 34 | 35 | let pub_inputs = PublicInputs::new(program.hash(), input_data.stack_init().to_vec(), outputs); 36 | let input_bytes = pub_inputs.to_bytes(); 37 | 38 | let proof_bytes = proof.to_bytes(); 39 | println!("Proof size: {:.1} KB", proof_bytes.len() as f64 / 1024f64); 40 | 41 | // Write 
proof to disk 42 | let data = ProofData { 43 | input_bytes, 44 | proof_bytes, 45 | }; 46 | 47 | miden_verifier::verify(pub_inputs.program_hash, &[0, 1], &pub_inputs.outputs, proof).unwrap(); 48 | 49 | let b = bincode::serialize(&data).unwrap(); 50 | let mut f = File::create("proofs/fib.bin").unwrap(); 51 | f.write_all(&b).unwrap(); 52 | } 53 | 54 | /// Generates a program to compute the `n`-th term of Fibonacci sequence 55 | fn generate_fibonacci_program(n: usize) -> Program { 56 | // the program is a simple repetition of 4 stack operations: 57 | // the first operation moves the 2nd stack item to the top, 58 | // the second operation duplicates the top 2 stack items, 59 | // the third operation removes the top item from the stack 60 | // the last operation pops top 2 stack items, adds them, and pushes 61 | // the result back onto the stack 62 | let program = format!( 63 | "begin 64 | repeat.{} 65 | swap dup.1 add 66 | end 67 | end", 68 | n - 1 69 | ); 70 | 71 | Assembler::new() 72 | .with_module_provider(StdLibrary::default()) 73 | .compile(&program) 74 | .unwrap() 75 | } 76 | 77 | /// Computes the `n`-th term of Fibonacci sequence 78 | fn compute_fibonacci(n: usize) -> Felt { 79 | let mut t0 = Felt::ZERO; 80 | let mut t1 = Felt::ONE; 81 | 82 | for _ in 0..n { 83 | t1 = t0 + t1; 84 | core::mem::swap(&mut t0, &mut t1); 85 | } 86 | t0 87 | } 88 | -------------------------------------------------------------------------------- /src/stark_verifier/fri/polynomials.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from starkware.cairo.common.hash_state import hash_finalize, hash_init, hash_update 3 | from starkware.cairo.common.hash import HashBuiltin 4 | from starkware.cairo.common.math import horner_eval 5 | from utils.math_goldilocks import mul_g, sub_g, add_g, div_g, pow_g 6 | 7 | // Evaluate l_j(x) for a fixed j and a specified x. Should loop for 0 <= m <= x_i_len. 
8 | func lagrange_basis_eval{range_check_ptr}(x_i: felt*, x_i_len, x, j, m) -> felt { 9 | alloc_locals; 10 | if (m + 1 == x_i_len) { 11 | if (j == m) { 12 | return 1; 13 | } 14 | let n = sub_g(x, x_i[m]); 15 | local numerator = n; 16 | let denominator = sub_g(x_i[j], x_i[m]); 17 | 18 | return div_g(numerator, denominator); 19 | } 20 | 21 | if (j == m) { 22 | return lagrange_basis_eval(x_i, x_i_len, x, j, m + 1); 23 | } 24 | 25 | let numerator = sub_g(x, x_i[m]); 26 | let denominator = sub_g(x_i[j], x_i[m]); 27 | 28 | // Reduce to the product of all numerator / denominator 29 | let old_product = lagrange_basis_eval(x_i, x_i_len, x, j, m + 1); 30 | let div = div_g(numerator, denominator); 31 | return mul_g(div, old_product); 32 | } 33 | 34 | // Evaluates L(x) 35 | func lagrange_sum_eval{range_check_ptr}(y_values: felt*, x_values: felt*, n_points, x, j) -> felt { 36 | alloc_locals; 37 | if (j + 1 == n_points) { 38 | let l_j = lagrange_basis_eval(x_values, n_points, x, j, 0); 39 | return mul_g(y_values[j], l_j); 40 | } 41 | 42 | // Reduce to the sum of all l_j * y_j 43 | let old_sum = lagrange_sum_eval(y_values, x_values, n_points, x, j + 1); 44 | let l_j = lagrange_basis_eval(x_values, n_points, x, j, 0); 45 | let y_val_i_j = mul_g(y_values[j], l_j); 46 | return add_g(y_val_i_j, old_sum); 47 | } 48 | 49 | // Evaluate with input x using Lagrange interpolation over evaluations. 50 | func lagrange_eval{range_check_ptr}(y_values: felt*, x_values: felt*, n_points, x) -> felt { 51 | return lagrange_sum_eval(y_values, x_values, n_points, x, 0); 52 | } 53 | 54 | // Interpolate the Lagrange polynomial derived from `n_points` many points, 55 | // given as arrays of their `x_values` and `y_values`. 
56 | func interpolate_poly_and_verify{pedersen_ptr: HashBuiltin*, range_check_ptr}( 57 | x_values: felt*, y_values: felt*, n_points 58 | ) -> felt* { 59 | alloc_locals; 60 | let (local polynomial) = alloc(); 61 | %{ 62 | from src.stark_verifier.utils import interpolate_poly 63 | interpolate_poly(ids.x_values, ids.y_values, ids.n_points, ids.polynomial, memory) 64 | %} 65 | // Use the commitment to the remainder polynomial and evaluations to draw a random 66 | // field element tau 67 | let (hash_state_ptr) = hash_init(); 68 | let (hash_state_ptr) = hash_update{hash_ptr=pedersen_ptr}( 69 | hash_state_ptr=hash_state_ptr, data_ptr=y_values, data_length=n_points 70 | ); 71 | let (hash_state_ptr) = hash_update{hash_ptr=pedersen_ptr}( 72 | hash_state_ptr=hash_state_ptr, data_ptr=polynomial, data_length=n_points 73 | ); 74 | let (tau) = hash_finalize{hash_ptr=pedersen_ptr}(hash_state_ptr=hash_state_ptr); 75 | // Evaluate both polynomial representations at tau and confirm agreement 76 | let (a) = horner_eval(n_points, polynomial, tau); 77 | let b = lagrange_eval(y_values, x_values, n_points, tau); 78 | assert a = b; 79 | return polynomial; 80 | } 81 | -------------------------------------------------------------------------------- /src/utils/python_utils.cairo: -------------------------------------------------------------------------------- 1 | // Defines hashes_from_hex for tests 2 | func setup_python_defs() { 3 | %{ 4 | import re 5 | def hex_to_felt(hex_string): 6 | # Seperate hex_string into chunks of 8 chars. 7 | felts = re.findall(".?.?.?.?.?.?.?.", hex_string) 8 | # Fill remaining space in last chunk with 0. 
9 | while len(felts[-1]) < 8: 10 | felts[-1] += "0" 11 | return [int(x, 16) for x in felts] 12 | 13 | # Writes a hex string into an uint32 array 14 | # 15 | # Using multi-line strings in python: 16 | # - https://stackoverflow.com/questions/10660435/how-do-i-split-the-definition-of-a-long-string-over-multiple-lines 17 | def from_hex(hex_string, destination): 18 | # To see if there are only 0..f in hex_string we can try to turn it into an int 19 | try: 20 | check_if_hex = int(hex_string,16) 21 | except ValueError: 22 | print("ERROR: Input to from_hex contains non-hex characters.") 23 | felts = hex_to_felt(hex_string) 24 | segments.write_arg(destination, felts) 25 | 26 | # Return the byte size of the uint32 array and the array length. 27 | return (1 + len(hex_string))// 2, len(felts) 28 | 29 | # Writes a string of any length into the given destination array. 30 | # String is seperated into uint32 chunks. 31 | # Last chunk is filled with zeros after the last string byte. 32 | def from_string(string, destination): 33 | hex_list = [hex(ord(x)).replace("0x","") for x in string] 34 | hex_string = "".join(hex_list) 35 | 36 | return from_hex(hex_string, destination) 37 | 38 | 39 | def little_endian(string): 40 | splited = [str(string)[i: i + 2] for i in range(0, len(str(string)), 2)] 41 | splited.reverse() 42 | return "".join(splited) 43 | 44 | # Writes an array of hex-encoded hashes into an uint32 array 45 | # Because of the quirk in Bitcoin we display hex-encoded hashes 46 | # in reverse byte order. 
47 | def hashes_from_hex(hashes, destination): 48 | for i, hex_hash in enumerate(hashes): 49 | hex_string = little_endian(hex_hash.replace("0x","")) 50 | _ = from_hex(hex_string, destination + i * 8) 51 | return len(hashes) 52 | 53 | 54 | def felts_from_hash(hex_hash): 55 | hex_hash = little_endian(hex_hash) 56 | return hex_to_felt(hex_hash) 57 | 58 | def felts_from_hex_strings(hex_strings): 59 | return list( map(lambda x: int(x, 16), hex_strings )) 60 | 61 | # additional helper functions invoked from `utxo_set_extract` 62 | import struct 63 | 64 | def swap32(i): 65 | return struct.unpack("I", i))[0] 66 | 67 | BASE = 2**32 68 | def _read_i(address, i): 69 | return swap32( memory[address + i] ) * BASE ** i 70 | 71 | def hash_from_memory(address): 72 | hash = _read_i(address, 0) \ 73 | + _read_i(address, 1) \ 74 | + _read_i(address, 2) \ 75 | + _read_i(address, 3) \ 76 | + _read_i(address, 4) \ 77 | + _read_i(address, 5) \ 78 | + _read_i(address, 6) \ 79 | + _read_i(address, 7) 80 | return hex(hash).replace('0x','').zfill(64) 81 | %} 82 | return (); 83 | } 84 | -------------------------------------------------------------------------------- /aero-sdk/src/demo/index.ts: -------------------------------------------------------------------------------- 1 | import { prove, prove_sequential, uint8ArrayToU64LE } from "../sdk"; 2 | import { MidenProgram, MidenProgramInputs } from "../proto-ts/miden_prover"; 3 | 4 | const FIB_NUM = 1000; 5 | 6 | async function onPageLoad() { 7 | document.querySelector("body").innerHTML = `

Proving the ${FIB_NUM}th fib number!

`; 8 | console.log("Hello!"); 9 | document.addEventListener('DOMContentLoaded', function () { 10 | const button = document.getElementById('run_proof'); 11 | 12 | button.addEventListener('click', async () => { 13 | await runProof(); 14 | }); 15 | 16 | const button_seq = document.getElementById('run_proof_sequential'); 17 | 18 | button_seq.addEventListener('click', async () => { 19 | await runProofSequential(); 20 | }); 21 | }); 22 | } 23 | 24 | async function runProof() { 25 | console.log("Running proof"); 26 | console.time("running_proof"); 27 | return new Promise((resolve) => { 28 | setTimeout(async () => { 29 | console.log("in proof"); 30 | let program = MidenProgram.fromJSON({ 31 | program: 32 | ` 33 | # STACK EFFECT 34 | # ITERATION-AMOUNT -- FIB-ANSWER # 35 | proc.fib_iter 36 | push.0 37 | push.1 38 | dup.2 39 | neq.0 40 | # Looks about 8 cyles every loop # 41 | while.true 42 | swap dup.1 add movup.2 sub.1 dup movdn.3 neq.0 43 | end 44 | drop 45 | swap 46 | drop 47 | end 48 | 49 | begin 50 | exec.fib_iter 51 | end` 52 | }); 53 | let inputs = MidenProgramInputs.fromJSON({ stackInit: [FIB_NUM], adviceTape: [] }); 54 | const [, outputs,] = await prove(program, inputs); 55 | 56 | let result = uint8ArrayToU64LE(outputs.stack[0].element); 57 | 58 | document.getElementById("result").innerHTML = "Result: " + result.toString(); 59 | console.log("Result: ", result); 60 | console.timeEnd("running_proof"); 61 | resolve(); 62 | }); 63 | }); 64 | } 65 | 66 | async function runProofSequential() { 67 | console.log("Running proof sequential"); 68 | console.time("running_proof_sequential"); 69 | return new Promise((resolve) => { 70 | setTimeout(async () => { 71 | let program = MidenProgram.fromJSON({ 72 | program: 73 | ` 74 | # STACK EFFECT 75 | # ITERATION-AMOUNT -- FIB-ANSWER # 76 | proc.fib_iter 77 | push.0 78 | push.1 79 | dup.2 80 | neq.0 81 | # Looks about 8 cyles every loop # 82 | while.true 83 | swap dup.1 add movup.2 sub.1 dup movdn.3 neq.0 84 | end 85 | drop 86 | swap 
87 | drop 88 | end 89 | 90 | begin 91 | exec.fib_iter 92 | end` 93 | }); 94 | let inputs = MidenProgramInputs.fromJSON({ stackInit: [FIB_NUM], adviceTape: [] }); 95 | const [, outputs,] = await prove_sequential(program, inputs); 96 | 97 | let result = uint8ArrayToU64LE(outputs.stack[0].element); 98 | 99 | document.getElementById("result").innerHTML = "Result: " + result.toString(); 100 | console.log("Result: ", result); 101 | console.timeEnd("running_proof_sequential"); 102 | resolve(); 103 | }); 104 | }); 105 | } 106 | 107 | onPageLoad(); -------------------------------------------------------------------------------- /src/utils/math_goldilocks.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.registers import get_ap, get_fp_and_pc 2 | 3 | // 2^64 - 2^32 - 1; 4 | const PG = 18446744069414584321; 5 | 6 | // multiply to felts modulo PG, these numbers must be smaller than PG 7 | func mul_g{range_check_ptr}(a: felt, b: felt) -> felt { 8 | // add range checks for a, b 9 | let res = a * b; 10 | 11 | let r = [range_check_ptr]; 12 | let q = [range_check_ptr + 1]; 13 | let range_check_ptr = range_check_ptr + 2; 14 | 15 | %{ 16 | ids.r = ids.res % ids.PG 17 | ids.q = ids.res // ids.PG 18 | %} 19 | assert q * PG + r = res; 20 | return r; 21 | } 22 | 23 | func add_g{range_check_ptr}(a: felt, b: felt) -> felt { 24 | let res = a + b; 25 | 26 | let r = [range_check_ptr]; 27 | let q = [range_check_ptr + 1]; 28 | let range_check_ptr = range_check_ptr + 2; 29 | 30 | %{ 31 | ids.r = ids.res % ids.PG 32 | ids.q = ids.res // ids.PG 33 | %} 34 | assert q * PG + r = res; 35 | return r; 36 | } 37 | 38 | func inv_g{range_check_ptr}(a: felt) -> felt { 39 | let inv = [range_check_ptr]; 40 | let range_check_ptr = range_check_ptr + 1; 41 | 42 | %{ 43 | def mul_g(a, b): 44 | return (a * b) % ids.PG 45 | 46 | def square_g(a): 47 | return (a ** 2) % ids.PG 48 | 49 | def exp_acc(base, tail, exp_bits): 50 | result = base 51 | 
for i in range(exp_bits): 52 | result = square_g(result) 53 | return mul_g(result, tail) 54 | # compute base^(M - 2) using 72 multiplications 55 | # M - 2 = 0b1111111111111111111111111111111011111111111111111111111111111111 56 | a = ids.a 57 | # compute base^11 58 | t2 = mul_g(square_g(a), a) 59 | 60 | # compute base^111 61 | t3 = mul_g(square_g(t2), a) 62 | 63 | # compute base^111111 (6 ones) 64 | t6 = exp_acc(t3, t3, 3) 65 | 66 | # compute base^111111111111 (12 ones) 67 | t12 = exp_acc(t6, t6, 6) 68 | 69 | # compute base^111111111111111111111111 (24 ones) 70 | t24 = exp_acc(t12, t12, 12) 71 | 72 | # compute base^1111111111111111111111111111111 (31 ones) 73 | t30 = exp_acc(t24, t6, 6) 74 | t31 = mul_g(square_g(t30), a) 75 | 76 | # compute base^111111111111111111111111111111101111111111111111111111111111111 77 | t63 = exp_acc(t31, t31, 32) 78 | 79 | # compute base^1111111111111111111111111111111011111111111111111111111111111111 80 | ids.inv = mul_g(square_g(t63), a) 81 | %} 82 | assert mul_g(inv, a) = 1; 83 | return inv; 84 | } 85 | 86 | func div_g{range_check_ptr}(a: felt, b: felt) -> felt { 87 | let inv = inv_g(b); 88 | return mul_g(a, inv); 89 | } 90 | 91 | func sub_g{range_check_ptr}(a: felt, b: felt) -> felt { 92 | let r = [range_check_ptr]; 93 | let a_greater_than_b = [range_check_ptr + 1]; 94 | let range_check_ptr = range_check_ptr + 2; 95 | 96 | %{ 97 | if ids.a < ids.b: 98 | ids.r = ids.a + ids.PG - ids.b 99 | ids.a_greater_than_b = 0 100 | else: 101 | ids.r = ids.a - ids.b 102 | ids.a_greater_than_b = 1 103 | %} 104 | 105 | if (a_greater_than_b == 1) { 106 | assert r = a - b; 107 | } else { 108 | assert r + b = a + PG; 109 | } 110 | return r; 111 | } 112 | 113 | func pow_g_loop{range_check_ptr}(base, exp, res) -> felt { 114 | if (exp == 0) { 115 | return res; 116 | } 117 | 118 | let base_square = mul_g(base, base); 119 | 120 | let bit = [range_check_ptr]; 121 | let range_check_ptr = range_check_ptr + 1; 122 | 123 | %{ ids.bit = (ids.exp % ids.PG) & 1 %} 
124 | if (bit == 1) { 125 | // odd case 126 | let tmp = exp - 1; 127 | let new_exp = tmp / 2; 128 | let r = mul_g(base, res); 129 | return pow_g_loop(base_square, new_exp, r); 130 | } else { 131 | // even case 132 | let new_exp = exp / 2; 133 | return pow_g_loop(base_square, new_exp, res); 134 | } 135 | } 136 | 137 | // Returns base ** exp % PG, for 0 <= exp < 2**63. 138 | func pow_g{range_check_ptr}(base, exp) -> felt { 139 | if (exp == 0) { 140 | return 1; 141 | } 142 | 143 | if (base == 0) { 144 | return 0; 145 | } 146 | 147 | return pow_g_loop(base, exp, 1); 148 | } 149 | -------------------------------------------------------------------------------- /miden-to-cairo-parser/src/main.rs: -------------------------------------------------------------------------------- 1 | use hex::FromHex; 2 | use miden_to_cairo_parser::{ 3 | memory::{Writeable, WriteableWith}, 4 | Air, BinaryProofData, Felt, FriProofParams, ProcessorAir, PublicInputs, StarkProof, 5 | }; 6 | use serde_json::from_str; 7 | use winter_crypto::hashers::Blake2s_256; 8 | use winter_math::polynom::interpolate; 9 | use winter_utils::{Deserializable, SliceReader}; 10 | use winterfell::VerifierChannel; 11 | 12 | use clap::{Parser, Subcommand}; 13 | 14 | #[derive(Parser)] 15 | #[command(name = "parser")] 16 | #[command(about = "A parser for reencoding STARK proofs", long_about = None)] 17 | struct Cli { 18 | path: String, 19 | #[command(subcommand)] 20 | command: Commands, 21 | } 22 | 23 | #[derive(Subcommand)] 24 | enum Commands { 25 | Proof, 26 | PublicInputs, 27 | TraceQueries { 28 | indexes: Option, 29 | }, 30 | ConstraintQueries { 31 | indexes: Option, 32 | }, 33 | FriQueries { 34 | indexes: Option, 35 | }, 36 | InterpolatePoly { 37 | x_values: Option, 38 | y_values: Option, 39 | }, 40 | } 41 | 42 | fn main() { 43 | let cli = Cli::parse(); 44 | 45 | // Load the proof and its public inputs from file 46 | let data = BinaryProofData::from_file(&cli.path); 47 | let proof = 
StarkProof::from_bytes(&data.proof_bytes).unwrap(); 48 | let pub_inputs = PublicInputs::read_from(&mut SliceReader::new(&data.input_bytes[..])).unwrap(); 49 | 50 | // Serialize to Cairo-compatible memory 51 | let json_arr = match &cli.command { 52 | Commands::Proof => { 53 | let air = 54 | ProcessorAir::new(proof.get_trace_info(), pub_inputs, proof.options().clone()); 55 | proof.to_cairo_memory(&air) 56 | } 57 | Commands::PublicInputs => pub_inputs.to_cairo_memory(), 58 | Commands::TraceQueries { indexes } => { 59 | let air = ProcessorAir::new( 60 | proof.get_trace_info(), 61 | pub_inputs.clone(), 62 | proof.options().clone(), 63 | ); 64 | 65 | let indexes: Vec = from_str(&indexes.clone().unwrap()).unwrap(); 66 | 67 | let channel = 68 | VerifierChannel::>::new(&air, proof.clone()).unwrap(); 69 | 70 | channel.trace_queries.unwrap().to_cairo_memory(&indexes) 71 | } 72 | Commands::ConstraintQueries { indexes } => { 73 | let air = ProcessorAir::new( 74 | proof.get_trace_info(), 75 | pub_inputs.clone(), 76 | proof.options().clone(), 77 | ); 78 | 79 | let indexes: Vec = from_str(&indexes.clone().unwrap()).unwrap(); 80 | 81 | let channel = 82 | VerifierChannel::>::new(&air, proof.clone()).unwrap(); 83 | 84 | channel 85 | .constraint_queries 86 | .unwrap() 87 | .to_cairo_memory(&indexes) 88 | } 89 | Commands::FriQueries { indexes } => { 90 | let air = ProcessorAir::new( 91 | proof.get_trace_info(), 92 | pub_inputs.clone(), 93 | proof.options().clone(), 94 | ); 95 | 96 | let indexes: Vec = from_str(&indexes.clone().unwrap()).unwrap(); 97 | proof.fri_proof.to_cairo_memory(FriProofParams { 98 | air: &air, 99 | indexes: &indexes, 100 | }) 101 | } 102 | Commands::InterpolatePoly { x_values, y_values } => { 103 | let x_values = decode_felt_array(x_values); 104 | let y_values = decode_felt_array(y_values); 105 | 106 | let poly = interpolate(&x_values, &y_values, false); 107 | poly.iter() 108 | .fold(String::new(), |a, x| a + ", " + &x.to_string()) 109 | } 110 | }; 111 | 112 | 
println!("{}", json_arr); 113 | } 114 | 115 | fn decode_felt_array(values: &Option) -> Vec { 116 | let values: Vec = from_str(&values.clone().unwrap()).unwrap(); 117 | values 118 | .into_iter() 119 | .map(|value| { 120 | let decoded = <[u8; 8]>::from_hex(value).unwrap(); 121 | let d = u64::from_le_bytes(decoded); 122 | Felt::new(d) 123 | }) 124 | .collect() 125 | } 126 | -------------------------------------------------------------------------------- /src/utils/utxo_dummy_generator.py: -------------------------------------------------------------------------------- 1 | import urllib3 2 | import json 3 | import re 4 | import math 5 | import sys 6 | import os 7 | 8 | from starkware.cairo.lang.vm.crypto import pedersen_hash 9 | from starkware.cairo.common.hash_chain import compute_hash_chain 10 | 11 | HASH_FELT_SIZE = 8 12 | 13 | 14 | def hex_to_felt(hex_string): 15 | # Seperate hex_string into chunks of 8 chars. 16 | felts = re.findall(".?.?.?.?.?.?.?.", hex_string) 17 | # Fill remaining space in last chunk with 0. 
18 | while len(felts[-1]) < 8: 19 | felts[-1] += "0" 20 | return [int(x, 16) for x in felts] 21 | 22 | 23 | def little_endian(string): 24 | splited = [str(string)[i: i + 2] for i in range(0, len(str(string)), 2)] 25 | splited.reverse() 26 | return "".join(splited) 27 | 28 | 29 | # Returns the block at given height in json format 30 | def fetch_block(block_height): 31 | http = urllib3.PoolManager() 32 | 33 | url = 'https://blockstream.info/api/block-height/' + str(block_height) 34 | r = http.request('GET', url) 35 | block_hash = r.data.decode('utf8') 36 | url = 'https://blockstream.info/api/block/' + str(block_hash) 37 | r = http.request('GET', url) 38 | return json.loads(r.data) 39 | 40 | 41 | # see here: https://github.com/zerosync/zerosync/blob/fb70c24e16bbc5617fe91c4c7db23f6748102558/src/utxo_set/utxo_set.cairo#L120 42 | # txid as a list of uint32 43 | # script_pub_key is a list of uint32 44 | def hash_output(txid, vout, amount, script_pub_key): 45 | script_pub_key_hash = compute_hash_chain(script_pub_key) 46 | txid_hash = compute_hash_chain(txid) 47 | tmp1 = pedersen_hash(amount, script_pub_key_hash) 48 | tmp2 = pedersen_hash(vout, tmp1) 49 | return(pedersen_hash(txid_hash, tmp2)) 50 | 51 | 52 | # Returns a list of tx inputs (used utxos) of a block at specified height 53 | def fetch_tx_ins_and_outs(block_height): 54 | http = urllib3.PoolManager() 55 | 56 | # fetch the block hash 57 | url = 'https://blockstream.info/api/block-height/' + str(block_height) 58 | r = http.request('GET', url) 59 | block_hash = r.data.decode('utf8') 60 | 61 | # fetch a list of txids 62 | url = 'https://blockstream.info/api/block/' + block_hash + '/txids' 63 | r = http.request('GET', url) 64 | txids = json.loads(r.data) 65 | 66 | txs = {} 67 | # fetch all tx_in and tx_out per tx in txids 68 | for txid in txids: 69 | url = 'https://blockstream.info/api/tx/' + txid 70 | r = http.request('GET', url) 71 | tx = json.loads(r.data) 72 | txs[txid] = tx 73 | 74 | return txs 75 | 76 | 77 | def 
hash_tx_ins(txs): 78 | hashes = [] 79 | for tx in txs: 80 | for tx_vin in txs[tx]['vin']: 81 | if tx_vin['is_coinbase']: 82 | continue 83 | # this utxo is generated in the validated block and we should not add 84 | # it to the utxo set manually 85 | if tx_vin['txid'] in txs: 86 | continue 87 | txid_list = hex_to_felt(little_endian(tx_vin['txid'])) 88 | vout = tx_vin['vout'] 89 | amount = tx_vin['prevout']['value'] 90 | script_pub_key = hex_to_felt(tx_vin['prevout']['scriptpubkey']) 91 | utxo_hash = hash_output(txid_list, vout, amount, script_pub_key) 92 | hashes.append(utxo_hash) 93 | return hashes 94 | 95 | 96 | def generate_utxo_dummys(block_height): 97 | # TODO add cache folder to gitignore 98 | 99 | # Check if the current block exists in the cache directory 100 | cache_dir = 'utxo_dummy_cache' 101 | os.system(f'mkdir -p {cache_dir}') 102 | 103 | if os.path.isfile(f'{cache_dir}/block_{block_height}.json'): 104 | f = open(f'{cache_dir}/block_{block_height}.json', 'r') 105 | output_hashes = json.load(f) 106 | else: 107 | # Fetch all required utxos 108 | txs = fetch_tx_ins_and_outs(block_height) 109 | output_hashes = hash_tx_ins(txs) 110 | 111 | # Create new file as cache entry 112 | f = open(f'{cache_dir}/block_{block_height}.json', 'w') 113 | json.dump(output_hashes, f) 114 | 115 | f.close() 116 | return output_hashes 117 | 118 | 119 | if __name__ == "__main__": 120 | if len(sys.argv) == 1: 121 | print( 122 | f"ERROR: No block height specified.\nUSAGE: python {sys.argv[0]} \n") 123 | exit(1) 124 | block_height = int(sys.argv[1]) 125 | if block_height < 0: 126 | print("ERROR: Specify a block height above zero.") 127 | exit(2) 128 | 129 | dummys = generate_utxo_dummys(block_height) 130 | print(dummys) 131 | -------------------------------------------------------------------------------- /tests/integration/test_verifier.cairo: -------------------------------------------------------------------------------- 1 | %lang starknet 2 | 3 | from 
starkware.cairo.common.cairo_builtins import BitwiseBuiltin 4 | from starkware.cairo.common.hash import HashBuiltin 5 | from starkware.cairo.common.alloc import alloc 6 | 7 | from stark_verifier.air.stark_proof import read_stark_proof, StarkProof 8 | from stark_verifier.air.pub_inputs import read_public_inputs, PublicInputs 9 | from stark_verifier.stark_verifier import verify 10 | from stark_verifier.crypto.random import random_coin_new, seed_with_pub_inputs, draw_integers, draw 11 | from starkware.cairo.common.cairo_blake2s.blake2s import finalize_blake2s, blake2s_as_words 12 | 13 | // / Test deserialization of StarkProof from file 14 | @external 15 | func test_read_stark_proof{}() { 16 | %{ 17 | from tests.integration.utils import parse_proof 18 | json_data = parse_proof('fib') 19 | %} 20 | let proof: StarkProof* = read_stark_proof(); 21 | 22 | %{ 23 | # TODO: Assert that all proof fields were deserialized correctly using utils.py 24 | print('main_segment_width:', ids.proof.context.trace_layout.main_segment_width) 25 | print('num_queries:', ids.proof.context.options.num_queries) 26 | print('blowup_factor:', ids.proof.context.options.blowup_factor) 27 | print('pow_nonce:', ids.proof.pow_nonce) 28 | %} 29 | return (); 30 | } 31 | 32 | // / Test deserialization of PublicInputs from file 33 | @external 34 | func test_read_pub_inputs{}() { 35 | %{ 36 | from tests.integration.utils import parse_public_inputs 37 | json_data = parse_public_inputs('fib') 38 | %} 39 | let pub_inputs: PublicInputs* = read_public_inputs(); 40 | 41 | %{ 42 | # TODO: Assert that all proof fields were deserialized correctly using utils.py 43 | print('program_hash:', ids.pub_inputs.program_hash) 44 | expected_program_hash_elements = [2541413064022245539, 7129587402699328827, 5589074863266416554, 8033675306619022710] 45 | for i in range(ids.pub_inputs.program_hash_len): 46 | assert memory[ids.pub_inputs.program_hash + i] == expected_program_hash_elements[i] 47 | print('program_hash_len:', 
ids.pub_inputs.program_hash_len) 48 | print('stack_inputs:', ids.pub_inputs.stack_inputs) 49 | print('stack_inputs_len:', ids.pub_inputs.stack_inputs_len) 50 | print('ouputs.stack:', ids.pub_inputs.outputs.stack) 51 | print('outputs.stack_len:', ids.pub_inputs.outputs.stack_len) 52 | print('ouputs.overflow_addrs:', ids.pub_inputs.outputs.overflow_addrs) 53 | print('outputs.overflow_addrs_len:', ids.pub_inputs.outputs.overflow_addrs_len) 54 | %} 55 | return (); 56 | } 57 | 58 | @external 59 | func test_verify{range_check_ptr, pedersen_ptr: HashBuiltin*, bitwise_ptr: BitwiseBuiltin*}() { 60 | %{ 61 | from tests.integration.utils import parse_public_inputs 62 | json_data = parse_public_inputs('fib') 63 | %} 64 | let pub_inputs: PublicInputs* = read_public_inputs(); 65 | 66 | %{ 67 | from tests.integration.utils import parse_proof 68 | json_data = parse_proof('fib') 69 | %} 70 | let proof: StarkProof* = read_stark_proof(); 71 | 72 | verify(proof, pub_inputs); 73 | return (); 74 | } 75 | 76 | @external 77 | func test_draw{range_check_ptr, bitwise_ptr: BitwiseBuiltin*, pedersen_ptr: HashBuiltin*}() { 78 | alloc_locals; 79 | let (blake2s_ptr: felt*) = alloc(); 80 | local blake2s_ptr_start: felt* = blake2s_ptr; 81 | 82 | %{ 83 | from tests.integration.utils import parse_public_inputs 84 | json_data = parse_public_inputs('fib') 85 | %} 86 | let pub_inputs: PublicInputs* = read_public_inputs(); 87 | let public_coin_seed: felt* = seed_with_pub_inputs{blake2s_ptr=blake2s_ptr}(pub_inputs); 88 | 89 | %{ 90 | seed = [hex(memory[ids.public_coin_seed+ptr]) for ptr in range(8)] 91 | print('seed:', seed) 92 | %} 93 | 94 | with blake2s_ptr { 95 | let public_coin = random_coin_new(public_coin_seed, 32); 96 | } 97 | 98 | let (local elements: felt*) = alloc(); 99 | let n_elements = 20; 100 | let domain_size = 64; 101 | 102 | with public_coin, blake2s_ptr { 103 | let r_element = draw(); 104 | %{ assert ids.r_element == 15636605459427237624 %} 105 | draw_integers(n_elements=n_elements, 
elements=elements, domain_size=64); 106 | } 107 | %{ 108 | expected = [55, 46, 17, 44, 61, 8, 43, 39, 19, 3, 26, 31, 30, 4, 37, 40, 49, 7, 56, 29] 109 | for i in range(ids.n_elements): 110 | assert memory[ids.elements + i] == expected[i] 111 | %} 112 | finalize_blake2s(blake2s_ptr_start, blake2s_ptr); 113 | return (); 114 | } 115 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/constraints_worker.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::{ 2 | from_uint8array, set_once_logger, to_uint8array, ConstraintComputeResult, 3 | ConstraintComputeWorkItem, FeltWrapper, TraceLdeWrapper, 4 | }; 5 | use js_sys::Uint8Array; 6 | use log::debug; 7 | use miden_air::ProcessorAir; 8 | use miden_core::{Felt, FieldElement}; 9 | use wasm_bindgen::prelude::*; 10 | use web_sys::{DedicatedWorkerGlobalScope, MessageEvent}; 11 | use winter_air::Air; 12 | use winter_prover::{ConstraintEvaluationTable, ConstraintEvaluator, StarkDomain}; 13 | 14 | pub fn constraint_compute(work_item: &ConstraintComputeWorkItem) -> Result { 15 | let air = ProcessorAir::new( 16 | work_item.trace_info.clone(), 17 | work_item.public_inputs.clone(), 18 | work_item.proof_options.clone(), 19 | ); 20 | // 2 ----- evaluate constraints ----------------------------------------------------------- 21 | // evaluate constraints specified by the AIR over the constraint evaluation domain, and 22 | // compute random linear combinations of these evaluations using coefficients drawn from 23 | // the channel; this step evaluates only constraint numerators, thus, only constraints with 24 | // identical denominators are merged together. the results are saved into a constraint 25 | // evaluation table where each column contains merged evaluations of constraints with 26 | // identical denominators. 
27 | let constraint_coeffs = work_item.constraint_coeffs.clone(); 28 | let aux_trace_rand_elements = work_item.aux_rand_elements.clone(); 29 | // build a list of constraint divisors; currently, all transition constraints have the same 30 | // divisor which we put at the front of the list; boundary constraint divisors are appended 31 | // after that 32 | let evaluator: ConstraintEvaluator<_, Felt> = 33 | ConstraintEvaluator::new(&air, aux_trace_rand_elements, constraint_coeffs); 34 | // build a list of constraint divisors; currently, all transition constraints have the same 35 | // divisor which we put at the front of the list; boundary constraint divisors are appended 36 | // after that 37 | let mut divisors = vec![evaluator.transition_constraints.divisor().clone()]; 38 | divisors.append(&mut evaluator.boundary_constraints.get_divisors()); 39 | 40 | let domain = StarkDomain::new(&air); 41 | let trace_lde_wrapper: TraceLdeWrapper = bincode::deserialize(&work_item.trace_lde_wrapper) 42 | .map_err(|e| JsValue::from_str(&format!("cannot deser traceLdeWrapper: {}", e)))?; 43 | let trace_table = &trace_lde_wrapper.trace_lde; 44 | 45 | // allocate space for constraint evaluations; when we are in debug mode, we also allocate 46 | // memory to hold all transition constraint evaluations (before they are merged into a 47 | // single value) so that we can check their degrees later 48 | #[cfg(not(debug_assertions))] 49 | let mut evaluation_table = ConstraintEvaluationTable::::new(&domain, divisors); 50 | #[cfg(debug_assertions)] 51 | let mut evaluation_table = ConstraintEvaluationTable::::new( 52 | &domain, 53 | divisors, 54 | &evaluator.transition_constraints, 55 | ); 56 | let frag_num = work_item.computation_fragment.num_fragments; 57 | let mut fragments = evaluation_table.fragments(frag_num); 58 | let frag = &mut fragments[work_item.computation_fragment.fragment_offset]; 59 | evaluator.evaluate_fragment(&trace_table, &domain, frag); 60 | debug!( 61 | "done processing 
constraints for batch {}", 62 | work_item.computation_fragment.fragment_offset 63 | ); 64 | 65 | let mut evaluations = vec![vec![FeltWrapper(Felt::ZERO); frag.num_rows()]; frag.num_columns()]; 66 | for i in 0..frag.num_columns() { 67 | for j in 0..frag.num_rows() { 68 | evaluations[i][j] = FeltWrapper(frag.evaluations[i][j]); 69 | } 70 | } 71 | 72 | let response = ConstraintComputeResult { 73 | frag_index: frag.offset(), 74 | frag_num, 75 | constraint_evaluations: evaluations, 76 | }; 77 | 78 | Ok(to_uint8array(&response)) 79 | } 80 | 81 | #[wasm_bindgen] 82 | pub fn constraint_entry_point(msg: MessageEvent) -> Result<(), JsValue> { 83 | set_once_logger(); 84 | if let Ok(work_item) = 85 | from_uint8array::(&Uint8Array::new(&msg.data())) 86 | { 87 | debug!( 88 | "Constraint worker received work item: {:?}", 89 | work_item.computation_fragment.fragment_offset 90 | ); 91 | let response = constraint_compute(&work_item)?; 92 | let global_scope = js_sys::global().unchecked_into::(); 93 | global_scope.post_message(&response)?; 94 | } else { 95 | debug!("Constraint worker received invalid work item"); 96 | } 97 | Ok(()) 98 | } 99 | -------------------------------------------------------------------------------- /miden-to-cairo-parser/src/memory.rs: -------------------------------------------------------------------------------- 1 | pub enum MemoryEntry { 2 | Value { value: String }, 3 | Pointer { pointer: usize }, 4 | } 5 | 6 | impl MemoryEntry { 7 | fn to_string(&self, pointers_map: &Vec) -> String { 8 | match self { 9 | MemoryEntry::Value { value } => value.to_string(), 10 | MemoryEntry::Pointer { pointer } => format!("{}", pointers_map[*pointer]), 11 | } 12 | } 13 | 14 | fn from_u64(value: u64) -> MemoryEntry { 15 | MemoryEntry::Value { 16 | value: format!("{:#X}", value), 17 | } 18 | } 19 | 20 | fn from_hex(value: String) -> MemoryEntry { 21 | MemoryEntry::Value { value } 22 | } 23 | 24 | fn from_pointer(pointer: usize) -> MemoryEntry { 25 | MemoryEntry::Pointer { 
pointer } 26 | } 27 | } 28 | 29 | type Memory = Vec; 30 | 31 | pub struct DynamicMemory<'a> { 32 | memories: &'a mut Vec, 33 | segment: usize, 34 | } 35 | 36 | impl<'a> DynamicMemory<'a> { 37 | pub fn new(memories: &'a mut Vec) -> DynamicMemory<'a> { 38 | memories.push(Vec::::new()); 39 | DynamicMemory { 40 | memories, 41 | segment: 0, 42 | } 43 | } 44 | 45 | pub fn assemble(&self) -> Vec { 46 | // Concatenate all memories and compute a mapping for pointers 47 | let mut concatenated = Vec::<&MemoryEntry>::new(); 48 | let mut pointers_map = Vec::new(); 49 | 50 | for vector in self.memories.iter() { 51 | pointers_map.push(concatenated.len()); 52 | concatenated.extend(vector); 53 | } 54 | 55 | // Iterate through all memory entries and map the pointers 56 | let mut memory = Vec::new(); 57 | for entry in concatenated { 58 | memory.push(entry.to_string(&pointers_map)); 59 | } 60 | 61 | memory 62 | } 63 | 64 | fn write_entry(&mut self, entry: MemoryEntry) { 65 | self.memories.get_mut(self.segment).unwrap().push(entry); 66 | } 67 | 68 | pub fn write_pointer(&mut self, pointer: usize) { 69 | self.write_entry(MemoryEntry::from_pointer(pointer)) 70 | } 71 | 72 | pub fn write_value(&mut self, value: u64) { 73 | self.write_entry(MemoryEntry::from_u64(value)) 74 | } 75 | 76 | pub fn write_hex_value(&mut self, value: String) { 77 | self.write_entry(MemoryEntry::from_hex(value)) 78 | } 79 | 80 | pub fn write_array(&mut self, array: Vec) { 81 | let mut sub_memory = self.alloc(); 82 | for writable in array { 83 | writable.write_into(&mut sub_memory); 84 | } 85 | } 86 | 87 | pub fn write_array_with, F>(&mut self, array: Vec, f: F) 88 | where 89 | F: Fn(u32) -> Params, 90 | { 91 | let mut sub_memory = self.alloc(); 92 | let mut i = 0; 93 | for writable in array { 94 | writable.write_into(&mut sub_memory, f(i)); 95 | i += 1; 96 | } 97 | } 98 | 99 | pub fn alloc(&mut self) -> DynamicMemory { 100 | let segment = self.memories.len(); 101 | self.write_pointer(segment); 102 | 
self.memories.push(Vec::::new()); 103 | DynamicMemory { 104 | memories: self.memories, 105 | segment, 106 | } 107 | } 108 | 109 | pub fn write_sized_array(&mut self, array: Vec) { 110 | self.write_value(array.len() as u64); 111 | self.write_array(array); 112 | } 113 | 114 | pub fn write_sized_array_with, F>( 115 | &mut self, 116 | array: Vec, 117 | f: F, 118 | ) where 119 | F: Fn(u32) -> Params, 120 | { 121 | self.write_value(array.len() as u64); 122 | self.write_array_with(array, f); 123 | } 124 | } 125 | 126 | pub trait Writeable: Sized { 127 | fn write_into(&self, target: &mut DynamicMemory); 128 | 129 | fn to_cairo_memory(&self) -> String { 130 | let mut memories = Vec::>::new(); 131 | let mut dynamic_memory = DynamicMemory::new(&mut memories); 132 | self.write_into(&mut dynamic_memory); 133 | let memory = dynamic_memory.assemble(); 134 | let json_arr = serde_json::to_string(&memory).unwrap(); 135 | json_arr 136 | } 137 | } 138 | 139 | pub trait WriteableWith { 140 | fn write_into(&self, target: &mut DynamicMemory, params: Parameters); 141 | 142 | fn to_cairo_memory(&self, params: Parameters) -> String { 143 | let mut memories = Vec::>::new(); 144 | let mut dynamic_memory = DynamicMemory::new(&mut memories); 145 | self.write_into(&mut dynamic_memory, params); 146 | let memory = dynamic_memory.assemble(); 147 | let json_arr = serde_json::to_string(&memory).unwrap(); 148 | json_arr 149 | } 150 | } 151 | 152 | impl Writeable for Vec { 153 | fn write_into(&self, target: &mut DynamicMemory) { 154 | target.write_value(self.len() as u64); 155 | for byte in self { 156 | target.write_value(*byte as u64); 157 | } 158 | } 159 | } 160 | 161 | impl Writeable for u8 { 162 | fn write_into(&self, target: &mut DynamicMemory) { 163 | target.write_value(*self as u64) 164 | } 165 | } 166 | 167 | impl Writeable for u16 { 168 | fn write_into(&self, target: &mut DynamicMemory) { 169 | target.write_value(*self as u64) 170 | } 171 | } 172 | 173 | impl Writeable for u32 { 174 | fn 
write_into(&self, target: &mut DynamicMemory) { 175 | target.write_value(*self as u64) 176 | } 177 | } 178 | 179 | impl Writeable for u64 { 180 | fn write_into(&self, target: &mut DynamicMemory) { 181 | target.write_value(*self) 182 | } 183 | } 184 | 185 | impl Writeable for usize { 186 | fn write_into(&self, target: &mut DynamicMemory) { 187 | target.write_value(*self as u64) 188 | } 189 | } 190 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/pool.rs: -------------------------------------------------------------------------------- 1 | // Silences warnings from the compiler about Work.func and child_entry_point 2 | // being unused when the target is not wasm. 3 | #![cfg_attr(not(target_arch = "wasm32"), allow(dead_code))] 4 | use log::debug; 5 | use wasm_bindgen::prelude::*; 6 | use web_sys::{DedicatedWorkerGlobalScope, MessageEvent, Worker, WorkerNavigator}; 7 | 8 | use crate::utils::{to_uint8array, ConstraintComputeWorkItem, FeltWrapper, HashingWorkItem}; 9 | 10 | #[derive(Debug, Clone)] 11 | pub struct WorkerPool { 12 | state: PoolState, 13 | } 14 | 15 | #[derive(Debug, Clone)] 16 | struct PoolState { 17 | workers: Vec, 18 | constraint_workers: Vec, 19 | } 20 | 21 | impl WorkerPool { 22 | fn get_hardware_concurrency() -> usize { 23 | let global = js_sys::global().unchecked_into::(); 24 | let navigator: WorkerNavigator = global.navigator(); 25 | navigator.hardware_concurrency() as usize 26 | } 27 | 28 | pub fn new() -> Result { 29 | let concurrency = Self::get_hardware_concurrency(); 30 | debug!("creating worker pool with concurrency {}", concurrency); 31 | let mut pool = WorkerPool { 32 | state: PoolState { 33 | workers: Vec::with_capacity(concurrency), 34 | constraint_workers: Vec::with_capacity(concurrency), 35 | }, 36 | }; 37 | for _ in 0..concurrency { 38 | let worker = pool.spawn("./hashing_worker.js")?; 39 | pool.state.push(worker); 40 | let constraint_worker = 
pool.spawn("./constraints_worker.js")?; 41 | pool.state.push_constraint_worker(constraint_worker); 42 | } 43 | 44 | Ok(pool) 45 | } 46 | 47 | /// Unconditionally spawns a new worker 48 | /// 49 | /// The worker isn't registered with this `WorkerPool` but is capable of 50 | /// executing work for this wasm module. 51 | /// 52 | /// # Errors 53 | /// 54 | /// Returns any error that may happen while a JS web worker is created and a 55 | /// message is sent to it. 56 | fn spawn(&self, worker_path: &str) -> Result { 57 | console_log!("spawning new worker, {}", worker_path); 58 | // TODO: what do do about `./worker.js`: 59 | // 60 | // * the path is only known by the bundler. How can we, as a 61 | // library, know what's going on? 62 | // * How do we not fetch a script N times? It internally then 63 | // causes another script to get fetched N times... 64 | let worker = Worker::new(worker_path)?; 65 | worker.post_message(&JsValue::from_str("wake worker up"))?; 66 | Ok(worker) 67 | } 68 | 69 | fn worker(&self, worker_idx: usize) -> Result<&Worker, JsValue> { 70 | let worker = &self.state.workers[worker_idx]; 71 | Ok(worker) 72 | } 73 | 74 | fn constraint_worker(&self, worker_idx: usize) -> Result<&Worker, JsValue> { 75 | let worker = &self.state.constraint_workers[worker_idx]; 76 | Ok(worker) 77 | } 78 | 79 | fn execute( 80 | &self, 81 | batch_idx: usize, 82 | elements_table: Vec>, 83 | get_on_msg_callback: Closure, 84 | ) -> Result<(), JsValue> { 85 | debug!( 86 | "batch_idx: {}, concurrency: {}", 87 | batch_idx, 88 | self.state.concurrency() 89 | ); 90 | let worker_idx = batch_idx % self.state.concurrency(); 91 | debug!("running on worker idx: {}", worker_idx); 92 | let worker = self.worker(worker_idx)?; 93 | 94 | let work_item = HashingWorkItem { 95 | data: elements_table, 96 | batch_idx, 97 | }; 98 | let payload = to_uint8array(&work_item); 99 | worker.post_message(&payload)?; 100 | worker.set_onmessage(Some(get_on_msg_callback.as_ref().unchecked_ref())); 101 | 
get_on_msg_callback.forget(); 102 | Ok(()) 103 | } 104 | 105 | fn execute_constraint( 106 | &self, 107 | constraint_work_item: ConstraintComputeWorkItem, 108 | get_on_msg_callback: Closure, 109 | ) -> Result<(), JsValue> { 110 | debug!( 111 | "fragment_offset: {}, concurrency: {}", 112 | constraint_work_item.computation_fragment.fragment_offset, 113 | self.state.concurrency() 114 | ); 115 | let worker_idx = 116 | constraint_work_item.computation_fragment.fragment_offset % self.state.concurrency(); 117 | debug!("running on worker idx: {}", worker_idx); 118 | let worker = self.constraint_worker(worker_idx)?; 119 | let payload = to_uint8array(&constraint_work_item); 120 | worker.post_message(&payload)?; 121 | worker.set_onmessage(Some(get_on_msg_callback.as_ref().unchecked_ref())); 122 | get_on_msg_callback.forget(); 123 | Ok(()) 124 | } 125 | } 126 | 127 | impl WorkerPool { 128 | pub fn run( 129 | &self, 130 | batch_idx: usize, 131 | elements_table: Vec>, 132 | get_on_msg_callback: Closure, 133 | ) -> Result<(), JsValue> { 134 | self.execute(batch_idx, elements_table, get_on_msg_callback)?; 135 | Ok(()) 136 | } 137 | 138 | pub fn run_constraint( 139 | &self, 140 | constraint_work_item: ConstraintComputeWorkItem, 141 | get_on_msg_callback: Closure, 142 | ) -> Result<(), JsValue> { 143 | self.execute_constraint(constraint_work_item, get_on_msg_callback)?; 144 | Ok(()) 145 | } 146 | } 147 | 148 | impl PoolState { 149 | fn push(&mut self, worker: Worker) { 150 | self.workers.push(worker); 151 | } 152 | 153 | fn push_constraint_worker(&mut self, worker: Worker) { 154 | self.constraint_workers.push(worker); 155 | } 156 | 157 | fn concurrency(&self) -> usize { 158 | self.workers.len() 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![feature(once_cell)] 2 | use futures::Future; 3 | use 
js_sys::Uint8Array; 4 | use log::debug; 5 | use std::pin::Pin; 6 | use std::task::{Context, Poll}; 7 | use std::{cell::RefCell, rc::Rc}; 8 | use utils::set_once_logger; 9 | use wasm_bindgen::prelude::*; 10 | use web_sys::{MessageEvent, Worker}; 11 | 12 | macro_rules! console_log { 13 | ($($t:tt)*) => (crate::log(&format_args!($($t)*).to_string())) 14 | } 15 | 16 | pub mod constraints_worker; 17 | pub mod convert; 18 | pub mod hashing_worker; 19 | pub mod pool; 20 | pub mod proving_worker; 21 | pub mod utils; 22 | use crate::convert::sdk::sdk; 23 | use crate::utils::{from_uint8array, to_uint8array, ProverOutput, ProvingWorkItem}; 24 | use proving_worker::{proving_seq_entry_point, MidenProverAsyncWorker}; 25 | 26 | pub struct ResultFuture { 27 | pub result: Rc>>, 28 | } 29 | 30 | impl Future for ResultFuture { 31 | type Output = (); 32 | 33 | fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { 34 | let r = (*self.result).borrow(); 35 | if r.is_some() { 36 | return Poll::Ready(()); 37 | } else { 38 | // wait every second 39 | let wait_fn = { 40 | let waker = Rc::new(cx.waker().clone()); 41 | Closure::wrap(Box::new(move || { 42 | waker.as_ref().clone().wake(); 43 | }) as Box) 44 | }; 45 | let _ = web_sys::window() 46 | .unwrap() 47 | .set_timeout_with_callback_and_timeout_and_arguments_0( 48 | wait_fn.as_ref().unchecked_ref(), 49 | 200, 50 | ); 51 | wait_fn.forget(); 52 | return Poll::Pending; 53 | } 54 | } 55 | } 56 | 57 | #[wasm_bindgen] 58 | extern "C" { 59 | #[wasm_bindgen(js_namespace = console)] 60 | fn log(s: &str); 61 | #[wasm_bindgen(js_namespace = console, js_name = log)] 62 | fn logv(x: &JsValue); 63 | } 64 | 65 | #[wasm_bindgen] 66 | pub fn start() -> Result<(), JsValue> { 67 | set_once_logger(); 68 | Ok(()) 69 | } 70 | 71 | #[wasm_bindgen(getter_with_clone)] 72 | pub struct MidenProver { 73 | prover_worker: Worker, 74 | prover_output: Rc>>, 75 | } 76 | 77 | #[wasm_bindgen] 78 | impl MidenProver { 79 | #[wasm_bindgen(constructor)] 80 | pub fn 
new() -> Result { 81 | let proving_worker = Worker::new("./proving_worker.js")?; 82 | proving_worker.post_message(&JsValue::from_str("wake worker up"))?; 83 | Ok(MidenProver { 84 | prover_worker: proving_worker, 85 | prover_output: Rc::new(RefCell::new(None)), 86 | }) 87 | } 88 | 89 | #[wasm_bindgen] 90 | pub async fn prove( 91 | &mut self, 92 | program: Vec, 93 | program_inputs: Vec, 94 | proof_options: Vec, 95 | chunk_size: usize, 96 | local_run: bool, 97 | ) -> Result { 98 | self.set_onmessage_handler(); 99 | let work_item = ProvingWorkItem { 100 | program, 101 | program_inputs, 102 | proof_options, 103 | chunk_size, 104 | is_sequential: false, 105 | }; 106 | let payload = to_uint8array(&work_item); 107 | if local_run { 108 | let mut miden_prover = MidenProverAsyncWorker::new()?; 109 | let output = proving_seq_entry_point(&mut miden_prover, payload).await?; 110 | 111 | let deser_output: ProverOutput = from_uint8array(&output).unwrap(); 112 | Ok(deser_output) 113 | } else { 114 | self.prover_worker.post_message(&payload)?; 115 | ResultFuture { 116 | result: self.prover_output.clone(), 117 | } 118 | .await; 119 | let output = self.prover_output.borrow_mut().take().unwrap(); 120 | Ok(output) 121 | } 122 | } 123 | 124 | #[wasm_bindgen] 125 | pub async fn prove_sequential( 126 | &mut self, 127 | program: Vec, 128 | program_inputs: Vec, 129 | proof_options: Vec, 130 | local_run: bool, 131 | ) -> Result { 132 | self.set_onmessage_handler(); 133 | let work_item = ProvingWorkItem { 134 | program, 135 | program_inputs, 136 | proof_options, 137 | chunk_size: 1024, 138 | is_sequential: true, 139 | }; 140 | let payload = to_uint8array(&work_item); 141 | if local_run { 142 | let mut miden_prover = MidenProverAsyncWorker::new()?; 143 | let output = proving_seq_entry_point(&mut miden_prover, payload).await?; 144 | 145 | let deser_output: ProverOutput = from_uint8array(&output).unwrap(); 146 | Ok(deser_output) 147 | } else { 148 | self.prover_worker.post_message(&payload)?; 149 
| ResultFuture { 150 | result: self.prover_output.clone(), 151 | } 152 | .await; 153 | let output = self.prover_output.borrow_mut().take().unwrap(); 154 | Ok(output) 155 | } 156 | } 157 | 158 | fn set_onmessage_handler(&mut self) { 159 | let callback = self.get_on_msg_callback(); 160 | self.prover_worker 161 | .set_onmessage(Some(callback.as_ref().unchecked_ref())); 162 | 163 | // Clean up closure to prevent memory leak 164 | callback.forget(); 165 | } 166 | 167 | /// Message passing by the main thread 168 | fn get_on_msg_callback(&self) -> Closure { 169 | let prover_output = self.prover_output.clone(); 170 | let callback = Closure::new(move |event: MessageEvent| { 171 | debug!("Main thread got prover output"); 172 | let data: Uint8Array = Uint8Array::new(&event.data()); 173 | let output: ProverOutput = from_uint8array(&data).unwrap(); 174 | prover_output.replace(Some(output)); 175 | }); 176 | 177 | callback 178 | } 179 | } 180 | -------------------------------------------------------------------------------- /src/utils/benchmark_block.py: -------------------------------------------------------------------------------- 1 | import struct 2 | import sys 3 | import os 4 | import json 5 | from utxo_dummy_generator import generate_utxo_dummys, fetch_block 6 | import urllib3 7 | import time 8 | import datetime 9 | import resource 10 | 11 | P = 2**251 + 17 * 2**192 + 1 12 | 13 | 14 | def parse_cairo_output(cairo_output): 15 | # Split at line break. 
Then cut off all lines until the start of the 16 | # program output 17 | lines = cairo_output.split('\n') 18 | start_index = lines.index('Program output:') + 1 19 | 20 | print('\n') 21 | prints = lines[:start_index - 1] 22 | for line in prints: 23 | print(line) 24 | 25 | lines = lines[start_index:] 26 | 27 | # Remove the empty lines 28 | lines = [x for x in lines if x.strip() != ''] 29 | 30 | # Cast all values to int 31 | lines = map(int, lines) 32 | 33 | # Make negative values positive 34 | lines = map(lambda x: x if x >= 0 else (x + P) % P, lines) 35 | 36 | return list(lines) 37 | 38 | 39 | class FeltsReader: 40 | def __init__(self, program_output): 41 | self.cursor = 0 42 | self.program_output = program_output 43 | 44 | def read(self): 45 | self.cursor += 1 46 | return self.program_output[self.cursor - 1] 47 | 48 | def read_n(self, felt_count): 49 | self.cursor += felt_count 50 | return self.program_output[self.cursor - felt_count: self.cursor] 51 | 52 | 53 | def felts_to_hash(felts): 54 | res = 0 55 | for i in range(8): 56 | felt = felts[i] 57 | # Swap endianess 58 | felt = struct.unpack("I", felt))[0] 59 | res += pow(2**32, i) * felt 60 | 61 | return hex(res).replace('0x', '').zfill(64) 62 | 63 | 64 | # Pretty format as hex 65 | # remove leading "0x", add leading zeros to 32 bytes, but display zero as "0". 66 | def felt_to_hex(felt): 67 | hex_felt = hex(felt).replace('0x', '').zfill(64) 68 | if(int(hex_felt, 16) == 0): 69 | return "0" 70 | return hex_felt 71 | 72 | 73 | def felts_to_hex(felts): 74 | return list(map(felt_to_hex, felts)) 75 | 76 | 77 | # Inserts all required utxos and returns the utreexo roots. 78 | def setup_bridge_node(block_height): 79 | http = urllib3.PoolManager() 80 | 81 | # Send a reset. 82 | url = 'http://localhost:2121/reset' 83 | _ = http.request('GET', url) 84 | 85 | # Fetch all required utxo hashes. 86 | utxo_hashes = generate_utxo_dummys(block_height) 87 | 88 | # Fill bridge node with utxos. 
89 | for utxo_hash in utxo_hashes: 90 | url = 'http://localhost:2121/add/' + hex(utxo_hash) 91 | r = http.request('GET', url) 92 | 93 | # Get the final list of roots 94 | url = 'http://localhost:2121/roots' 95 | r = http.request('GET', url) 96 | return json.loads(r.data) 97 | 98 | 99 | def bridge_node_running(): 100 | http = urllib3.PoolManager() 101 | 102 | try: 103 | # Send a reset. 104 | url = 'http://localhost:2121/reset' 105 | _ = http.request('GET', url) 106 | except BaseException: 107 | return False 108 | return True 109 | 110 | 111 | if __name__ == '__main__': 112 | 113 | # The first Bitcoin TX ever occured in block 170. The second TX occured in 114 | # block 181. 115 | 116 | if len(sys.argv) < 2: 117 | print( 118 | f'Wrong number of arguments.\nUsage: python {sys.argv[0]} BLOCK_HEIGHT') 119 | exit(1) 120 | block_height = int(sys.argv[1]) 121 | 122 | # Before we do anything else check if the bridge node is running 123 | if not bridge_node_running(): 124 | print("ERROR: Bridge node is not running. Required to initialize the utxo set.") 125 | exit(2) 126 | 127 | output_dir = 'benchmark_tmp' 128 | os.system(f'mkdir -p {output_dir}') 129 | 130 | # Run the Cairo compiler 131 | # Assume we are in the repository root directory. 132 | print('Compiling the Cairo program...') 133 | cmd = f'cairo-compile src/chain_proof/main.cairo --cairo_path src --output {output_dir}/program.json' 134 | print(os.popen(cmd).read()) # In case there are compilation issues 135 | print('Done.') 136 | print('Fetching utxos and setting up brige node and initial state...') 137 | # Copy genesis state.json into the output directory 138 | # also read the program_length from program.json 139 | # and add it to the state.json 140 | f = open('src/chain_proof/state_0.json') 141 | initial_state = json.load(f) 142 | 143 | # Fetch the next block. 
144 | block = fetch_block(block_height) 145 | 146 | initial_state['block_height'] = block_height - 1 147 | initial_state['best_block_hash'] = block['previousblockhash'] 148 | initial_state['utreexo_roots'] = setup_bridge_node(block_height) 149 | initial_state['current_target'] = block['bits'] 150 | 151 | with open(f'{output_dir}/program.json') as f: 152 | program = json.load(f) 153 | initial_state['program_length'] = len(program['data']) 154 | 155 | with open(f'{output_dir}/chain_state.json', 'w') as outfile: 156 | outfile.write(json.dumps(initial_state)) 157 | 158 | chain_state_file = f'{output_dir}/chain_state.json' 159 | print('Done.') 160 | 161 | print('Next up is the cairo runner.') 162 | 163 | # Change the runner command here if you need the pprof trace 164 | # Note: Using --profile_output significantly increases the time spent in 165 | # the runner 166 | # Run the Cairo runner (without pprof trace) 167 | cmd = f'cairo-run --program={output_dir}/program.json --layout=all --print_info --print_output --program_input={chain_state_file} --trace_file={output_dir}/trace.bin --memory_file={output_dir}/memory.bin' 168 | 169 | # Run the Cairo runner (with pprof trace) 170 | # cmd = f'cairo-run --program={output_dir}/program.json --layout=all --print_info --print_output --program_input={chain_state_file} --trace_file={output_dir}/trace.bin --memory_file={output_dir}/memory.bin --profile_output={output_dir}/profile.pb.gz' 171 | 172 | start_time = time.clock_gettime(time.CLOCK_REALTIME) 173 | program_output_string = os.popen(cmd).read() 174 | total_time = time.clock_gettime(time.CLOCK_REALTIME) - start_time 175 | 176 | print(program_output_string) # User can check if everything worked 177 | 178 | print( 179 | f'RUNNER_TIME: {total_time} -> {str(datetime.timedelta(seconds=total_time))}\n') 180 | 181 | # Run Giza prover 182 | cmd = f'giza prove --trace={output_dir}/trace.bin --memory={output_dir}/memory.bin --program={output_dir}/program.json 
--output={output_dir}/proof.bin --num-outputs=50' 183 | start_time = time.clock_gettime(time.CLOCK_REALTIME) 184 | program_output_string = os.popen(cmd).read() 185 | total_time = time.clock_gettime(time.CLOCK_REALTIME) - start_time 186 | 187 | print(program_output_string) 188 | print( 189 | f'PROVER_TIME: {total_time} -> {str(datetime.timedelta(seconds=total_time))}\n') 190 | 191 | print( 192 | f'MAXIMUM_USED_RAM: {resource.getrusage(resource.RUSAGE_CHILDREN)[2]/10**6:.2f} GB') 193 | -------------------------------------------------------------------------------- /src/stark_verifier/air/air_instance.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from starkware.cairo.common.cairo_builtins import BitwiseBuiltin 3 | from starkware.cairo.common.pow import pow 4 | 5 | from stark_verifier.crypto.random import ( 6 | PublicCoin, 7 | draw_elements, 8 | draw_pair, 9 | random_coin_new, 10 | hash_elements, 11 | reseed, 12 | seed_with_pub_inputs, 13 | ) 14 | from stark_verifier.air.pub_inputs import PublicInputs 15 | from stark_verifier.air.stark_proof import ProofContext, ProofOptions, StarkProof 16 | 17 | struct AirInstance { 18 | // Layout 19 | main_segment_width: felt, 20 | aux_trace_width: felt, 21 | aux_segment_widths: felt*, 22 | aux_segment_rands: felt*, 23 | num_aux_segments: felt, 24 | // Context 25 | options: ProofOptions, 26 | context: ProofContext, 27 | num_transition_constraints: felt, 28 | num_assertions: felt, 29 | ce_blowup_factor: felt, 30 | // eval_frame_size: felt, 31 | trace_domain_generator: felt, 32 | lde_domain_generator: felt, 33 | // Public input 34 | pub_inputs: PublicInputs*, 35 | } 36 | 37 | // Coefficients used in construction of constraint composition polynomial 38 | struct ConstraintCompositionCoefficients { 39 | // Transition constraints (alpha and beta) 40 | transition_a: felt*, 41 | transition_b: felt*, 42 | // Boundary constraints (alpha and beta) 
43 | boundary_a: felt*, 44 | boundary_b: felt*, 45 | } 46 | 47 | struct TraceCoefficients { 48 | n_values: felt, 49 | values: felt*, 50 | } 51 | 52 | // Coefficients used in construction of DEEP composition polynomial 53 | struct DeepCompositionCoefficients { 54 | // Trace polynomial composition coefficients $\alpha_i$, $\beta_i$, and $\gamma_i$ 55 | trace: TraceCoefficients*, 56 | // Constraint column polynomial composition coefficients $\delta_j$ 57 | constraints: felt*, 58 | // Degree adjustment composition coefficients $\lambda$ and $\mu$ 59 | degree: (felt, felt), 60 | } 61 | 62 | func air_instance_new{ 63 | range_check_ptr 64 | }( 65 | proof: StarkProof*, 66 | pub_inputs: PublicInputs*, 67 | options: ProofOptions 68 | ) -> AirInstance { 69 | alloc_locals; 70 | let (aux_segment_widths: felt*) = alloc(); 71 | let (aux_segment_rands: felt*) = alloc(); 72 | 73 | let trace_domain_generator = [range_check_ptr]; 74 | let lde_domain_generator = [range_check_ptr + 1]; 75 | let range_check_ptr = range_check_ptr + 2; 76 | 77 | %{ 78 | # TODO this is insecure 79 | # 2-adic root of unity for field with modulus $2^{64} - 2^{32} + 1$ 80 | TWO_ADICITY = 32 81 | # 2^32 root of unity 82 | G = 1753635133440165772 83 | 84 | PG = 18446744069414584321 # 2^64 - 2^32 - 1 85 | power = pow(2, TWO_ADICITY - ids.proof.context.log_trace_length, PG) 86 | ids.trace_domain_generator = pow(G, power, PG) 87 | 88 | log_lde_domain_size = ids.options.log_blowup_factor + ids.proof.context.log_trace_length 89 | power = pow(2, TWO_ADICITY - log_lde_domain_size, PG) 90 | ids.lde_domain_generator = pow(G, power, PG) 91 | print("blowup_factor", ids.options.blowup_factor) 92 | %} 93 | 94 | // TODO: Make configurable for other VMs and custom AIRs 95 | let res = AirInstance( 96 | main_segment_width=72, 97 | aux_trace_width=9, 98 | aux_segment_widths=aux_segment_widths, 99 | aux_segment_rands=aux_segment_rands, 100 | num_aux_segments=1, 101 | options=options, 102 | context=proof.context, 103 | 
num_transition_constraints=49, 104 | num_assertions=7, 105 | ce_blowup_factor=options.blowup_factor, 106 | // eval_frame_size=2, 107 | trace_domain_generator=trace_domain_generator, 108 | lde_domain_generator=lde_domain_generator, 109 | pub_inputs=pub_inputs, 110 | ); 111 | return res; 112 | } 113 | 114 | // Returns coefficients needed to construct the constraint composition polynomial 115 | func get_constraint_composition_coefficients{ 116 | range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin 117 | }(air: AirInstance) -> ConstraintCompositionCoefficients { 118 | alloc_locals; 119 | 120 | let (t_coefficients_a: felt*) = alloc(); 121 | let (t_coefficients_b: felt*) = alloc(); 122 | let num_constraints = air.num_transition_constraints; 123 | draw_pairs( 124 | n_pairs=num_constraints, coefficients_a=t_coefficients_a, coefficients_b=t_coefficients_b 125 | ); 126 | 127 | let (b_coefficients_a: felt*) = alloc(); 128 | let (b_coefficients_b: felt*) = alloc(); 129 | let num_assertions = air.num_assertions; 130 | draw_pairs( 131 | n_pairs=num_assertions, coefficients_a=b_coefficients_a, coefficients_b=b_coefficients_b 132 | ); 133 | 134 | let res = ConstraintCompositionCoefficients( 135 | transition_a=t_coefficients_a, 136 | transition_b=t_coefficients_b, 137 | boundary_a=b_coefficients_a, 138 | boundary_b=b_coefficients_b, 139 | ); 140 | 141 | return res; 142 | } 143 | 144 | // Returns coefficients needed to construct the DEEP composition polynomial 145 | func get_deep_composition_coefficients{ 146 | range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin 147 | }(air: AirInstance) -> DeepCompositionCoefficients { 148 | alloc_locals; 149 | 150 | let (t_coefficients: TraceCoefficients*) = alloc(); 151 | set_trace_coefficients( 152 | n_vec= air.context.trace_layout.main_segment_width + air.aux_trace_width, 153 | n_coefficients=3, // TODO: Why is 3 hardcoded? 
154 | coefficients=t_coefficients, 155 | ); 156 | 157 | let (c_coefficients: felt*) = alloc(); 158 | draw_elements(n_elements=air.ce_blowup_factor, elements=c_coefficients); 159 | 160 | let (lambda, mu) = draw_pair(); 161 | 162 | let res = DeepCompositionCoefficients( 163 | trace=t_coefficients, constraints=c_coefficients, degree=(lambda, mu) 164 | ); 165 | return res; 166 | } 167 | 168 | func set_trace_coefficients{ 169 | range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin 170 | }(n_vec: felt, n_coefficients: felt, coefficients: TraceCoefficients*) { 171 | if (n_vec == 0) { 172 | return (); 173 | } 174 | // Create a new TraceCoefficients object 175 | let (values) = alloc(); 176 | assert coefficients[0] = TraceCoefficients(n_coefficients, values); 177 | 178 | // Fill it with random elements 179 | draw_elements(n_elements=n_coefficients, elements=values); 180 | 181 | // Recurse 182 | set_trace_coefficients( 183 | n_vec=n_vec - 1, n_coefficients=n_coefficients, coefficients=&coefficients[1] 184 | ); 185 | return (); 186 | } 187 | 188 | // Returns the next pair of pseudo-random field elements, and adds them to the 189 | // list of coefficients 190 | func draw_pairs{ 191 | range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin 192 | }(n_pairs: felt, coefficients_a: felt*, coefficients_b: felt*) { 193 | 194 | if (n_pairs == 0) { 195 | return (); 196 | } 197 | 198 | let (num1, num2) = draw_pair(); 199 | assert coefficients_a[0] = num1; 200 | assert coefficients_b[0] = num2; 201 | 202 | return draw_pairs( 203 | n_pairs=n_pairs - 1, coefficients_a=coefficients_a + 1, coefficients_b=coefficients_b + 1 204 | ); 205 | } 206 | -------------------------------------------------------------------------------- /src/stark_verifier/composer.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from 
starkware.cairo.common.pow import pow 3 | 4 | from stark_verifier.air.air_instance import AirInstance, DeepCompositionCoefficients 5 | from stark_verifier.air.transitions.frame import EvaluationFrame 6 | from stark_verifier.channel import Table 7 | from stark_verifier.utils import Vec 8 | from utils.math_goldilocks import mul_g, sub_g, add_g, div_g, pow_g 9 | 10 | struct DeepComposer { 11 | cc: DeepCompositionCoefficients, 12 | x_coordinates: felt*, 13 | z_curr: felt, 14 | z_next: felt, 15 | } 16 | 17 | func deep_composer_new{range_check_ptr}( 18 | air: AirInstance, query_positions: felt*, z: felt, cc: DeepCompositionCoefficients 19 | ) -> DeepComposer { 20 | alloc_locals; 21 | 22 | let g = air.trace_domain_generator; 23 | let g_lde = air.lde_domain_generator; 24 | let domain_offset = 7; // TODO why is this hardcoded? 25 | 26 | // TODO: Don't hardcode the number of query positions here 27 | let (x_coordinates: felt*) = alloc(); 28 | 29 | let z_next = [range_check_ptr]; 30 | let range_check_ptr = range_check_ptr + 1; 31 | 32 | // TODO this is insecure - need to properly run goldilocks mul within cairo 33 | %{ 34 | PG = 18446744069414584321 # 2^64 - 2^32 - 1 35 | for i in range(27): 36 | x = pow(ids.g_lde, memory[ids.query_positions + i], PG) 37 | x = (x * ids.domain_offset) % PG 38 | memory[ids.x_coordinates + i] = x 39 | ids.z_next = (ids.z * ids.g) % PG 40 | %} 41 | 42 | let z_curr = z; 43 | 44 | let res = DeepComposer(cc, x_coordinates, z_curr, z_next); 45 | return res; 46 | } 47 | 48 | func compose_row{range_check_ptr}( 49 | row_ptr: felt*, 50 | i: felt, 51 | ood_frame: EvaluationFrame, 52 | composer: DeepComposer, 53 | sum_curr: felt, 54 | sum_next: felt, 55 | n_cols: felt, 56 | cc_offset: felt, 57 | ) -> (felt, felt) { 58 | alloc_locals; 59 | 60 | if (i == n_cols) { 61 | return (sum_curr, sum_next); 62 | } 63 | 64 | let row_cell = [row_ptr + i]; 65 | 66 | let frame_curr = ood_frame.current[i]; 67 | let curr = sub_g(row_cell, frame_curr); 68 | local mul_curr = 
mul_g(curr, composer.cc.trace[i + cc_offset].values[0]); 69 | local sum_curr_new = add_g(sum_curr, mul_curr); 70 | 71 | tempvar frame_next = ood_frame.next[i]; 72 | let next = sub_g(row_cell, frame_next); 73 | local mul_next = mul_g(next, composer.cc.trace[i + cc_offset].values[1]); 74 | local sum_next_new = add_g(sum_next, mul_next); 75 | 76 | return compose_row( 77 | row_ptr, i + 1, ood_frame, composer, sum_curr_new, sum_next_new, n_cols, cc_offset 78 | ); 79 | } 80 | 81 | func compose_loop{range_check_ptr}( 82 | result_ptr: felt*, 83 | prev_result_ptr: felt*, 84 | n: felt, 85 | composer: DeepComposer, 86 | queried_trace_states: Table, 87 | ood_frame: EvaluationFrame, 88 | cc_offset: felt, 89 | inner_loop_len: felt, 90 | add_to_previous_result: felt, 91 | ) -> () { 92 | alloc_locals; 93 | 94 | if (n == 0) { 95 | return (); 96 | } 97 | 98 | let row_ptr = queried_trace_states.elements; 99 | let z_curr = composer.z_curr; 100 | let z_next = composer.z_next; 101 | let n_cols = queried_trace_states.n_cols; 102 | let offset = queried_trace_states.n_rows - n; 103 | tempvar x_coord_ptr = composer.x_coordinates + offset; 104 | 105 | let (sum_curr, sum_next) = compose_row( 106 | row_ptr + (offset * n_cols), 0, ood_frame, composer, 0, 0, inner_loop_len, cc_offset 107 | ); 108 | 109 | tempvar x = [x_coord_ptr]; 110 | let x_z_curr = sub_g(x, z_curr); 111 | let x_z_next = sub_g(x, z_next); 112 | let s_curr = div_g(sum_curr, x_z_curr); 113 | let s_next = div_g(sum_next, x_z_next); 114 | let sum = add_g(s_curr, s_next); 115 | 116 | local to_add; 117 | if (add_to_previous_result == 1) { 118 | let prev_sum = [prev_result_ptr]; 119 | to_add = prev_sum; 120 | } else { 121 | to_add = 0; 122 | } 123 | 124 | let sum_new = add_g(sum, to_add); 125 | assert [result_ptr] = sum_new; 126 | 127 | return compose_loop( 128 | result_ptr + 1, 129 | prev_result_ptr + 1, 130 | n - 1, 131 | composer, 132 | queried_trace_states, 133 | ood_frame, 134 | cc_offset, 135 | inner_loop_len, 136 | 
add_to_previous_result, 137 | ); 138 | } 139 | 140 | func compose_trace_columns{range_check_ptr}( 141 | composer: DeepComposer, 142 | queried_main_trace_states: Table, 143 | queried_aux_trace_states: Table, 144 | ood_main_frame: EvaluationFrame, 145 | ood_aux_frame: EvaluationFrame, 146 | ) -> felt* { 147 | alloc_locals; 148 | 149 | // Main trace coefficient rows 150 | let n_cols = queried_main_trace_states.n_cols; 151 | 152 | let z_curr = composer.z_curr; 153 | let z_next = composer.z_next; 154 | 155 | // Compose columns of the main segment 156 | let (local mock_prev_result: felt*) = alloc(); 157 | let (local result: felt*) = alloc(); 158 | // TODO HARDCODE: Don't hardcode the number of query and columns 159 | tempvar n = 27; 160 | tempvar result_ptr = result; 161 | tempvar mock_prev_results_ptr = mock_prev_result; 162 | 163 | // TODO HARDCODE do not hardcode inner loop len 164 | compose_loop( 165 | result_ptr, 166 | mock_prev_results_ptr, 167 | n, 168 | composer, 169 | queried_main_trace_states, 170 | ood_main_frame, 171 | 0, 172 | 72, 173 | 0, 174 | ); 175 | 176 | // // Aux trace coefficient rows 177 | let n_cols = queried_aux_trace_states.n_cols; 178 | 179 | let z_curr = composer.z_curr; 180 | let z_next = composer.z_next; 181 | 182 | // Compose columns of the main segment 183 | let (local with_aux_result: felt*) = alloc(); 184 | // TODO HARDCODE: Don't hardcode the number of query and columns 185 | tempvar n = 27; 186 | tempvar result_ptr = with_aux_result; 187 | tempvar prev_result_ptr = result; 188 | 189 | // TODO HARDCODE do not hardcode inner loop len 190 | compose_loop( 191 | result_ptr, prev_result_ptr, n, composer, queried_aux_trace_states, ood_aux_frame, 72, 9, 1 192 | ); 193 | return with_aux_result; 194 | } 195 | 196 | func compose_constraint_evaluations_add_terms{range_check_ptr}( 197 | composer: DeepComposer, 198 | queried_evaluations: Table, 199 | ood_evaluations: Vec, 200 | row_ptr: felt*, 201 | sum: felt, 202 | idx: felt, 203 | iter: felt, 204 | 
) -> felt { 205 | if (idx == iter) { 206 | return sum; 207 | } 208 | 209 | let r = row_ptr[idx]; 210 | let e = ood_evaluations.elements[idx]; 211 | let r1 = sub_g(r, e); 212 | let c = composer.cc.constraints[idx]; 213 | let r2 = mul_g(r1, c); 214 | let sum_new = add_g(sum, r2); 215 | 216 | return compose_constraint_evaluations_add_terms( 217 | composer, queried_evaluations, ood_evaluations, row_ptr, sum_new, idx + 1, iter 218 | ); 219 | } 220 | 221 | func compose_constraint_evaluations_loop{range_check_ptr}( 222 | composer: DeepComposer, 223 | queried_evaluations: Table, 224 | ood_evaluations: Vec, 225 | idx: felt, 226 | result_ptr: felt*, 227 | iterations: felt, 228 | z_m: felt, 229 | ) -> () { 230 | alloc_locals; 231 | if (idx == iterations) { 232 | return (); 233 | } 234 | 235 | let row: felt* = queried_evaluations.elements; 236 | let n_cols = queried_evaluations.n_cols; 237 | let row_ptr = row + (idx * n_cols); 238 | let cc_constraint: felt* = composer.cc.constraints; 239 | let x_coord_ptr = composer.x_coordinates + idx; 240 | 241 | let sum = compose_constraint_evaluations_add_terms( 242 | composer, queried_evaluations, ood_evaluations, row_ptr, 0, 0, 8 243 | ); 244 | 245 | tempvar x = [x_coord_ptr]; 246 | let div = sub_g(x, z_m); 247 | tempvar sum_final = div_g(sum, div); 248 | assert [result_ptr] = sum_final; 249 | 250 | return compose_constraint_evaluations_loop( 251 | composer, queried_evaluations, ood_evaluations, idx + 1, result_ptr + 1, iterations, z_m 252 | ); 253 | } 254 | 255 | func compose_constraint_evaluations{range_check_ptr}( 256 | composer: DeepComposer, queried_evaluations: Table, ood_evaluations: Vec 257 | ) -> felt* { 258 | alloc_locals; 259 | 260 | // Compute z^m 261 | let num_eval_columns = ood_evaluations.n_elements; 262 | let z = composer.z_curr; 263 | let z_m = pow_g(z, num_eval_columns); 264 | local range_check_ptr = range_check_ptr; 265 | let (local result: felt*) = alloc(); 266 | 267 | tempvar result_ptr = result; 268 | 269 | // TODO 
HARDCODE: don't hardcode number of queries 270 | compose_constraint_evaluations_loop( 271 | composer, queried_evaluations, ood_evaluations, 0, result_ptr, 27, z_m 272 | ); 273 | 274 | return result; 275 | } 276 | 277 | func combine_compositions_loop{range_check_ptr}( 278 | composer: DeepComposer, 279 | t_composition: felt*, 280 | c_composition: felt*, 281 | results_ptr: felt*, 282 | idx: felt, 283 | iterations: felt, 284 | ) -> () { 285 | alloc_locals; 286 | if (idx == iterations) { 287 | return (); 288 | } 289 | 290 | let t = t_composition[idx]; 291 | let c = c_composition[idx]; 292 | let composition = add_g(t, c); 293 | 294 | let degree_1_x = mul_g(composer.x_coordinates[idx], composer.cc.degree[1]); 295 | let degree_0_1_x = add_g(composer.cc.degree[0], degree_1_x); 296 | let composition = mul_g(composition, degree_0_1_x); 297 | assert [results_ptr] = composition; 298 | 299 | return combine_compositions_loop( 300 | composer, t_composition, c_composition, results_ptr + 1, idx + 1, iterations 301 | ); 302 | } 303 | 304 | func combine_compositions{range_check_ptr}( 305 | composer: DeepComposer, t_composition: felt*, c_composition: felt* 306 | ) -> felt* { 307 | alloc_locals; 308 | 309 | let (local result: felt*) = alloc(); 310 | // TODO HARDCODE: Don't hardcode number of queries 311 | tempvar n = 27; 312 | 313 | combine_compositions_loop(composer, t_composition, c_composition, result, 0, n); 314 | 315 | return result; 316 | } 317 | -------------------------------------------------------------------------------- /aero-sdk/miden-wasm/src/convert/convert_proof.rs: -------------------------------------------------------------------------------- 1 | use crate::convert::sdk::sdk; 2 | use miden::{FieldExtension, HashFunction, StarkProof}; 3 | use miden_air::{Felt, ProcessorAir, PublicInputs}; 4 | use miden_core::{utils::Serializable, ProgramOutputs}; 5 | use winter_air::{ 6 | proof::{Commitments, Context, OodFrame, Queries, Table}, 7 | Air, EvaluationFrame, ProofOptions, 
TraceLayout, 8 | }; 9 | use winter_crypto::{hash::ByteDigest, hashers::Blake2s_256, BatchMerkleProof, Hasher}; 10 | use winter_fri::FriProof; 11 | use winter_verifier::{math::log2, ConstraintQueries, TraceQueries}; 12 | 13 | impl IntoSdk for sdk::StarkProof { 14 | fn into_sdk(input: StarkProof, params: &ProcessorAir) -> Self { 15 | Self { 16 | ood_frame: Some(sdk::OodFrame::into_sdk(input.ood_frame, params)), 17 | context: Some(input.context.into()), 18 | commitments: Some(sdk::Commitments::into_sdk(input.commitments, params)), 19 | trace_queries: Some(sdk::TraceQueries::into_sdk(input.trace_queries, params)), 20 | constraint_queries: Some(sdk::ConstraintQueries::into_sdk( 21 | input.constraint_queries, 22 | params, 23 | )), 24 | fri_proof: Some(sdk::FriProof::into_sdk(input.fri_proof, params)), 25 | pow_nonce: input.pow_nonce, 26 | } 27 | } 28 | } 29 | 30 | pub trait IntoSdk { 31 | fn into_sdk(input: Input, params: Parameters) -> Self; 32 | } 33 | 34 | impl IntoSdk for sdk::OodFrame { 35 | fn into_sdk(input: OodFrame, params: &ProcessorAir) -> Self { 36 | let main_trace_width = params.trace_layout().main_trace_width(); 37 | let aux_trace_width = params.trace_layout().aux_trace_width(); 38 | let num_evaluations = params.ce_blowup_factor(); 39 | let (ood_main_trace_frame, ood_aux_trace_frame, ood_constraint_evaluations) = input 40 | .clone() 41 | .parse::(main_trace_width, aux_trace_width, num_evaluations) 42 | .unwrap(); 43 | 44 | Self { 45 | main_frame: Some(ood_main_trace_frame.into()), 46 | aux_frame: ood_aux_trace_frame.map(|f| f.into()), 47 | evaluations: ood_constraint_evaluations 48 | .iter() 49 | .map(|e| e.into()) 50 | .collect(), 51 | } 52 | } 53 | } 54 | 55 | impl From<&Felt> for sdk::FieldElement { 56 | fn from(element: &Felt) -> Self { 57 | Self { 58 | element: element.to_bytes(), 59 | } 60 | } 61 | } 62 | 63 | impl From> for sdk::EvaluationFrame { 64 | fn from(frame: EvaluationFrame) -> Self { 65 | let current = frame.current().iter().map(|e| 
e.into()).collect::>(); 66 | let next = frame.next().iter().map(|e| e.into()).collect::>(); 67 | 68 | Self { current, next } 69 | } 70 | } 71 | 72 | impl From for sdk::Context { 73 | fn from(value: Context) -> Self { 74 | let binding = value.get_trace_info(); 75 | let trace_meta = binding.meta(); 76 | let field_modulus = sdk::FieldElement { 77 | element: value.field_modulus_bytes().to_vec(), 78 | }; 79 | 80 | Self { 81 | trace_layout: Some(value.trace_layout().into()), 82 | trace_length: value.trace_length() as u64, 83 | trace_meta: trace_meta.to_vec(), 84 | field_modulus: Some(field_modulus), 85 | options: Some(value.options().into()), 86 | } 87 | } 88 | } 89 | 90 | impl From<&TraceLayout> for sdk::TraceLayout { 91 | fn from(layout: &TraceLayout) -> Self { 92 | let mut aux_segment_widths = Vec::new(); 93 | let mut aux_segment_rands = Vec::new(); 94 | 95 | for i in 0..layout.num_aux_segments() { 96 | aux_segment_widths.push(layout.get_aux_segment_width(i) as u64); 97 | aux_segment_rands.push(layout.get_aux_segment_rand_elements(i) as u64); 98 | } 99 | 100 | Self { 101 | main_segment_width: layout.main_trace_width() as u64, 102 | aux_segment_widths, 103 | aux_segment_rands, 104 | num_aux_segments: layout.num_aux_segments() as u64, 105 | } 106 | } 107 | } 108 | 109 | impl From<&ProofOptions> for sdk::ProofOptions { 110 | fn from(value: &ProofOptions) -> Self { 111 | let hash_fn: sdk::HashFunction = value.hash_fn().into(); 112 | let field_extension: sdk::FieldExtension = value.field_extension().into(); 113 | let fri_options = value.to_fri_options(); 114 | 115 | Self { 116 | num_queries: value.num_queries() as u32, 117 | blowup_factor: value.blowup_factor() as u32, 118 | grinding_factor: value.grinding_factor(), 119 | hash_fn: hash_fn.into(), 120 | field_extension: field_extension.into(), 121 | fri_folding_factor: fri_options.folding_factor() as u32, 122 | fri_max_remainder_size: fri_options.max_remainder_size() as u32, 123 | // this should be configurable 124 | 
prime_field: sdk::PrimeField::Goldilocks.into(), 125 | } 126 | } 127 | } 128 | 129 | impl From for sdk::HashFunction { 130 | fn from(value: HashFunction) -> Self { 131 | match value { 132 | HashFunction::Blake2s_256 => Self::Blake2s, 133 | HashFunction::Blake3_192 => todo!(), 134 | HashFunction::Blake3_256 => todo!(), 135 | HashFunction::Sha3_256 => todo!(), 136 | } 137 | } 138 | } 139 | 140 | impl From for sdk::FieldExtension { 141 | fn from(value: FieldExtension) -> Self { 142 | match value { 143 | FieldExtension::None => Self::None, 144 | FieldExtension::Quadratic => todo!(), 145 | FieldExtension::Cubic => todo!(), 146 | } 147 | } 148 | } 149 | 150 | impl From<&ByteDigest> for sdk::Digest { 151 | fn from(value: &ByteDigest) -> Self { 152 | Self { 153 | data: value.to_bytes().to_vec(), 154 | } 155 | } 156 | } 157 | 158 | impl IntoSdk for sdk::Commitments { 159 | fn into_sdk(input: Commitments, params: &ProcessorAir) -> Self { 160 | let num_trace_segments = params.trace_layout().num_segments(); 161 | let lde_domain_size = params.lde_domain_size(); 162 | let fri_options = params.options().to_fri_options(); 163 | let num_fri_layers = fri_options.num_fri_layers(lde_domain_size); 164 | 165 | let (trace_commitments, constraint_commitment, fri_commitments) = input 166 | .clone() 167 | .parse::>(num_trace_segments, num_fri_layers) 168 | .unwrap(); 169 | 170 | let constraint_root: sdk::Digest = (&constraint_commitment).into(); 171 | 172 | Self { 173 | trace_roots: trace_commitments.iter().map(|d| d.into()).collect(), 174 | constraint_root: Some(constraint_root), 175 | fri_roots: fri_commitments.iter().map(|d| d.into()).collect(), 176 | } 177 | } 178 | } 179 | 180 | impl From> for sdk::Table { 181 | fn from(table: Table) -> Self { 182 | // table saved as a single dim array 183 | let data = table.data().iter().map(|e| e.into()).collect::>(); 184 | 185 | Self { 186 | n_rows: table.num_rows() as u32, 187 | n_cols: table.num_columns() as u32, 188 | elements: data, 189 | } 190 
| } 191 | } 192 | 193 | impl IntoSdk, &ProcessorAir> for sdk::TraceQueries { 194 | fn into_sdk(input: Vec, params: &ProcessorAir) -> Self { 195 | let trace_queries = 196 | TraceQueries::>::new(input.clone(), params).unwrap(); 197 | 198 | Self { 199 | main_states: Some(trace_queries.main_states.into()), 200 | aux_states: trace_queries.aux_states.map(|t| t.into()), 201 | query_proofs: trace_queries 202 | .query_proofs 203 | .iter() 204 | .map(|p| p.into()) 205 | .collect(), 206 | } 207 | } 208 | } 209 | 210 | impl IntoSdk for sdk::ConstraintQueries { 211 | fn into_sdk(input: Queries, params: &ProcessorAir) -> Self { 212 | let constraint_queries = 213 | ConstraintQueries::>::new(input, params).unwrap(); 214 | 215 | Self { 216 | evaluations: Some(constraint_queries.evaluations.into()), 217 | query_proof: Some((&constraint_queries.query_proofs).into()), 218 | } 219 | } 220 | } 221 | 222 | impl IntoSdk for sdk::FriProof { 223 | fn into_sdk(proof: FriProof, params: &ProcessorAir) -> Self { 224 | let num_partitions = log2(proof.num_partitions()); 225 | let (queries_values, proofs) = proof 226 | .clone() 227 | .parse_layers::, Felt>( 228 | params.lde_domain_size(), 229 | params.options().to_fri_options().folding_factor(), 230 | ) 231 | .unwrap(); 232 | 233 | let layers = proofs 234 | .iter() 235 | .zip(queries_values) 236 | .map(|(p, q)| sdk::FriProofLayer { 237 | values: q.iter().map(|e| e.into()).collect::>(), 238 | proofs: Some(p.into()), 239 | }) 240 | .collect(); 241 | 242 | let remainder = proof 243 | .parse_remainder::() 244 | .unwrap() 245 | .iter() 246 | .map(|e| e.into()) 247 | .collect(); 248 | 249 | Self { 250 | layers, 251 | remainder, 252 | num_partitions, 253 | } 254 | } 255 | } 256 | 257 | impl From for sdk::MidenProgramOutputs { 258 | fn from(outputs: ProgramOutputs) -> Self { 259 | Self { 260 | stack: outputs.stack().iter().map(|e| e.clone().into()).collect(), 261 | overflow_addrs: outputs 262 | .overflow_addrs() 263 | .iter() 264 | .map(|e| 
e.clone().into()) 265 | .collect(), 266 | } 267 | } 268 | } 269 | 270 | impl From for sdk::MidenPublicInputs { 271 | fn from(inputs: PublicInputs) -> Self { 272 | Self { 273 | program_hash: Some(sdk::Digest { 274 | data: inputs.program_hash.to_bytes(), 275 | }), 276 | stack_inputs: inputs.stack_inputs.iter().map(|e| e.into()).collect(), 277 | outputs: Some(inputs.outputs.into()), 278 | } 279 | } 280 | } 281 | 282 | impl From<&BatchMerkleProof> for sdk::BatchMerkleProof { 283 | fn from(proof: &BatchMerkleProof) -> Self { 284 | let leaves = proof 285 | .leaves 286 | .iter() 287 | .map(|e| sdk::Digest { data: e.to_bytes() }) 288 | .collect(); 289 | 290 | let nodes = proof 291 | .nodes 292 | .iter() 293 | .map(|e| sdk::BatchMerkleProofLayer { 294 | nodes: e 295 | .iter() 296 | .map(|v| sdk::Digest { data: v.to_bytes() }) 297 | .collect(), 298 | }) 299 | .collect(); 300 | 301 | Self { 302 | leaves, 303 | nodes, 304 | depth: proof.depth as u32, 305 | } 306 | } 307 | } 308 | -------------------------------------------------------------------------------- /src/utils/pow2.cairo: -------------------------------------------------------------------------------- 1 | // This is taken from https://github.com/greenlucid/chess-cairo. 
2 | from starkware.cairo.common.registers import get_label_location 3 | 4 | func pow2(i: felt) -> felt { 5 | let (data_address) = get_label_location(data); 6 | return [data_address + i]; 7 | 8 | data: 9 | dw 0x1; 10 | dw 0x2; 11 | dw 0x4; 12 | dw 0x8; 13 | dw 0x10; 14 | dw 0x20; 15 | dw 0x40; 16 | dw 0x80; 17 | dw 0x100; 18 | dw 0x200; 19 | dw 0x400; 20 | dw 0x800; 21 | dw 0x1000; 22 | dw 0x2000; 23 | dw 0x4000; 24 | dw 0x8000; 25 | dw 0x10000; 26 | dw 0x20000; 27 | dw 0x40000; 28 | dw 0x80000; 29 | dw 0x100000; 30 | dw 0x200000; 31 | dw 0x400000; 32 | dw 0x800000; 33 | dw 0x1000000; 34 | dw 0x2000000; 35 | dw 0x4000000; 36 | dw 0x8000000; 37 | dw 0x10000000; 38 | dw 0x20000000; 39 | dw 0x40000000; 40 | dw 0x80000000; 41 | dw 0x100000000; 42 | dw 0x200000000; 43 | dw 0x400000000; 44 | dw 0x800000000; 45 | dw 0x1000000000; 46 | dw 0x2000000000; 47 | dw 0x4000000000; 48 | dw 0x8000000000; 49 | dw 0x10000000000; 50 | dw 0x20000000000; 51 | dw 0x40000000000; 52 | dw 0x80000000000; 53 | dw 0x100000000000; 54 | dw 0x200000000000; 55 | dw 0x400000000000; 56 | dw 0x800000000000; 57 | dw 0x1000000000000; 58 | dw 0x2000000000000; 59 | dw 0x4000000000000; 60 | dw 0x8000000000000; 61 | dw 0x10000000000000; 62 | dw 0x20000000000000; 63 | dw 0x40000000000000; 64 | dw 0x80000000000000; 65 | dw 0x100000000000000; 66 | dw 0x200000000000000; 67 | dw 0x400000000000000; 68 | dw 0x800000000000000; 69 | dw 0x1000000000000000; 70 | dw 0x2000000000000000; 71 | dw 0x4000000000000000; 72 | dw 0x8000000000000000; 73 | dw 0x10000000000000000; 74 | dw 0x20000000000000000; 75 | dw 0x40000000000000000; 76 | dw 0x80000000000000000; 77 | dw 0x100000000000000000; 78 | dw 0x200000000000000000; 79 | dw 0x400000000000000000; 80 | dw 0x800000000000000000; 81 | dw 0x1000000000000000000; 82 | dw 0x2000000000000000000; 83 | dw 0x4000000000000000000; 84 | dw 0x8000000000000000000; 85 | dw 0x10000000000000000000; 86 | dw 0x20000000000000000000; 87 | dw 0x40000000000000000000; 88 | dw 0x80000000000000000000; 
89 | dw 0x100000000000000000000; 90 | dw 0x200000000000000000000; 91 | dw 0x400000000000000000000; 92 | dw 0x800000000000000000000; 93 | dw 0x1000000000000000000000; 94 | dw 0x2000000000000000000000; 95 | dw 0x4000000000000000000000; 96 | dw 0x8000000000000000000000; 97 | dw 0x10000000000000000000000; 98 | dw 0x20000000000000000000000; 99 | dw 0x40000000000000000000000; 100 | dw 0x80000000000000000000000; 101 | dw 0x100000000000000000000000; 102 | dw 0x200000000000000000000000; 103 | dw 0x400000000000000000000000; 104 | dw 0x800000000000000000000000; 105 | dw 0x1000000000000000000000000; 106 | dw 0x2000000000000000000000000; 107 | dw 0x4000000000000000000000000; 108 | dw 0x8000000000000000000000000; 109 | dw 0x10000000000000000000000000; 110 | dw 0x20000000000000000000000000; 111 | dw 0x40000000000000000000000000; 112 | dw 0x80000000000000000000000000; 113 | dw 0x100000000000000000000000000; 114 | dw 0x200000000000000000000000000; 115 | dw 0x400000000000000000000000000; 116 | dw 0x800000000000000000000000000; 117 | dw 0x1000000000000000000000000000; 118 | dw 0x2000000000000000000000000000; 119 | dw 0x4000000000000000000000000000; 120 | dw 0x8000000000000000000000000000; 121 | dw 0x10000000000000000000000000000; 122 | dw 0x20000000000000000000000000000; 123 | dw 0x40000000000000000000000000000; 124 | dw 0x80000000000000000000000000000; 125 | dw 0x100000000000000000000000000000; 126 | dw 0x200000000000000000000000000000; 127 | dw 0x400000000000000000000000000000; 128 | dw 0x800000000000000000000000000000; 129 | dw 0x1000000000000000000000000000000; 130 | dw 0x2000000000000000000000000000000; 131 | dw 0x4000000000000000000000000000000; 132 | dw 0x8000000000000000000000000000000; 133 | dw 0x10000000000000000000000000000000; 134 | dw 0x20000000000000000000000000000000; 135 | dw 0x40000000000000000000000000000000; 136 | dw 0x80000000000000000000000000000000; 137 | dw 0x100000000000000000000000000000000; 138 | dw 0x200000000000000000000000000000000; 139 | dw 
0x400000000000000000000000000000000; 140 | dw 0x800000000000000000000000000000000; 141 | dw 0x1000000000000000000000000000000000; 142 | dw 0x2000000000000000000000000000000000; 143 | dw 0x4000000000000000000000000000000000; 144 | dw 0x8000000000000000000000000000000000; 145 | dw 0x10000000000000000000000000000000000; 146 | dw 0x20000000000000000000000000000000000; 147 | dw 0x40000000000000000000000000000000000; 148 | dw 0x80000000000000000000000000000000000; 149 | dw 0x100000000000000000000000000000000000; 150 | dw 0x200000000000000000000000000000000000; 151 | dw 0x400000000000000000000000000000000000; 152 | dw 0x800000000000000000000000000000000000; 153 | dw 0x1000000000000000000000000000000000000; 154 | dw 0x2000000000000000000000000000000000000; 155 | dw 0x4000000000000000000000000000000000000; 156 | dw 0x8000000000000000000000000000000000000; 157 | dw 0x10000000000000000000000000000000000000; 158 | dw 0x20000000000000000000000000000000000000; 159 | dw 0x40000000000000000000000000000000000000; 160 | dw 0x80000000000000000000000000000000000000; 161 | dw 0x100000000000000000000000000000000000000; 162 | dw 0x200000000000000000000000000000000000000; 163 | dw 0x400000000000000000000000000000000000000; 164 | dw 0x800000000000000000000000000000000000000; 165 | dw 0x1000000000000000000000000000000000000000; 166 | dw 0x2000000000000000000000000000000000000000; 167 | dw 0x4000000000000000000000000000000000000000; 168 | dw 0x8000000000000000000000000000000000000000; 169 | dw 0x10000000000000000000000000000000000000000; 170 | dw 0x20000000000000000000000000000000000000000; 171 | dw 0x40000000000000000000000000000000000000000; 172 | dw 0x80000000000000000000000000000000000000000; 173 | dw 0x100000000000000000000000000000000000000000; 174 | dw 0x200000000000000000000000000000000000000000; 175 | dw 0x400000000000000000000000000000000000000000; 176 | dw 0x800000000000000000000000000000000000000000; 177 | dw 0x1000000000000000000000000000000000000000000; 178 | dw 
0x2000000000000000000000000000000000000000000; 179 | dw 0x4000000000000000000000000000000000000000000; 180 | dw 0x8000000000000000000000000000000000000000000; 181 | dw 0x10000000000000000000000000000000000000000000; 182 | dw 0x20000000000000000000000000000000000000000000; 183 | dw 0x40000000000000000000000000000000000000000000; 184 | dw 0x80000000000000000000000000000000000000000000; 185 | dw 0x100000000000000000000000000000000000000000000; 186 | dw 0x200000000000000000000000000000000000000000000; 187 | dw 0x400000000000000000000000000000000000000000000; 188 | dw 0x800000000000000000000000000000000000000000000; 189 | dw 0x1000000000000000000000000000000000000000000000; 190 | dw 0x2000000000000000000000000000000000000000000000; 191 | dw 0x4000000000000000000000000000000000000000000000; 192 | dw 0x8000000000000000000000000000000000000000000000; 193 | dw 0x10000000000000000000000000000000000000000000000; 194 | dw 0x20000000000000000000000000000000000000000000000; 195 | dw 0x40000000000000000000000000000000000000000000000; 196 | dw 0x80000000000000000000000000000000000000000000000; 197 | dw 0x100000000000000000000000000000000000000000000000; 198 | dw 0x200000000000000000000000000000000000000000000000; 199 | dw 0x400000000000000000000000000000000000000000000000; 200 | dw 0x800000000000000000000000000000000000000000000000; 201 | dw 0x1000000000000000000000000000000000000000000000000; 202 | dw 0x2000000000000000000000000000000000000000000000000; 203 | dw 0x4000000000000000000000000000000000000000000000000; 204 | dw 0x8000000000000000000000000000000000000000000000000; 205 | dw 0x10000000000000000000000000000000000000000000000000; 206 | dw 0x20000000000000000000000000000000000000000000000000; 207 | dw 0x40000000000000000000000000000000000000000000000000; 208 | dw 0x80000000000000000000000000000000000000000000000000; 209 | dw 0x100000000000000000000000000000000000000000000000000; 210 | dw 0x200000000000000000000000000000000000000000000000000; 211 | dw 
0x400000000000000000000000000000000000000000000000000; 212 | dw 0x800000000000000000000000000000000000000000000000000; 213 | dw 0x1000000000000000000000000000000000000000000000000000; 214 | dw 0x2000000000000000000000000000000000000000000000000000; 215 | dw 0x4000000000000000000000000000000000000000000000000000; 216 | dw 0x8000000000000000000000000000000000000000000000000000; 217 | dw 0x10000000000000000000000000000000000000000000000000000; 218 | dw 0x20000000000000000000000000000000000000000000000000000; 219 | dw 0x40000000000000000000000000000000000000000000000000000; 220 | dw 0x80000000000000000000000000000000000000000000000000000; 221 | dw 0x100000000000000000000000000000000000000000000000000000; 222 | dw 0x200000000000000000000000000000000000000000000000000000; 223 | dw 0x400000000000000000000000000000000000000000000000000000; 224 | dw 0x800000000000000000000000000000000000000000000000000000; 225 | dw 0x1000000000000000000000000000000000000000000000000000000; 226 | dw 0x2000000000000000000000000000000000000000000000000000000; 227 | dw 0x4000000000000000000000000000000000000000000000000000000; 228 | dw 0x8000000000000000000000000000000000000000000000000000000; 229 | dw 0x10000000000000000000000000000000000000000000000000000000; 230 | dw 0x20000000000000000000000000000000000000000000000000000000; 231 | dw 0x40000000000000000000000000000000000000000000000000000000; 232 | dw 0x80000000000000000000000000000000000000000000000000000000; 233 | dw 0x100000000000000000000000000000000000000000000000000000000; 234 | dw 0x200000000000000000000000000000000000000000000000000000000; 235 | dw 0x400000000000000000000000000000000000000000000000000000000; 236 | dw 0x800000000000000000000000000000000000000000000000000000000; 237 | dw 0x1000000000000000000000000000000000000000000000000000000000; 238 | dw 0x2000000000000000000000000000000000000000000000000000000000; 239 | dw 0x4000000000000000000000000000000000000000000000000000000000; 240 | dw 
0x8000000000000000000000000000000000000000000000000000000000; 241 | dw 0x10000000000000000000000000000000000000000000000000000000000; 242 | dw 0x20000000000000000000000000000000000000000000000000000000000; 243 | dw 0x40000000000000000000000000000000000000000000000000000000000; 244 | dw 0x80000000000000000000000000000000000000000000000000000000000; 245 | dw 0x100000000000000000000000000000000000000000000000000000000000; 246 | dw 0x200000000000000000000000000000000000000000000000000000000000; 247 | dw 0x400000000000000000000000000000000000000000000000000000000000; 248 | dw 0x800000000000000000000000000000000000000000000000000000000000; 249 | dw 0x1000000000000000000000000000000000000000000000000000000000000; 250 | dw 0x2000000000000000000000000000000000000000000000000000000000000; 251 | dw 0x4000000000000000000000000000000000000000000000000000000000000; 252 | dw 0x8000000000000000000000000000000000000000000000000000000000000; 253 | dw 0x10000000000000000000000000000000000000000000000000000000000000; 254 | dw 0x20000000000000000000000000000000000000000000000000000000000000; 255 | dw 0x40000000000000000000000000000000000000000000000000000000000000; 256 | dw 0x80000000000000000000000000000000000000000000000000000000000000; 257 | dw 0x100000000000000000000000000000000000000000000000000000000000000; 258 | dw 0x200000000000000000000000000000000000000000000000000000000000000; 259 | dw 0x400000000000000000000000000000000000000000000000000000000000000; 260 | } 261 | -------------------------------------------------------------------------------- /src/stark_verifier/stark_verifier.cairo: -------------------------------------------------------------------------------- 1 | // %builtins pedersen range_check bitwise 2 | 3 | from starkware.cairo.common.alloc import alloc 4 | from starkware.cairo.common.cairo_blake2s.blake2s import finalize_blake2s, STATE_SIZE_FELTS 5 | from starkware.cairo.common.cairo_builtins import BitwiseBuiltin 6 | from starkware.cairo.common.hash import 
HashBuiltin 7 | from starkware.cairo.common.math import assert_lt, assert_le 8 | from starkware.cairo.common.registers import get_fp_and_pc 9 | from starkware.cairo.common.uint256 import Uint256 10 | from starkware.cairo.common.serialize import serialize_word 11 | from starkware.cairo.common.pow import pow 12 | 13 | from stark_verifier.air.air_instance import ( 14 | AirInstance, 15 | air_instance_new, 16 | get_constraint_composition_coefficients, 17 | get_deep_composition_coefficients, 18 | ConstraintCompositionCoefficients, 19 | ) 20 | from stark_verifier.air.pub_inputs import PublicInputs 21 | from stark_verifier.air.stark_proof import TraceLayout, ProofOptions, StarkProof, read_stark_proof 22 | from stark_verifier.air.trace_info import TraceInfo 23 | from stark_verifier.channel import ( 24 | Channel, 25 | channel_new, 26 | read_constraint_commitment, 27 | read_constraint_evaluations, 28 | read_ood_trace_frame, 29 | read_ood_constraint_evaluations, 30 | read_pow_nonce, 31 | read_trace_commitments, 32 | read_queried_trace_states, 33 | ) 34 | from stark_verifier.composer import ( 35 | compose_constraint_evaluations, 36 | combine_compositions, 37 | compose_trace_columns, 38 | deep_composer_new, 39 | ) 40 | from stark_verifier.crypto.random import ( 41 | PublicCoin, 42 | draw, 43 | draw_elements, 44 | draw_integers, 45 | get_leading_zeros, 46 | hash_elements, 47 | random_coin_new, 48 | reseed, 49 | reseed_with_int, 50 | reseed_with_ood_frames, 51 | seed_with_pub_inputs, 52 | reseed_endian, 53 | ) 54 | // from stark_verifier.evaluator import evaluate_constraints 55 | from stark_verifier.fri.fri_verifier import fri_verifier_new, fri_verify, to_fri_options 56 | from stark_verifier.utils import Vec 57 | 58 | from stark_verifier.air.pub_inputs import read_public_inputs 59 | 60 | // Verifies that the specified computation was executed correctly against the specified inputs. 
61 | // 62 | // These subroutines are intended to be as close to a line-by-line transcription of the 63 | // Winterfell verifier code (see https://github.com/novifinancial/winterfell and the associated 64 | // LICENSE.winterfell.md) 65 | func verify{range_check_ptr, pedersen_ptr: HashBuiltin*, bitwise_ptr: BitwiseBuiltin*}( 66 | proof: StarkProof*, pub_inputs: PublicInputs* 67 | ) { 68 | alloc_locals; 69 | 70 | let p = [proof]; 71 | %{ 72 | print('lde_domain_size', ids.p.context.lde_domain_size) 73 | print('folding_factor', ids.p.context.options.fri_folding_factor) 74 | %} 75 | 76 | let (__fp__, _) = get_fp_and_pc(); 77 | 78 | // Initialize hasher 79 | let (blake2s_ptr: felt*) = alloc(); 80 | local blake2s_ptr_start: felt* = blake2s_ptr; 81 | 82 | // Build a seed for the public coin; the initial seed is the hash of public inputs 83 | let public_coin_seed: felt* = seed_with_pub_inputs{blake2s_ptr=blake2s_ptr}(pub_inputs); 84 | 85 | // Create an AIR instance for the computation specified in the proof. 86 | let air = air_instance_new(proof, pub_inputs, proof.context.options); 87 | 88 | // Create a public coin and channel struct 89 | with blake2s_ptr { 90 | let public_coin = random_coin_new(public_coin_seed, 32); 91 | } 92 | let channel = channel_new(air, proof); 93 | 94 | with blake2s_ptr, channel, public_coin { 95 | perform_verification(air=air); 96 | } 97 | 98 | // finalize_blake2s(blake2s_ptr_start, blake2s_ptr); // TODO: uncomment this line before deployment. Otherwise, the proof is INSECURE! 99 | 100 | return (); 101 | } 102 | 103 | // Performs the actual verification by reading the data from the `channel` and making sure it 104 | // attests to a correct execution of the computation specified by the provided `air`. 
105 | func perform_verification{ 106 | range_check_ptr, 107 | blake2s_ptr: felt*, 108 | bitwise_ptr: BitwiseBuiltin*, 109 | channel: Channel, 110 | public_coin: PublicCoin, 111 | }(air: AirInstance) { 112 | alloc_locals; 113 | 114 | // // 1 ----- Trace commitment ------------------------------------------------------------------- 115 | 116 | // // Read the commitments to evaluations of the trace polynomials over the LDE domain. 117 | let trace_commitments = read_trace_commitments(); 118 | 119 | // Reseed the coin with the commitment to the main trace segment 120 | reseed_endian(value=trace_commitments); 121 | 122 | // Process auxiliary trace segments to build a set of random elements for each segment, 123 | // and to reseed the coin. 124 | let (aux_trace_rand_elements: felt**) = alloc(); 125 | process_aux_segments( 126 | trace_commitments=trace_commitments + STATE_SIZE_FELTS, 127 | trace_commitments_len=air.context.trace_layout.num_aux_segments, 128 | aux_segment_rands=air.context.trace_layout.aux_segment_rands, 129 | aux_trace_rand_elements=aux_trace_rand_elements, 130 | ); 131 | 132 | // // Build random coefficients for the composition polynomial 133 | let constraint_coeffs = get_constraint_composition_coefficients(air=air); 134 | 135 | // 2 ----- Constraint commitment -------------------------------------------------------------- 136 | 137 | // Read the commitment to evaluations of the constraint composition polynomial over the LDE 138 | // domain sent by the prover. 139 | let constraint_commitment = read_constraint_commitment(); 140 | // Update the public coin. 141 | reseed_endian(value=constraint_commitment); 142 | 143 | // Draw an out-of-domain point z from the coin. 144 | let z = draw(); 145 | // 3 ----- OOD consistency check -------------------------------------------------------------- 146 | 147 | // Read the out-of-domain trace frames (the main trace frame and auxiliary trace frame, if 148 | // provided) sent by the prover. 
149 | let (ood_main_trace_frame, ood_aux_trace_frame) = read_ood_trace_frame(); 150 | 151 | // // Evaluate constraints over the OOD frames. 152 | // let ood_constraint_evaluation_1 = evaluate_constraints( 153 | // air=air, 154 | // coeffs=constraint_coeffs, 155 | // ood_main_trace_frame=ood_main_trace_frame, 156 | // ood_aux_trace_frame=ood_aux_trace_frame, 157 | // aux_trace_rand_elements=aux_trace_rand_elements, 158 | // z=z, 159 | // ); 160 | 161 | // Reseed the public coin with the OOD frames. 162 | reseed_with_ood_frames( 163 | ood_main_trace_frame=ood_main_trace_frame, ood_aux_trace_frame=ood_aux_trace_frame 164 | ); 165 | 166 | // Read evaluations of composition polynomial columns sent by the prover, and reduce them into 167 | // a single value by computing sum(z^i * value_i), where value_i is the evaluation of the ith 168 | // column polynomial at z^m, where m is the total number of column polynomials. Also, reseed 169 | // the public coin with the OOD constraint evaluations received from the prover. 170 | let ood_constraint_evaluations = read_ood_constraint_evaluations(); 171 | let ood_constraint_evaluation_2 = reduce_evaluations( 172 | evaluations=ood_constraint_evaluations.elements, 173 | evaluations_len=ood_constraint_evaluations.n_elements, 174 | z=z, 175 | index=0, 176 | ); 177 | let value = hash_elements( 178 | n_elements=ood_constraint_evaluations.n_elements, 179 | elements=ood_constraint_evaluations.elements, 180 | ); 181 | reseed_endian(value=value); 182 | 183 | // // Finally, make sure the values are the same. 184 | // with_attr error_message( 185 | // "Ood constraint evaluations differ. ${ood_constraint_evaluation_1} != ${ood_constraint_evaluation_2}") { 186 | // assert ood_constraint_evaluation_1 = ood_constraint_evaluation_2; 187 | // } 188 | 189 | // // 4 ----- FRI commitments -------------------------------------------------------------------- 190 | 191 | // Draw coefficients for computing DEEP composition polynomial from the public coin. 
192 | let deep_coefficients = get_deep_composition_coefficients(air=air); 193 | 194 | // Instantiates a FRI verifier with the FRI layer commitments read from the channel. From the 195 | // verifier's perspective, this is equivalent to executing the commit phase of the FRI protocol. 196 | // The verifier uses these commitments to update the public coin and draw random points alpha 197 | // from them. 198 | let fri_context = to_fri_options(air.context.options); 199 | let max_poly_degree = air.context.trace_length - 1; 200 | let fri_verifier = fri_verifier_new(fri_context, max_poly_degree); 201 | 202 | // 5 ----- Trace and constraint queries ------------------------------------------------------- 203 | 204 | // Read proof-of-work nonce sent by the prover and update the public coin with it. 205 | let pow_nonce = read_pow_nonce(); 206 | reseed_with_int(pow_nonce); 207 | 208 | // Make sure the proof-of-work specified by the grinding factor is satisfied. 209 | let leading_zeros = get_leading_zeros(public_coin.seed); 210 | %{ print("leading zeros", ids.leading_zeros, "grinding_factor", ids.air.options.grinding_factor) %} 211 | with_attr error_message("Insufficient proof of work") { 212 | assert_le(air.options.grinding_factor, leading_zeros); 213 | } 214 | 215 | // // Draw pseudorandom query positions for the LDE domain from the public coin. 216 | let (local query_positions: felt*) = alloc(); 217 | draw_integers( 218 | n_elements=air.options.num_queries, 219 | elements=query_positions, 220 | domain_size=air.context.lde_domain_size, 221 | ); 222 | %{ print("query_positions:", [memory[ids.query_positions + i] for i in range(ids.air.options.num_queries)]) %} 223 | 224 | // Read evaluations of trace and constraint composition polynomials at the queried positions. 225 | // This also checks that the read values are valid against trace and constraint commitments. 
226 | let (queried_main_trace_states, queried_aux_trace_states) = read_queried_trace_states( 227 | positions=query_positions, 228 | num_queries=air.options.num_queries, 229 | num_aux_segments=air.context.trace_layout.num_aux_segments, 230 | ); 231 | let queried_constraint_evaluations = read_constraint_evaluations( 232 | positions=query_positions, num_queries=air.options.num_queries 233 | ); 234 | 235 | // 6 ----- DEEP composition ------------------------------------------------------------------- 236 | 237 | // Compute evaluations of the DEEP composition polynomial at the queried positions. 238 | let composer = deep_composer_new( 239 | air=air, query_positions=query_positions, z=z, cc=deep_coefficients 240 | ); 241 | %{ 242 | coeffs = [memory[ids.composer.x_coordinates + i] for i in range(0, 27)] 243 | print("deep", coeffs, ids.composer.z_curr, ids.composer.z_next) 244 | %} 245 | let t_composition = compose_trace_columns( 246 | composer, 247 | queried_main_trace_states, 248 | queried_aux_trace_states, 249 | ood_main_trace_frame, 250 | ood_aux_trace_frame, 251 | ); 252 | let c_composition = compose_constraint_evaluations( 253 | composer, queried_constraint_evaluations, ood_constraint_evaluations 254 | ); 255 | let deep_evaluations = combine_compositions(composer, t_composition, c_composition); 256 | 257 | // 7 ----- Verify low-degree proof ------------------------------------------------------------- 258 | 259 | // Make sure that evaluations of the DEEP composition polynomial we computed in the previous 260 | // step are in fact evaluations of a polynomial of degree equal to trace polynomial degree. 
261 | fri_verify(fri_verifier, deep_evaluations, query_positions); 262 | 263 | return (); 264 | } 265 | 266 | func process_aux_segments{ 267 | range_check_ptr, 268 | blake2s_ptr: felt*, 269 | bitwise_ptr: BitwiseBuiltin*, 270 | channel: Channel, 271 | public_coin: PublicCoin, 272 | }( 273 | trace_commitments: felt*, 274 | trace_commitments_len: felt, 275 | aux_segment_rands: felt*, 276 | aux_trace_rand_elements: felt**, 277 | ) { 278 | alloc_locals; 279 | if (trace_commitments_len == 0) { 280 | return (); 281 | } 282 | let (elements) = alloc(); 283 | local elements_saved: felt* = elements; 284 | assert [aux_trace_rand_elements] = elements; 285 | draw_elements(n_elements=[aux_segment_rands], elements=elements); 286 | reseed_endian(value=trace_commitments); 287 | process_aux_segments( 288 | trace_commitments=trace_commitments + STATE_SIZE_FELTS, 289 | trace_commitments_len=trace_commitments_len - 1, 290 | aux_segment_rands=aux_segment_rands + 1, 291 | aux_trace_rand_elements=aux_trace_rand_elements + 1, 292 | ); 293 | return (); 294 | } 295 | 296 | func reduce_evaluations{range_check_ptr}(evaluations: felt*, evaluations_len, z, index) -> felt { 297 | if (evaluations_len == 0) { 298 | return 0; 299 | } 300 | alloc_locals; 301 | let acc = reduce_evaluations(evaluations + 1, evaluations_len - 1, z, index + 1); 302 | let (pow_z) = pow(z, index); 303 | return acc + pow_z * [evaluations]; 304 | } 305 | -------------------------------------------------------------------------------- /src/stark_verifier/evaluator.cairo: -------------------------------------------------------------------------------- 1 | // from starkware.cairo.common.alloc import alloc 2 | // from starkware.cairo.common.pow import pow 3 | // from stark_verifier.air.air_instance import AirInstance, ConstraintCompositionCoefficients 4 | // from stark_verifier.air.pub_inputs import PublicInputs, MemEntry 5 | 6 | // from stark_verifier.air.transitions.frame import ( 7 | // EvaluationFrame, 8 | // 
evaluate_transition, 9 | // evaluate_aux_transition, 10 | // ) 11 | 12 | // // Main segment column indices with boundary constraints 13 | // const MEM_A_TRACE_OFFSET = 19; 14 | // const MEM_P_TRACE_OFFSET = 17; 15 | 16 | // // Aux segment column indices with boundary constraints 17 | // const P_M_OFFSET = 8; 18 | // const P_M_WIDTH = 4; 19 | // const A_RC_PRIME_OFFSET = 12; 20 | // const P_M_LAST = P_M_OFFSET + P_M_WIDTH - 1; 21 | // const A_RC_PRIME_FIRST = A_RC_PRIME_OFFSET; 22 | // const A_RC_PRIME_LAST = A_RC_PRIME_OFFSET + 2; 23 | 24 | // // TODO: Functions to evaluate transitions and combine evaluation should be autogenerated 25 | // // from a constraint description language instead of hand-coded. 26 | // func evaluate_constraints{ 27 | // range_check_ptr 28 | // }( 29 | // air: AirInstance, 30 | // coeffs: ConstraintCompositionCoefficients, 31 | // ood_main_trace_frame: EvaluationFrame, 32 | // ood_aux_trace_frame: EvaluationFrame, 33 | // aux_trace_rand_elements: felt**, 34 | // z: felt, 35 | // ) -> felt { 36 | // alloc_locals; 37 | 38 | // // 1 ----- evaluate transition constraints ---------------------------------------------------- 39 | 40 | // // Evaluate main trace 41 | // let (t_evaluations1: felt*) = alloc(); 42 | // evaluate_transition( 43 | // ood_main_trace_frame, 44 | // t_evaluations1, 45 | // ); 46 | 47 | // // Evaluate auxiliary trace 48 | // let (t_evaluations2: felt*) = alloc(); 49 | // evaluate_aux_transition( 50 | // ood_main_trace_frame, 51 | // ood_aux_trace_frame, 52 | // aux_trace_rand_elements, 53 | // t_evaluations2, 54 | // ); 55 | 56 | // // Combine evaluations 57 | // let result = combine_evaluations( 58 | // t_evaluations1, 59 | // t_evaluations2, 60 | // z, 61 | // air, 62 | // coeffs, 63 | // ); 64 | 65 | // // 2 ----- evaluate boundary constraints ------------------------------------------------ 66 | 67 | // let (b_evaluations1: felt*) = alloc(); 68 | // assert b_evaluations1[0] = 
ood_main_trace_frame.current[MEM_A_TRACE_OFFSET] - air.pub_inputs.init._pc; 69 | // assert b_evaluations1[1] = ood_main_trace_frame.current[MEM_A_TRACE_OFFSET] - air.pub_inputs.fin._pc; 70 | // assert b_evaluations1[2] = ood_main_trace_frame.current[MEM_P_TRACE_OFFSET] - air.pub_inputs.init._ap; 71 | // assert b_evaluations1[3] = ood_main_trace_frame.current[MEM_P_TRACE_OFFSET] - air.pub_inputs.fin._ap; 72 | 73 | // let (b_evaluations2: felt*) = alloc(); 74 | // let r = reduce_pub_mem(air.pub_inputs, aux_trace_rand_elements); 75 | // assert b_evaluations2[0] = ood_aux_trace_frame.current[P_M_LAST] - r; 76 | // assert b_evaluations2[1] = ood_aux_trace_frame.current[A_RC_PRIME_FIRST] - air.pub_inputs.rc_min; 77 | // assert b_evaluations2[2] = ood_aux_trace_frame.current[A_RC_PRIME_LAST] - air.pub_inputs.rc_max; 78 | 79 | // // All boundary (main and aux) constraints have the same degree, and one of two different 80 | // // divisors (constraining either the first or last step) 81 | // let composition_degree = air.context.trace_length * air.ce_blowup_factor - 1; 82 | // let trace_poly_degree = air.context.trace_length - 1; 83 | // let divisor_degree = 1; 84 | // let target_degree = composition_degree + divisor_degree; 85 | // let degree_adjustment = target_degree - trace_poly_degree; 86 | // let (xp) = pow(z, degree_adjustment); 87 | 88 | // // Divisor evaluation for first step 89 | // let z_1 = z - 1; 90 | 91 | // // Divisor evaluation for last step 92 | // let g = air.trace_domain_generator; 93 | // let (g_n) = pow(g, air.pub_inputs.num_steps - 1); 94 | // let z_n = z - g_n; 95 | 96 | // let (g_m) = pow(g, air.context.trace_length - 1); 97 | // let z_m = z - g_m; 98 | 99 | // // Sum all constraint group evaluations 100 | 101 | // // Main constraints 102 | // let sum_1 = b_evaluations1[2] * (coeffs.boundary_a[0] + coeffs.boundary_b[0] * xp) + 103 | // b_evaluations1[0] * (coeffs.boundary_a[1] + coeffs.boundary_b[1] * xp); 104 | // let sum_n = b_evaluations1[3] * 
(coeffs.boundary_a[2] + coeffs.boundary_b[2] * xp) + 105 | // b_evaluations1[1] * (coeffs.boundary_a[3] + coeffs.boundary_b[3] * xp); 106 | // // Merge group sums 107 | // let main_evaluation = sum_1 / z_1 + sum_n / z_n; 108 | // let result = result + main_evaluation; 109 | 110 | // // Aux constraints 111 | // let sum_1 = b_evaluations2[1] * (coeffs.boundary_a[4] + coeffs.boundary_b[4] * xp); 112 | // let sum_m = b_evaluations2[0] * (coeffs.boundary_a[5] + coeffs.boundary_b[5] * xp) + 113 | // b_evaluations2[2] * (coeffs.boundary_a[6] + coeffs.boundary_b[6] * xp); 114 | // // Merge group sums 115 | // let aux_evaluation = sum_1 / z_1 + sum_m / z_m; 116 | // let result = result + aux_evaluation; 117 | 118 | // return result; 119 | // } 120 | 121 | // func combine_evaluations{ 122 | // range_check_ptr 123 | // }( 124 | // t_evaluations1: felt*, 125 | // t_evaluations2: felt*, 126 | // x: felt, 127 | // air: AirInstance, 128 | // coeffs: ConstraintCompositionCoefficients, 129 | // ) -> felt { 130 | // alloc_locals; 131 | // // Degrees needed to compute adjustments 132 | // let composition_degree = air.context.trace_length * air.ce_blowup_factor - 1; 133 | // let divisor_degree = air.context.trace_length - 1; 134 | // let target_degree = composition_degree + divisor_degree; 135 | 136 | // // Evaluate divisor 137 | // let g = air.trace_domain_generator; 138 | // let (numerator) = pow(x, air.context.trace_length); 139 | // let numerator = numerator - 1; 140 | // let (denominator) = pow(g, air.context.trace_length - 1); 141 | // let denominator = x - denominator; 142 | // let z = numerator / denominator; 143 | 144 | // // Sum all constraint evaluations 145 | // let sum = 0; 146 | 147 | // // Merge evaluations for degree 1 constraints 148 | // let evaluation_degree = (air.context.trace_length - 1); 149 | // let degree_adjustment = target_degree - evaluation_degree; 150 | // let (xp) = pow(x, degree_adjustment); 151 | // let sum = sum + (coeffs.transition_a[15] + 
coeffs.transition_b[15] * xp) * t_evaluations1[15]; 152 | 153 | // // Merge evaluations for degree 2 constraints 154 | // let evaluation_degree = 2 * (air.context.trace_length - 1); 155 | // let degree_adjustment = target_degree - evaluation_degree; 156 | // let (xp) = pow(x, degree_adjustment); 157 | // let sum = sum + (coeffs.transition_a[0] + coeffs.transition_b[0] * xp) * t_evaluations1[0]; 158 | // let sum = sum + (coeffs.transition_a[1] + coeffs.transition_b[1] * xp) * t_evaluations1[1]; 159 | // let sum = sum + (coeffs.transition_a[2] + coeffs.transition_b[2] * xp) * t_evaluations1[2]; 160 | // let sum = sum + (coeffs.transition_a[3] + coeffs.transition_b[3] * xp) * t_evaluations1[3]; 161 | // let sum = sum + (coeffs.transition_a[4] + coeffs.transition_b[4] * xp) * t_evaluations1[4]; 162 | // let sum = sum + (coeffs.transition_a[5] + coeffs.transition_b[5] * xp) * t_evaluations1[5]; 163 | // let sum = sum + (coeffs.transition_a[6] + coeffs.transition_b[6] * xp) * t_evaluations1[6]; 164 | // let sum = sum + (coeffs.transition_a[7] + coeffs.transition_b[7] * xp) * t_evaluations1[7]; 165 | // let sum = sum + (coeffs.transition_a[8] + coeffs.transition_b[8] * xp) * t_evaluations1[8]; 166 | // let sum = sum + (coeffs.transition_a[9] + coeffs.transition_b[9] * xp) * t_evaluations1[9]; 167 | // let sum = sum + (coeffs.transition_a[10] + coeffs.transition_b[10] * xp) * t_evaluations1[10]; 168 | // let sum = sum + (coeffs.transition_a[11] + coeffs.transition_b[11] * xp) * t_evaluations1[11]; 169 | // let sum = sum + (coeffs.transition_a[12] + coeffs.transition_b[12] * xp) * t_evaluations1[12]; 170 | // let sum = sum + (coeffs.transition_a[13] + coeffs.transition_b[13] * xp) * t_evaluations1[13]; 171 | // let sum = sum + (coeffs.transition_a[14] + coeffs.transition_b[14] * xp) * t_evaluations1[14]; 172 | 173 | // // Merge evaluations for degree 4 constraints 174 | // let evaluation_degree = 4 * (air.context.trace_length - 1); 175 | // let degree_adjustment = 
target_degree - evaluation_degree; 176 | // let (xp) = pow(x, degree_adjustment); 177 | // let sum = sum + (coeffs.transition_a[16] + coeffs.transition_b[16] * xp) * t_evaluations1[16]; 178 | // let sum = sum + (coeffs.transition_a[17] + coeffs.transition_b[17] * xp) * t_evaluations1[17]; 179 | // let sum = sum + (coeffs.transition_a[18] + coeffs.transition_b[18] * xp) * t_evaluations1[18]; 180 | // let sum = sum + (coeffs.transition_a[19] + coeffs.transition_b[19] * xp) * t_evaluations1[19]; 181 | // let sum = sum + (coeffs.transition_a[20] + coeffs.transition_b[20] * xp) * t_evaluations1[20]; 182 | // let sum = sum + (coeffs.transition_a[21] + coeffs.transition_b[21] * xp) * t_evaluations1[21]; 183 | // let sum = sum + (coeffs.transition_a[22] + coeffs.transition_b[22] * xp) * t_evaluations1[22]; 184 | // let sum = sum + (coeffs.transition_a[23] + coeffs.transition_b[23] * xp) * t_evaluations1[23]; 185 | // let sum = sum + (coeffs.transition_a[24] + coeffs.transition_b[24] * xp) * t_evaluations1[24]; 186 | // let sum = sum + (coeffs.transition_a[25] + coeffs.transition_b[25] * xp) * t_evaluations1[25]; 187 | // let sum = sum + (coeffs.transition_a[26] + coeffs.transition_b[26] * xp) * t_evaluations1[26]; 188 | // let sum = sum + (coeffs.transition_a[27] + coeffs.transition_b[27] * xp) * t_evaluations1[27]; 189 | // let sum = sum + (coeffs.transition_a[28] + coeffs.transition_b[28] * xp) * t_evaluations1[28]; 190 | // let sum = sum + (coeffs.transition_a[29] + coeffs.transition_b[29] * xp) * t_evaluations1[29]; 191 | // let sum = sum + (coeffs.transition_a[30] + coeffs.transition_b[30] * xp) * t_evaluations1[30]; 192 | 193 | // // Merge evaluations for degree 2 auxiliary constraints 194 | // let evaluation_degree = 2 * (air.context.trace_length-1); 195 | // let degree_adjustment = target_degree - evaluation_degree; 196 | // let (xp) = pow(x, degree_adjustment); 197 | // let sum = sum + (coeffs.transition_a[31] + coeffs.transition_b[31] * xp) * t_evaluations2[0]; 
198 | // let sum = sum + (coeffs.transition_a[32] + coeffs.transition_b[32] * xp) * t_evaluations2[1]; 199 | // let sum = sum + (coeffs.transition_a[33] + coeffs.transition_b[33] * xp) * t_evaluations2[2]; 200 | // let sum = sum + (coeffs.transition_a[34] + coeffs.transition_b[34] * xp) * t_evaluations2[3]; 201 | // let sum = sum + (coeffs.transition_a[35] + coeffs.transition_b[35] * xp) * t_evaluations2[4]; 202 | // let sum = sum + (coeffs.transition_a[36] + coeffs.transition_b[36] * xp) * t_evaluations2[5]; 203 | // let sum = sum + (coeffs.transition_a[37] + coeffs.transition_b[37] * xp) * t_evaluations2[6]; 204 | // let sum = sum + (coeffs.transition_a[38] + coeffs.transition_b[38] * xp) * t_evaluations2[7]; 205 | // let sum = sum + (coeffs.transition_a[39] + coeffs.transition_b[39] * xp) * t_evaluations2[8]; 206 | // let sum = sum + (coeffs.transition_a[40] + coeffs.transition_b[40] * xp) * t_evaluations2[9]; 207 | // let sum = sum + (coeffs.transition_a[41] + coeffs.transition_b[41] * xp) * t_evaluations2[10]; 208 | // let sum = sum + (coeffs.transition_a[42] + coeffs.transition_b[42] * xp) * t_evaluations2[11]; 209 | // let sum = sum + (coeffs.transition_a[43] + coeffs.transition_b[43] * xp) * t_evaluations2[12]; 210 | // let sum = sum + (coeffs.transition_a[44] + coeffs.transition_b[44] * xp) * t_evaluations2[13]; 211 | // let sum = sum + (coeffs.transition_a[45] + coeffs.transition_b[45] * xp) * t_evaluations2[14]; 212 | // let sum = sum + (coeffs.transition_a[46] + coeffs.transition_b[46] * xp) * t_evaluations2[15]; 213 | // let sum = sum + (coeffs.transition_a[47] + coeffs.transition_b[47] * xp) * t_evaluations2[16]; 214 | // let sum = sum + (coeffs.transition_a[48] + coeffs.transition_b[48] * xp) * t_evaluations2[17]; 215 | 216 | // // Divide by divisor evaluation. 
We can do this once at the end of merging because 217 | // // the divisor is identical for all constraints 218 | // let sum = sum / z; 219 | 220 | // return sum; 221 | // } 222 | 223 | // func reduce_pub_mem{ 224 | // range_check_ptr 225 | // }(pub_inputs: PublicInputs*, aux_rand_elements: felt**) -> felt { 226 | // alloc_locals; 227 | 228 | // let rand_element_z = aux_rand_elements[0][0]; 229 | // let rand_element_alpha = aux_rand_elements[0][1]; 230 | 231 | // if (pub_inputs.mem_length == 0) { 232 | // return 1; 233 | // } 234 | 235 | // let (num) = pow(rand_element_z, pub_inputs.mem_length); 236 | 237 | // local range_check_ptr = range_check_ptr; 238 | 239 | // tempvar res = num; 240 | // tempvar mem_ptr = pub_inputs.mem; 241 | // tempvar mem_len = pub_inputs.mem_length; 242 | // loop: 243 | // let expr = rand_element_z - ( [mem_ptr].address + 244 | // rand_element_alpha * [mem_ptr].value ); 245 | // tempvar res = res / expr; 246 | // tempvar mem_ptr = mem_ptr + MemEntry.SIZE; 247 | // tempvar mem_len = mem_len - 1; 248 | // jmp loop if mem_len != 0; 249 | 250 | // return res; 251 | // } 252 | -------------------------------------------------------------------------------- /src/stark_verifier/crypto/random.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from starkware.cairo.common.cairo_blake2s.blake2s import ( 3 | blake2s_add_felt, 4 | blake2s_bigend, 5 | blake2s_felts, 6 | blake2s_add_felts, 7 | blake2s, 8 | blake2s_as_words, 9 | ) 10 | from starkware.cairo.common.cairo_builtins import BitwiseBuiltin 11 | from starkware.cairo.common.hash import HashBuiltin 12 | from starkware.cairo.common.hash_state import hash_finalize, hash_init, hash_update 13 | from starkware.cairo.common.math import assert_nn_le, assert_le 14 | from starkware.cairo.common.math_cmp import is_le 15 | from starkware.cairo.common.bool import TRUE, FALSE 16 | from starkware.cairo.common.memcpy import 
memcpy 17 | from starkware.cairo.common.memset import memset 18 | from utils.pow2 import pow2 19 | from utils.endianness import byteswap32 20 | 21 | from stark_verifier.air.pub_inputs import MemEntry, PublicInputs, read_mem_values 22 | from stark_verifier.air.transitions.frame import EvaluationFrame 23 | 24 | // Pseudo-random element generator for finite fields. 25 | struct PublicCoin { 26 | seed: felt*, 27 | counter: felt, 28 | } 29 | 30 | // Returns a new random coin instantiated with the provided `seed`. 31 | func random_coin_new{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}( 32 | seed: felt*, n_bytes: felt 33 | ) -> PublicCoin { 34 | let (digest) = blake2s_as_words(data=seed, n_bytes=n_bytes); 35 | let public_coin = PublicCoin(seed=digest, counter=0); 36 | return public_coin; 37 | } 38 | 39 | // Returns a hash of two digests. This method is intended for use in construction of 40 | // Merkle trees. 41 | func merge{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}( 42 | seed: felt*, value: felt* 43 | ) -> felt* { 44 | alloc_locals; 45 | let (data: felt*) = alloc(); 46 | 47 | memcpy(data, seed, 8); 48 | 49 | // TODO: delete this and then delete `merge_endian` and `reseed_endian` 50 | let be_value = data + 8; 51 | assert be_value[0] = byteswap32(value[0]); 52 | assert be_value[1] = byteswap32(value[1]); 53 | assert be_value[2] = byteswap32(value[2]); 54 | assert be_value[3] = byteswap32(value[3]); 55 | assert be_value[4] = byteswap32(value[4]); 56 | assert be_value[5] = byteswap32(value[5]); 57 | assert be_value[6] = byteswap32(value[6]); 58 | assert be_value[7] = byteswap32(value[7]); 59 | 60 | let (digest) = blake2s_as_words(data=data, n_bytes=64); 61 | 62 | return digest; 63 | } 64 | 65 | // Returns hash(`seed` || `value`). This method is intended for use in PRNG and PoW contexts. 66 | // This function does not ensure that value fits within a u64 integer. 
// Returns hash(`seed` || `value`) as 8 u32 words, where `value` is serialized as a
// little-endian u64 (two u32 limbs). Intended for use in PRNG and PoW contexts.
// NOTE: does NOT range-check `value`; callers that need the u64 bound must enforce it
// themselves (see `reseed_with_int`).
func merge_with_int{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    seed: felt*, value: felt
) -> felt* {
    alloc_locals;
    let (data: felt*) = alloc();
    let data_start = data;

    // First 8 u32 words of the input buffer: the current seed digest.
    memcpy(data, seed, 8);
    let data = data + 8;

    // Convert value : u64 -> (high: u32, low: u32) using a bitwise AND with 2**32 - 1.
    assert [bitwise_ptr].x = value;
    assert [bitwise_ptr].y = 0xffffffff;
    let low = [bitwise_ptr].x_and_y;
    let high = (value - low) / 2 ** 32;
    let bitwise_ptr = bitwise_ptr + BitwiseBuiltin.SIZE;

    // Write high and low into data in little-endian order (low word first).
    assert data[0] = low;
    assert data[1] = high;

    // Compute the blake2s hash over 40 bytes (32-byte seed + 8-byte integer).
    let (digest) = blake2s_as_words(data=data_start, n_bytes=40);
    return digest;
}

// Hashes `n_elements` field elements into an 8-word (u32) blake2s digest.
// The byte length is n_elements * 32, i.e. each felt is serialized as 32 bytes.
func hash_elements{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    n_elements: felt, elements: felt*
) -> felt* {
    alloc_locals;
    let (data) = alloc();
    let data_start = data;
    with data {
        blake2s_add_felts(n_elements=n_elements, elements=elements, bigend=0);
    }
    let (res) = blake2s_as_words(data=data_start, n_bytes=n_elements * 32);
    return res;
}

// Reseeds the coin with the specified data by setting the new seed to hash(`seed` || `value`),
// where value is a U256 integer representing a hash digest.
// Also resets the draw counter to 0.
func reseed{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(value: felt*) {
    let digest = merge(seed=public_coin.seed, value=value);
    let public_coin = PublicCoin(seed=digest, counter=0);
    return ();
}

// Reseeds the coin with the specified value by setting the new seed to hash(`seed` || `value`)
// where value is a u64 integer.
// This function ensures that value fits within a u64 integer.
func reseed_with_int{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(value: felt) {
    // Enforce the u64 bound that `merge_with_int` itself does not check.
    with_attr error_message("Value (${value}) is negative or greater than (2 ** 64 - 1).") {
        assert_nn_le(value, 2 ** 64 - 1);
    }
    let digest = merge_with_int(seed=public_coin.seed, value=value);
    // New seed, fresh draw counter.
    let public_coin = PublicCoin(seed=digest, counter=0);
    return ();
}

// Reseeds the coin with the out-of-domain (OOD) evaluation frames:
// first with hash(main.current || aux.current), then with hash(main.next || aux.next).
func reseed_with_ood_frames{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(ood_main_trace_frame: EvaluationFrame, ood_aux_trace_frame: EvaluationFrame) {
    alloc_locals;

    // Concatenate the `current` rows of the main and aux frames, hash, and reseed.
    let (elements) = alloc();
    let elements_start = elements;
    memcpy(elements, ood_main_trace_frame.current, ood_main_trace_frame.current_len);
    let elements = elements + ood_main_trace_frame.current_len;
    memcpy(elements, ood_aux_trace_frame.current, ood_aux_trace_frame.current_len);
    let elements = elements + ood_aux_trace_frame.current_len;
    // Pointer difference = total number of felts copied.
    let n_elements = elements - elements_start;
    let elements_hash = hash_elements(n_elements, elements_start);
    reseed_endian(elements_hash);

    // Same for the `next` rows of both frames.
    let (elements) = alloc();
    let elements_start = elements;
    memcpy(elements, ood_main_trace_frame.next, ood_main_trace_frame.next_len);
    let elements = elements + ood_main_trace_frame.next_len;
    memcpy(elements, ood_aux_trace_frame.next, ood_aux_trace_frame.next_len);
    let elements = elements + ood_aux_trace_frame.next_len;
    let n_elements = elements - elements_start;
    let elements_hash = hash_elements(n_elements, elements_start);
    reseed_endian(elements_hash);

    return ();
}

// Returns the next pseudo-random field element.
// Increments the draw counter, hashes it into the seed, and folds the first two
// u32 words of the digest into a u64 result.
func draw{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}() -> felt {
    alloc_locals;
    let digest = merge_with_int(seed=public_coin.seed, value=public_coin.counter + 1);
    let public_coin = PublicCoin(public_coin.seed, public_coin.counter + 1);
    return digest[0] + digest[1] * 2 ** 32;
}

// Draws two pseudo-random field elements (two successive `draw` calls).
func draw_pair{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}() -> (res1: felt, res2: felt) {
    alloc_locals;
    let res1 = draw();
    let res2 = draw();
    return (res1=res1, res2=res2);
}

// Fills `elements[0..n_elements)` with pseudo-random field elements.
func draw_elements{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(n_elements: felt, elements: felt*) {
    if (n_elements == 0) {
        return ();
    }
    let res = draw();
    assert [elements] = res;
    draw_elements(n_elements=n_elements - 1, elements=&elements[1]);
    return ();
}

// Returns 1 if `element` occurs in `array[0..array_len)`, 0 otherwise (linear scan).
func contains(element: felt, array: felt*, array_len: felt) -> felt {
    if (array_len == 0) {
        return 0;
    }
    if ([array] == element) {
        return 1;
    }

    return contains(element, array + 1, array_len - 1);
}

// Returns the next pseudo-random digest (8 u32 words).
// Like `draw`, but returns the full digest pointer instead of folding it into a felt.
func draw_digest{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}() -> felt* {
    alloc_locals;
    tempvar public_coin = PublicCoin(public_coin.seed, public_coin.counter + 1);
    let digest = merge_with_int(seed=public_coin.seed, value=public_coin.counter);
    return digest;
}

// Rejection-sampling loop backing `draw_integers`. Masks each drawn digest down to the
// domain and re-draws duplicates, so termination relies on n_elements < domain_size.
func _draw_integers_loop{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(n_elements: felt, elements: felt*, domain_size: felt, index: felt) {
    alloc_locals;
    if (n_elements == index) {
        return ();
    }

    // determine how many bits are needed to represent valid values in the domain
    // (assumes `domain_size` is a power of two, so `domain_size - 1` is a bit mask)
    let v_mask = domain_size - 1;

    // draw values from the PRNG until we get as many unique values as specified by n_elements
    let element = draw_digest();

    // convert to integer and limit the integer to the number of bits which can fit
    // into the specified domain
    assert [bitwise_ptr].x = element[0] + element[1] * 2 ** 32;
    assert [bitwise_ptr].y = v_mask;
    let value = [bitwise_ptr].x_and_y;
    let bitwise_ptr = bitwise_ptr + BitwiseBuiltin.SIZE;

    // Reject duplicates: retry at the same index if this value was already drawn.
    let is_contained = contains(value, elements, index);
    if (is_contained != FALSE) {
        return _draw_integers_loop(n_elements, elements, domain_size, index);
    }

    assert elements[index] = value;

    return _draw_integers_loop(n_elements, elements, domain_size, index + 1);
}

// / Returns a vector of unique integers selected from the range [0, domain_size).
// /
// / Errors if:
// / - `domain_size` is not a power of two.
// / - `n_elements` is greater than or equal to `domain_size`.
// /
// / See also: https://github.com/ZeroSync/winterfell/blob/main/crypto/src/random/mod.rs#L252
func draw_integers{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(n_elements: felt, elements: felt*, domain_size: felt) {
    return _draw_integers_loop(n_elements, elements, domain_size, 0);
}

// Computes the initial seed digest from the public inputs by hashing the program hash,
// stack inputs, stack outputs, and overflow addresses in sequence.
func seed_with_pub_inputs{
    range_check_ptr, blake2s_ptr: felt*, pedersen_ptr: HashBuiltin*, bitwise_ptr: BitwiseBuiltin*
}(pub_inputs: PublicInputs*) -> felt* {
    alloc_locals;
    let (data: felt*) = alloc();
    let data_start = data;
    with data {
        blake2s_add_felts(
            n_elements=pub_inputs.program_hash_len, elements=pub_inputs.program_hash, bigend=0
        );
        blake2s_add_felts(
            n_elements=pub_inputs.stack_inputs_len, elements=pub_inputs.stack_inputs, bigend=0
        );
        blake2s_add_felts(
            n_elements=pub_inputs.outputs.stack_len, elements=pub_inputs.outputs.stack, bigend=0
        );
        blake2s_add_felts(
            n_elements=pub_inputs.outputs.overflow_addrs_len,
            elements=pub_inputs.outputs.overflow_addrs,
            bigend=0,
        );
    }

    // `data - data_start` counts the u32 words written above; 4 bytes each.
    let n_bytes = (data - data_start) * 4;
    let (res) = blake2s_as_words(data=data_start, n_bytes=n_bytes);
    return res;
}

// Returns the number of leading zeros (capped at 64) of the high 128 bits of the seed,
// interpreted big-endian. Used for the proof-of-work / grinding-factor check.
func get_leading_zeros{range_check_ptr, bitwise_ptr: BitwiseBuiltin*}(seed: felt*) -> felt {
    alloc_locals;

    // Byte-swap each u32 word and assemble the big-endian u128 value `high`.
    let seed3 = byteswap32(seed[3]);
    let seed2 = byteswap32(seed[2]);
    let seed1 = byteswap32(seed[1]);
    let seed0 = byteswap32(seed[0]);
    let high = seed3 + seed2 * 2 ** 32 + seed1 * 2 ** 64 + seed0 * 2 ** 96;

    // The count is computed nondeterministically in a hint and verified below.
    local lzcnt;
    %{
        # Count high bits in use
        n_bits = len( bin(ids.high).replace('0b', '') )
        assert 0 <= n_bits <= 128, "expected 128 bits"

        # Store leading zeros count
        ids.lzcnt = 128 - n_bits
    %}

    // Verify leading zeros count
    let ceil_pow2 = pow2(128 - lzcnt);

    // 2**(log2-1) < seed.high <= 2**log2
    // NOTE(review): for high == 0 the hint yields lzcnt = 127 and the lower bound
    // `ceil_pow2 / 2 <= high` cannot hold — confirm this case is unreachable in practice.
    with_attr error_message("Error in 2**(log2-1) < seed.high <= 2**log2 verification.") {
        assert_le(high, ceil_pow2 - 1);
        assert_le(ceil_pow2 / 2, high);
    }
    // Cap the result at 64 leading zeros.
    let is_lzcnt_le_64 = is_le(lzcnt, 64);
    if (is_lzcnt_le_64 == TRUE) {
        return lzcnt;
    } else {
        return 64;
    }
}

// Reseeds the coin with the specified data by setting the new seed to hash(`seed` || `value`),
// where value is a U256 integer representing a hash digest. Preserves the endianness of value.
func reseed_endian{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(value: felt*) {
    let digest = merge_endian(seed=public_coin.seed, value=value);
    let public_coin = PublicCoin(seed=digest, counter=0);
    return ();
}

// Returns a hash of two digests. This method is intended for use in construction of
// Merkle trees.
Preserves the endianness of value 330 | func merge_endian{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}( 331 | seed: felt*, value: felt* 332 | ) -> felt* { 333 | alloc_locals; 334 | let (data: felt*) = alloc(); 335 | 336 | memcpy(data, seed, 8); 337 | memcpy(data + 8, value, 8); 338 | 339 | let (digest) = blake2s_as_words(data=data, n_bytes=64); 340 | 341 | return digest; 342 | } 343 | -------------------------------------------------------------------------------- /src/stark_verifier/channel.cairo: -------------------------------------------------------------------------------- 1 | from starkware.cairo.common.alloc import alloc 2 | from starkware.cairo.common.cairo_builtins import BitwiseBuiltin 3 | from starkware.cairo.common.registers import get_fp_and_pc 4 | from starkware.cairo.common.memcpy import memcpy 5 | from starkware.cairo.common.cairo_blake2s.blake2s import blake2s_as_words 6 | 7 | from stark_verifier.air.stark_proof import ( 8 | ConstraintQueries, 9 | ParsedOodFrame, 10 | StarkProof, 11 | TraceQueries, 12 | ) 13 | from stark_verifier.air.air_instance import AirInstance 14 | from stark_verifier.air.table import Table 15 | from stark_verifier.air.transitions.frame import EvaluationFrame 16 | from stark_verifier.utils import Vec 17 | from crypto.hash_utils import assert_hashes_equal, copy_hash 18 | from utils.endianness import byteswap32 19 | 20 | from stark_verifier.crypto.random import hash_elements 21 | 22 | struct TraceOodFrame { 23 | main_frame: EvaluationFrame, 24 | aux_frame: EvaluationFrame, 25 | } 26 | 27 | // TODO remove hardcode 28 | const FOLDING_FACTOR = 8; 29 | const HASH_FELT_SIZE = 8; 30 | const UINT32_SIZE = 4; 31 | 32 | struct Channel { 33 | // Trace queries 34 | trace_roots: felt*, 35 | // Constraint queries 36 | constraint_root: felt*, 37 | // FRI proof 38 | fri_roots_len: felt, 39 | fri_roots: felt*, 40 | // OOD frame 41 | ood_trace_frame: TraceOodFrame, 42 | ood_constraint_evaluations: Vec, 43 | // Query PoW 
nonce 44 | pow_nonce: felt, 45 | // Queried trace states 46 | trace_queries: TraceQueries*, 47 | // Queried constraint evaluations 48 | constraint_queries: ConstraintQueries*, 49 | // Remainder 50 | remainder: Vec, 51 | } 52 | 53 | func channel_new{bitwise_ptr: BitwiseBuiltin*}(air: AirInstance, proof: StarkProof*) -> Channel { 54 | // Parsed commitments 55 | tempvar trace_roots = proof.commitments.trace_roots; 56 | tempvar constraint_root = proof.commitments.constraint_root; 57 | tempvar fri_roots = proof.commitments.fri_roots; 58 | 59 | // Parsed ood_frame 60 | tempvar ood_constraint_evaluations = proof.ood_frame.evaluations; 61 | tempvar ood_trace_frame = TraceOodFrame( 62 | main_frame=proof.ood_frame.main_frame, aux_frame=proof.ood_frame.aux_frame 63 | ); 64 | 65 | tempvar channel = Channel( 66 | trace_roots=trace_roots, 67 | constraint_root=constraint_root, 68 | fri_roots_len=proof.commitments.fri_roots_len, 69 | fri_roots=fri_roots, 70 | ood_trace_frame=ood_trace_frame, 71 | ood_constraint_evaluations=ood_constraint_evaluations, 72 | pow_nonce=proof.pow_nonce, 73 | trace_queries=&proof.trace_queries, 74 | constraint_queries=&proof.constraint_queries, 75 | remainder=proof.remainder, 76 | ); 77 | return channel; 78 | } 79 | 80 | func read_remainder{ 81 | range_check_ptr, channel: Channel, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin* 82 | }() -> Vec { 83 | alloc_locals; 84 | let remainder = channel.remainder.elements; 85 | let loop_counter = channel.remainder.n_elements / FOLDING_FACTOR; 86 | let num_of_layers = loop_counter / FOLDING_FACTOR; 87 | let (remainder_values: felt**) = alloc(); 88 | transpose_slice(remainder, remainder_values, loop_counter, num_of_layers); 89 | 90 | // build remainder Merkle tree 91 | let (hashed_values: felt*) = alloc(); 92 | hash_values(remainder_values, hashed_values, loop_counter); 93 | let root = compute_merkle_root(hashed_values, loop_counter); 94 | 95 | // Compare the root to the last fri_root 96 | let expected_root = 
channel.fri_roots + (channel.fri_roots_len - 1) * HASH_FELT_SIZE; 97 | assert_hashes_equal(root, expected_root); 98 | 99 | return channel.remainder; 100 | } 101 | 102 | func transpose_slice_loop(n: felt, row_ptr: felt*, src_ptr: felt*, num_of_layers: felt) -> () { 103 | if (n == 0) { 104 | return (); 105 | } 106 | 107 | let jmp_factor = FOLDING_FACTOR * num_of_layers; 108 | assert [row_ptr] = [src_ptr]; 109 | 110 | return transpose_slice_loop(n - 1, row_ptr + 1, src_ptr + jmp_factor, num_of_layers); 111 | } 112 | 113 | func transpose_slice(source: felt*, destination: felt**, loop_counter: felt, num_of_layers: felt) { 114 | if (loop_counter == 0) { 115 | return (); 116 | } 117 | let (row) = alloc(); 118 | assert [destination] = row; 119 | transpose_slice_loop(FOLDING_FACTOR, row, source, num_of_layers); 120 | return transpose_slice(source + 1, destination + 1, loop_counter - 1, num_of_layers); 121 | } 122 | 123 | func hash_values{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}( 124 | values: felt**, hashes: felt*, loop_counter 125 | ) { 126 | if (loop_counter == 0) { 127 | return (); 128 | } 129 | alloc_locals; 130 | let digest = hash_elements(n_elements=FOLDING_FACTOR, elements=[values]); 131 | memcpy(hashes, digest, HASH_FELT_SIZE); 132 | return hash_values(values + 1, hashes + HASH_FELT_SIZE, loop_counter - 1); 133 | } 134 | 135 | // Compute the Merkle root hash of a set of hashes 136 | func compute_merkle_root{range_check_ptr, bitwise_ptr: BitwiseBuiltin*, blake2s_ptr: felt*}( 137 | leaves: felt*, leaves_len: felt 138 | ) -> felt* { 139 | alloc_locals; 140 | 141 | // The trivial case is a tree with a single leaf 142 | if (leaves_len == 1) { 143 | return leaves; 144 | } 145 | 146 | // Compute the next generation of leaves one level higher up in the tree 147 | let (next_leaves) = alloc(); 148 | let next_leaves_len = (leaves_len) / 2; 149 | _compute_merkle_root_loop(leaves, next_leaves, next_leaves_len); 150 | 151 | // Ascend in the tree and 
recurse on the next generation one step closer to the root
    return compute_merkle_root(next_leaves, next_leaves_len);
}

// Compute the next generation of leaves by pairwise hashing
// the previous generation of leaves
func _compute_merkle_root_loop{range_check_ptr, bitwise_ptr: BitwiseBuiltin*, blake2s_ptr: felt*}(
    prev_leaves: felt*, next_leaves: felt*, loop_counter
) {
    alloc_locals;

    // We loop until we've completed the next generation
    if (loop_counter == 0) {
        return ();
    }

    // Hash two prev_leaves to get one leaf of the next generation
    let (digest) = blake2s_as_words(data=prev_leaves, n_bytes=HASH_FELT_SIZE * 2 * UINT32_SIZE);
    copy_hash(digest, next_leaves);

    // Continue this loop with the next two prev_leaves
    return _compute_merkle_root_loop(
        prev_leaves + HASH_FELT_SIZE * 2, next_leaves + HASH_FELT_SIZE, loop_counter - 1
    );
}

// Accessor: per-segment trace commitments (Merkle roots).
func read_trace_commitments{channel: Channel}() -> felt* {
    return channel.trace_roots;
}

// Accessor: constraint composition commitment (Merkle root).
func read_constraint_commitment{channel: Channel}() -> felt* {
    return channel.constraint_root;
}

// Accessor: out-of-domain trace frames (main and auxiliary).
func read_ood_trace_frame{channel: Channel}() -> (res1: EvaluationFrame, res2: EvaluationFrame) {
    return (res1=channel.ood_trace_frame.main_frame, res2=channel.ood_trace_frame.aux_frame);
}

// Accessor: out-of-domain constraint evaluations.
func read_ood_constraint_evaluations{channel: Channel}() -> Vec {
    return channel.ood_constraint_evaluations;
}

// Accessor: proof-of-work nonce.
func read_pow_nonce{channel: Channel}() -> felt {
    return channel.pow_nonce;
}

// A single batch-Merkle authentication path.
struct QueriesProof {
    length: felt, // TODO: this is unneccessary overhead. 
All paths of a BatchMerkleProof have the same length
    digests: felt*,
}

struct QueriesProofs {
    proofs: QueriesProof*,
}

// Walks a Merkle authentication path of `depth` siblings, folding `accu` with
// the sibling digest at each level, and asserts the final digest equals
// `root`. `position` is the leaf index; its bits select the hashing order
// (left/right) per level.
func _verify_merkle_proof{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    depth: felt, path: felt*, position, root: felt*, accu: felt*
) {
    alloc_locals;
    if (depth == 0) {
        assert_hashes_equal(root, accu);
        return ();
    }

    local lowest_bit;
    %{ ids.lowest_bit = ids.position & 1 %}
    // Constrain the nondeterministic hint output to a single bit; without this
    // check a malicious prover could supply an arbitrary felt for lowest_bit.
    assert lowest_bit * lowest_bit = lowest_bit;
    // TODO: also bind lowest_bit to the actual parity of `position` (e.g. via
    // a range check on the halved position); with only the bit-check above the
    // per-level ordering is still prover-chosen.

    let (data: felt*) = alloc();
    if (lowest_bit != 0) {
        memcpy(data + 8, accu, 8);
        memcpy(data, path, 8);
    } else {
        memcpy(data, accu, 8);
        memcpy(data + 8, path, 8);
    }

    let (digest) = blake2s_as_words(data=data, n_bytes=64);

    let position = (position - lowest_bit) / 2;
    _verify_merkle_proof(depth - 1, path + 8, position, root, digest);

    return ();
}

// Verifies a Merkle authentication path of `length` digests against `root`,
// where path[0] is the leaf digest.
func verify_merkle_proof{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    length: felt, path: felt*, position, root: felt*
) {
    alloc_locals;

    _verify_merkle_proof(length - 1, path + 8, position, root, path);

    return ();
}

// For each of `loop_counter` queries: hash the queried trace row, check it
// matches the proof's leaf digest, and authenticate the Merkle path against
// `trace_roots`.
func verify_merkle_proofs{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    proofs: QueriesProof*,
    positions: felt*,
    trace_roots: felt*,
    loop_counter,
    rows: felt*,
    n_cols: felt,
) {
    if (loop_counter == 0) {
        return ();
    }
    // Hash the row of the table at the current index and compare it to the leaf of the path
    let digest = hash_elements(n_elements=n_cols, elements=rows);
    assert_hashes_equal(digest, proofs[0].digests);

    verify_merkle_proof(proofs[0].length, proofs[0].digests, positions[0], 
trace_roots);
    verify_merkle_proofs(
        &proofs[1], positions + 1, trace_roots, loop_counter - 1, rows + n_cols, n_cols
    );
    return ();
}

// AUX TRACE (Memory)
// NOTE(review): this duplicates verify_merkle_proofs verbatim; kept as a
// separate entry point per aux segment -- consider deduplicating.
func verify_aux_merkle_proofs_1{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    proofs: QueriesProof*,
    positions: felt*,
    trace_roots: felt*,
    loop_counter,
    rows: felt*,
    n_cols: felt,
) {
    if (loop_counter == 0) {
        return ();
    }
    // Hash the row of the table at the current index and compare it to the leaf of the path
    let digest = hash_elements(n_elements=n_cols, elements=rows);
    assert_hashes_equal(digest, proofs[0].digests);

    verify_merkle_proof(proofs[0].length, proofs[0].digests, positions[0], trace_roots);
    verify_aux_merkle_proofs_1(
        &proofs[1], positions + 1, trace_roots, loop_counter - 1, rows + n_cols, n_cols
    );
    return ();
}

// AUX TRACE (Range check)
// NOTE(review): hashes exactly 6 elements starting at column offset 12 --
// presumably the range-check columns of the aux row; confirm against the
// prover's trace layout before relying on this.
func verify_aux_merkle_proofs_2{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    proofs: QueriesProof*,
    positions: felt*,
    trace_roots: felt*,
    loop_counter,
    rows: felt*,
    n_cols: felt,
) {
    if (loop_counter == 0) {
        return ();
    }
    // Hash the row of the table at the current index and compare it to the leaf of the path
    let digest = hash_elements(n_elements=6, elements=rows + 12);
    assert_hashes_equal(digest, proofs[0].digests);

    verify_merkle_proof(proofs[0].length, proofs[0].digests, positions[0], trace_roots);
    verify_aux_merkle_proofs_2(
        &proofs[1], positions + 1, trace_roots, loop_counter - 1, rows + n_cols, n_cols
    );
    return ();
}

// Reads the queried main/aux trace rows from the channel, loads the matching
// Merkle proofs via a hint (external parser binary), and authenticates each
// queried row against the corresponding trace commitment.
func read_queried_trace_states{
    range_check_ptr, blake2s_ptr: felt*, channel: Channel, bitwise_ptr: BitwiseBuiltin*
}(positions: felt*, num_queries: felt, num_aux_segments) -> (
main_states: Table, aux_states: Table
) {
    alloc_locals;
    let (local trace_queries_proof_ptr: QueriesProofs*) = alloc();
    %{
        import json
        import subprocess
        from src.stark_verifier.utils import write_into_memory

        positions = []
        for i in range(ids.num_queries):
            positions.append( memory[ids.positions + i] )

        positions = json.dumps( positions )

        completed_process = subprocess.run([
            'bin/stark_parser',
            'proofs/fib.bin', # TODO: this path shouldn't be hardcoded!
            'trace-queries',
            positions
        ],
        capture_output=True)

        json_data = completed_process.stdout
        write_into_memory(ids.trace_queries_proof_ptr, json_data, segments)
    %}

    // NOTE(review): shadows and overrides the caller-supplied query count.
    let num_queries = 4; // TODO: this should be num_queries, but it takes forever...

    let main_states = channel.trace_queries.main_states;
    let aux_states = channel.trace_queries.aux_states;

    // Authenticate proof paths
    verify_merkle_proofs(
        trace_queries_proof_ptr[0].proofs,
        positions,
        channel.trace_roots,
        num_queries,
        main_states.elements,
        main_states.n_cols,
    );

    // TODO what are cases where aux is greater than 1?
    assert num_aux_segments = 1;

    // The aux segment root lives HASH_FELT_SIZE (8) felts after the main root.
    verify_aux_merkle_proofs_1(
        trace_queries_proof_ptr[1].proofs,
        positions,
        channel.trace_roots + 8,
        num_queries,
        aux_states.elements,
        aux_states.n_cols,
    );
    // verify_aux_merkle_proofs_2(
    //     trace_queries_proof_ptr[2].proofs,
    //     positions,
    //     channel.trace_roots + 8 * 2,
    //     num_queries,
    //     aux_states.elements,
    //     aux_states.n_cols,
    // );

    return (main_states, aux_states);
}

// Reads the queried constraint evaluations, loads the matching Merkle proofs
// via a hint (external parser binary), and authenticates them against the
// constraint commitment.
func read_constraint_evaluations{
    range_check_ptr, blake2s_ptr: felt*, channel: Channel, bitwise_ptr: BitwiseBuiltin*
}(positions: felt*, num_queries: felt) -> Table {
    alloc_locals;
    let (local constraint_queries_proof_ptr: QueriesProofs*) = alloc();
    %{
        import json
        import subprocess
        from src.stark_verifier.utils import write_into_memory

        positions = []
        for i in range(ids.num_queries):
            positions.append( memory[ids.positions + i] )

        positions = json.dumps( positions )

        completed_process = subprocess.run([
            'bin/stark_parser',
            'proofs/fib.bin', # TODO: this path shouldn't be hardcoded!
            'constraint-queries',
            positions
        ],
        capture_output=True)

        json_data = completed_process.stdout
        write_into_memory(ids.constraint_queries_proof_ptr, json_data, segments)
    %}
    // NOTE(review): shadows and overrides the caller-supplied query count.
    let num_queries = 4; // TODO: this should be 54, but it takes forever...

    // Authenticate proof paths
    let evaluations = channel.constraint_queries.evaluations;
    verify_merkle_proofs(
        constraint_queries_proof_ptr[0].proofs,
        positions,
        channel.constraint_root,
        num_queries,
        evaluations.elements,
        evaluations.n_cols,
    );

    return evaluations;
}
--------------------------------------------------------------------------------
/src/stark_verifier/fri/fri_verifier.cairo:
--------------------------------------------------------------------------------
from stark_verifier.air.air_instance import AirInstance
from stark_verifier.channel import Channel
from stark_verifier.air.stark_proof import ProofOptions
from starkware.cairo.common.cairo_builtins import BitwiseBuiltin
from stark_verifier.crypto.random import PublicCoin, reseed, draw, reseed_endian, hash_elements
from starkware.cairo.common.alloc import alloc
from starkware.cairo.common.memcpy import memcpy
from starkware.cairo.common.memset import memset
from starkware.cairo.common.math import assert_le, assert_not_zero, unsigned_div_rem
from starkware.cairo.common.math_cmp import is_le
from starkware.cairo.common.pow import pow
from utils.pow2 import pow2
from stark_verifier.channel import verify_merkle_proof, QueriesProofs, QueriesProof, read_remainder
from stark_verifier.fri.polynomials import lagrange_eval, interpolate_poly_and_verify
from stark_verifier.crypto.random import contains
from utils.math_goldilocks import mul_g, sub_g, add_g, div_g, pow_g
from crypto.hash_utils import assert_hashes_equal
from starkware.cairo.common.registers import get_fp_and_pc
from stark_verifier.utils import Vec

// TODO HARDCODE - use param instead
const FOLDING_FACTOR = 8;
const g = 7; // domain offset for goldilocks
const HASH_FELT_SIZE = 8;
const NUM_QUERIES = 27;

// A single FRI query opening: Merkle path plus the opened layer values.
struct FriQueryProof {
    length: 
felt,
    path: felt*,
    values: felt*,
}

struct FriOptions {
    folding_factor: felt,
    max_remainder_size: felt,
    blowup_factor: felt,
}

// Converts the proof-level options into the subset used by the FRI verifier.
func to_fri_options(proof_options: ProofOptions) -> FriOptions {
    let folding_factor = proof_options.fri_folding_factor;
    let max_remainder_size = proof_options.fri_max_remainder_size; // stored as power of 2
    let fri_options = FriOptions(folding_factor, max_remainder_size, proof_options.blowup_factor);
    return fri_options;
}

struct FriVerifier {
    max_poly_degree: felt,
    domain_size: felt,
    domain_generator: felt,
    layer_commitments: felt*,
    layer_alphas: felt*,
    options: FriOptions,
    num_partitions: felt,
}

// Reseeds the public coin with each layer commitment and draws the per-layer
// folding randomness (alpha), filling `layer_alpha_ptr` one felt per layer.
func _fri_verifier_new{
    range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*, public_coin: PublicCoin
}(
    options: FriOptions,
    max_degree_plus_1,
    layer_commitment_ptr: felt*,
    layer_alpha_ptr: felt*,
    count,
) {
    if (count == 0) {
        return ();
    }
    alloc_locals;

    reseed_endian(layer_commitment_ptr);
    let alpha = draw();
    assert [layer_alpha_ptr] = alpha;

    _fri_verifier_new(
        options,
        max_degree_plus_1 / options.folding_factor,
        layer_commitment_ptr + 8,
        layer_alpha_ptr + 1,
        count - 1,
    );
    return ();
}

// Reads the FRI query Merkle proofs for NUM_QUERIES positions via a hint.
func read_fri_proofs{
    range_check_ptr, blake2s_ptr: felt*, channel: Channel, bitwise_ptr: BitwiseBuiltin*
}(positions: felt*) -> FriQueryProof** {
    alloc_locals;

    let (local fri_queries_proof_ptr: FriQueryProof**) = alloc();
    %{
        from src.stark_verifier.utils import read_fri_queries_proofs
        read_fri_queries_proofs(ids.positions, ids.fri_queries_proof_ptr, ids.NUM_QUERIES, memory, segments)
    %}

    return fri_queries_proof_ptr;
}

// Builds a FriVerifier: derives the evaluation domain from the degree bound
// and blowup factor, and draws the per-layer alphas from the public coin.
func fri_verifier_new{
    range_check_ptr,
    blake2s_ptr: felt*,
    bitwise_ptr: 
BitwiseBuiltin*,
    public_coin: PublicCoin,
    channel: Channel,
}(options: FriOptions, max_poly_degree) -> FriVerifier {
    alloc_locals;

    let _next_power_of_two = next_power_of_two(max_poly_degree);
    // using normal mul since it should not overflow
    let domain_size = _next_power_of_two * options.blowup_factor;

    let domain_size_log2 = log2(domain_size);
    let domain_generator = get_root_of_unity(domain_size_log2);
    // air.trace_domain_generator ?
    // air.lde_domain_generator ?

    let num_partitions = 1;
    // channel.read_fri_num_partitions() ?

    // read layer commitments from the channel and use them to build a list of alphas
    let (layer_alphas) = alloc();
    let layer_commitments = channel.fri_roots;
    _fri_verifier_new(
        options, max_poly_degree + 1, layer_commitments, layer_alphas, channel.fri_roots_len
    );

    let res = FriVerifier(
        max_poly_degree,
        domain_size,
        domain_generator,
        layer_commitments,
        layer_alphas,
        options,
        num_partitions,
    );
    return res;
}

// Returns the smallest power of two >= x. The exponent comes from a hint and
// is then constrained by the two assert_le checks below.
func next_power_of_two{range_check_ptr}(x) -> felt {
    // leaving regular cairo field math since it shouldn't overflow or underflow
    // is this secure? 
    alloc_locals;
    local n_bits;
    %{ ids.n_bits = len( bin(ids.x - 1).replace('0b', '') ) %}
    let next_power_of_two = pow2(n_bits);
    local next_power_of_two = next_power_of_two;
    local x2_1 = x * 2 - 1;
    // x <= 2^n_bits <= 2x - 1 uniquely pins 2^n_bits as the least power of two >= x.
    with_attr error_message("{x} <= {next_power_of_two} <= {x2_1}") {
        assert_le(x, next_power_of_two);
        assert_le(next_power_of_two, x * 2 - 1);
    }
    return next_power_of_two;
}

// Goldilocks field: the multiplicative group has a subgroup of order 2^32.
const TWO_ADICITY = 32;
const TWO_ADIC_ROOT_OF_UNITY = 1753635133440165772;

// Returns a primitive 2^n-th root of unity in the Goldilocks field.
func get_root_of_unity{range_check_ptr}(n) -> felt {
    with_attr error_message("cannot get root of unity for n = 0") {
        assert_not_zero(n);
    }
    with_attr error_message("order cannot exceed 2^{TWO_ADICITY}") {
        assert_le(n, TWO_ADICITY);
    }
    // TWO_ADIC_ROOT_OF_UNITY^(2^(TWO_ADICITY - n)) has order 2^n.
    let base = sub_g(TWO_ADICITY, n);
    let power = pow_g(2, base);
    let root_of_unity = pow_g(TWO_ADIC_ROOT_OF_UNITY, power);
    return root_of_unity;
}

// Returns log2(n); asserts n is a power of two. The bit length comes from a
// hint and is verified by recomputing 2^n_bits.
func log2(n) -> felt {
    alloc_locals;
    local n_bits;
    %{ ids.n_bits = len( bin(ids.n - 1).replace('0b', '') ) %}
    let next_power_of_two = pow2(n_bits);
    with_attr error_message("n must be a power of two") {
        assert next_power_of_two = n;
    }
    return n_bits;
}

// Authenticates one Merkle path per FRI query position against `trace_roots`.
// NOTE(review): the leaf-digest check is currently commented out (see TODO),
// so only the path itself is verified here.
func verify_fri_merkle_proofs{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    proofs: QueriesProof*,
    positions: felt*,
    trace_roots: felt*,
    loop_counter,
    evaluations: felt*,
    n_evaluations: felt,
) {
    if (loop_counter == 0) {
        return ();
    }

    // let digest = hash_elements(n_elements=n_evaluations, elements=evaluations); // TODO: hash the evaluation correctly
    // assert_hashes_equal(digest, proofs[0].digests);

    verify_merkle_proof(proofs[0].length, proofs[0].digests, positions[0], trace_roots);
    verify_fri_merkle_proofs(
        &proofs[1],
        positions + 1,
        trace_roots,
        loop_counter - 1,
        evaluations + n_evaluations,
        n_evaluations,
    );
    return ();
}

// Number of FRI folding layers applied before the domain shrinks to at most
// max_remainder_size.
func num_fri_layers{range_check_ptr}(fri_verifier: FriVerifier*, domain_size) -> felt {
    let is_leq = is_le(fri_verifier.options.max_remainder_size + 1, domain_size);
    if (is_leq == 0) {
        return 0;
    }
    let res = num_fri_layers(fri_verifier, domain_size / fri_verifier.options.folding_factor);
    return 1 + res;
}

// pre-compute roots of unity used in computing x coordinates in the folded domain
func compute_folding_roots{range_check_ptr}(omega_folded: felt*, omega, log_degree: felt, i: felt) {
    if (i == FOLDING_FACTOR) {
        return ();
    }
    let degree = pow_g(2, log_degree);
    let new_domain_size = degree / FOLDING_FACTOR * i;
    let res = pow_g(omega, new_domain_size);
    assert [omega_folded] = res;
    compute_folding_roots(omega_folded + 1, omega, log_degree, i + 1);
    return ();
}

// Scales each precomputed folding root by xe, producing the FOLDING_FACTOR
// x-coordinates for interpolation (iterates downward from idx to 1).
func assign_folding_roots_loop{range_check_ptr}(
    idx: felt, folding_roots: felt*, xe: felt, x_values: felt*
) {
    if (idx == 0) {
        return ();
    }

    let r = mul_g(folding_roots[idx - 1], xe);
    assert x_values[idx - 1] = r;

    return assign_folding_roots_loop(idx - 1, folding_roots, xe, x_values);
}

// Verifies one query across all FRI layers: authenticates each layer opening,
// folds the opened evaluations with the layer alpha via Lagrange
// interpolation, and finally checks the last evaluation against the remainder.
func verify_layers{range_check_ptr, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*}(
    omega: felt,
    alphas: felt*,
    position: felt,
    evaluations: felt*,
    num_layer_evaluations: felt,
    num_layers: felt,
    previous_eval: felt,
    fri_proofs: FriQueryProof**,
    modulus: felt,
    verified_positions: felt**,
    verified_positions_len: felt*,
    next_verified_positions_len: felt*,
    layer_commitments: felt*,
    folding_roots: felt*,
    remainders: Vec*,
) {
    alloc_locals;
    if (num_layers == 0) {
        // Check that the 
claimed remainder is equal to the final evaluation.
        assert_contains(remainders.elements, remainders.n_elements, evaluations[0]);
        return ();
    }

    // Split the position into its index within the folded coset
    // (query_position) and its index in the folded domain (folded_position).
    let (local query_position, folded_position) = unsigned_div_rem(position, modulus);

    // Check if we have already verified this folded_position
    local index: felt;
    let curr_len = verified_positions_len[0];
    let prev_positions = verified_positions[0];
    // This hint gives us the index of the position if included, or it returns -1
    %{
        from src.stark_verifier.utils import index_of
        ids.index = index_of(ids.prev_positions, ids.curr_len, ids.folded_position, memory)
    %}
    // If so, copy the previous verified_positions_len, and we're done
    if (index != -1) {
        // Verify the index given by the hint
        assert folded_position = verified_positions[0][index];
        // Copy previous lengths
        memcpy(next_verified_positions_len, verified_positions_len, num_layers);
        return ();
    }
    let index = curr_len;

    // Otherwise, verify this folded_position
    assert verified_positions[0][index] = folded_position;
    // and add it to verified_positions
    assert next_verified_positions_len[0] = index + 1;

    // Verify that evaluations are consistent with the layer commitment
    let query_proof = fri_proofs[0][index];
    verify_merkle_proof(query_proof.length, query_proof.path, folded_position, layer_commitments);
    let leaf_hash = hash_elements(n_elements=FOLDING_FACTOR, elements=query_proof.values);
    assert_hashes_equal(leaf_hash, query_proof.path);
    let is_contained = contains(evaluations[0], query_proof.values, FOLDING_FACTOR);
    assert_not_zero(is_contained);

    // Compare poly evaluations to the query proof
    let query_value = query_proof.values[query_position];
    assert query_value = evaluations[0];

    // Interpolate the evaluations at the x-coordinates, 
and evaluate at alpha.
    let alpha = [alphas];
    let xe = pow_g(omega, folded_position);
    // Shift by the domain offset g (coset evaluation).
    local xe = mul_g(xe, g);
    let (local x_values) = alloc();

    tempvar i = FOLDING_FACTOR;

    assign_folding_roots_loop(i, folding_roots, xe, x_values);

    let previous_eval = lagrange_eval(query_proof.values, x_values, FOLDING_FACTOR, alpha);

    // Update variables for the next layer
    let omega = pow_g(omega, FOLDING_FACTOR);
    let modulus = modulus / FOLDING_FACTOR;
    let (evaluations) = alloc();
    assert evaluations[0] = previous_eval;

    return verify_layers(
        omega,
        alphas + 1,
        folded_position,
        evaluations,
        num_layer_evaluations,
        num_layers - 1,
        previous_eval,
        &fri_proofs[1],
        modulus,
        &verified_positions[1],
        &verified_positions_len[1],
        &next_verified_positions_len[1],
        &layer_commitments[HASH_FELT_SIZE],
        folding_roots,
        remainders,
    );
}

// Runs verify_layers for each of `num_queries` query positions, threading the
// per-layer verified-position bookkeeping from one query to the next.
func verify_queries{
    range_check_ptr, channel: Channel, blake2s_ptr: felt*, bitwise_ptr: BitwiseBuiltin*
}(
    fri_verifier: FriVerifier*,
    positions: felt*,
    evaluations: felt*,
    num_queries: felt,
    fri_proofs: FriQueryProof**,
    num_layers: felt,
    verified_positions: felt**,
    verified_positions_len: felt*,
    folding_roots: felt*,
    remainders: Vec*,
) {
    if (num_queries == 0) {
        return ();
    }
    alloc_locals;

    // Iterate over the layers within this query
    verify_layers(
        omega=fri_verifier.domain_generator,
        alphas=fri_verifier.layer_alphas,
        position=[positions],
        evaluations=evaluations,
        num_layer_evaluations=FOLDING_FACTOR * num_layers,
        num_layers=num_layers,
        previous_eval=0,
        fri_proofs=fri_proofs,
        modulus=fri_verifier.domain_size / FOLDING_FACTOR,
        verified_positions=verified_positions,
        verified_positions_len=verified_positions_len,
        next_verified_positions_len=&verified_positions_len[num_layers],
        layer_commitments=channel.fri_roots,
        folding_roots=folding_roots,
        remainders=remainders,
    );

    // Iterate over the remaining queries
    verify_queries(
        fri_verifier,
        &positions[1],
        &evaluations[1],
        num_queries - 1,
        fri_proofs,
        num_layers,
        verified_positions,
        &verified_positions_len[num_layers],
        folding_roots,
        remainders,
    );
    return ();
}

// Top-level FRI verification: reads the query proofs and the remainder via
// hints, then checks every query's folding path down to the remainder layer.
func fri_verify{
    range_check_ptr, blake2s_ptr: felt*, channel: Channel, bitwise_ptr: BitwiseBuiltin*
}(fri_verifier: FriVerifier, evaluations: felt*, positions: felt*) {
    alloc_locals;
    let (__fp__, _) = get_fp_and_pc();
    // Read FRI Merkle proofs from a hint
    let fri_proofs = read_fri_proofs(positions);

    // Read remainders from a hint
    // and check that a Merkle tree of the claimed remainders hash to the final layer commitment
    let remainder: Vec = read_remainder();
    let remainder_ptr = remainder;

    let num_layers = num_fri_layers(&fri_verifier, fri_verifier.domain_size);

    // Initialize an empty array of verified positions for each layer
    let (local verified_positions: felt**) = alloc();
    tempvar verified_positions_ptr = verified_positions;
    tempvar n = num_layers;

    init_loop:
    let (array) = alloc();
    assert [verified_positions_ptr] = array;
    tempvar verified_positions_ptr = verified_positions_ptr + 1;
    tempvar n = n - 1;
    jmp init_loop if n != 0;

    let (verified_positions_len: felt*) = alloc();
    memset(verified_positions_len, 0, num_layers);

    // Compute the remaining folded roots of unity
    let (folding_roots) = alloc();
    let log2_domain_size = log2(fri_verifier.domain_size);
    compute_folding_roots(
omega_folded=folding_roots,
        omega=fri_verifier.domain_generator,
        log_degree=log2_domain_size,
        i=0,
    );

    // Verify a round for each query
    verify_queries(
        &fri_verifier,
        positions,
        evaluations,
        NUM_QUERIES,
        fri_proofs,
        num_layers,
        verified_positions,
        verified_positions_len,
        folding_roots,
        &remainder_ptr,
    );

    return ();
}

// Ensure that a given array contains a particular element.
// The index is supplied nondeterministically; the assert below binds `element`
// to the value actually stored at that index.
func assert_contains(array: felt*, array_len, element) {
    alloc_locals;
    local index: felt;
    // Fix: hints do not share Python scope, so index_of must be imported in
    // this hint (as every other hint in these files does); previously it was
    // an undefined name and the hint raised NameError at runtime.
    %{
        from src.stark_verifier.utils import index_of
        ids.index = index_of(ids.array, ids.array_len, ids.element, memory)
    %}
    // TODO: Do we have to verify that `0 <= index < array_len` here? For full
    // soundness yes (requires a range check builtin in this signature).
    assert element = array[index];
    return ();
}