├── .gitignore ├── .gitmodules ├── Cargo.toml ├── LICENSE ├── README.md ├── circom-prover ├── Cargo.toml ├── build.rs └── src │ ├── circom.rs │ ├── json.rs │ ├── lib.rs │ ├── utils.rs │ └── verification.rs ├── circom.sh ├── circuits ├── air │ └── sum.circom ├── fri.circom ├── merkle.circom ├── ood_consistency_check.circom ├── poseidon │ ├── generate_parameters_grain.sage.py │ ├── param.circom │ └── poseidon.circom ├── public_coin.circom ├── utils.circom └── verify.circom ├── examples └── sum │ ├── Cargo.toml │ └── src │ ├── air.rs │ ├── compile.rs │ ├── prove.rs │ ├── prover.rs │ └── verify.rs └── winterfell ├── .cargo └── katex-header.html ├── .github └── workflows │ └── ci.yml ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── air ├── .cargo │ └── katex-header.html ├── Cargo.toml ├── README.md └── src │ ├── air │ ├── assertions │ │ ├── mod.rs │ │ └── tests.rs │ ├── boundary │ │ ├── constraint.rs │ │ ├── constraint_group.rs │ │ ├── mod.rs │ │ └── tests.rs │ ├── coefficients.rs │ ├── context.rs │ ├── divisor.rs │ ├── mod.rs │ ├── tests.rs │ ├── trace_info.rs │ └── transition │ │ ├── degree.rs │ │ ├── frame.rs │ │ └── mod.rs │ ├── errors.rs │ ├── lib.rs │ ├── options.rs │ └── proof │ ├── commitments.rs │ ├── context.rs │ ├── mod.rs │ ├── ood_frame.rs │ ├── queries.rs │ └── table.rs ├── crypto ├── Cargo.toml ├── README.md ├── benches │ ├── hash.rs │ └── merkle.rs └── src │ ├── errors.rs │ ├── hash │ ├── blake │ │ ├── mod.rs │ │ └── tests.rs │ ├── mod.rs │ ├── poseidon │ │ ├── Makefile │ │ ├── generate_parameters_grain.sage.py │ │ ├── mod.rs │ │ ├── param.rs │ │ ├── poseidon.rs │ │ └── tests.rs │ ├── rescue │ │ ├── mod.rs │ │ ├── rp62_248 │ │ │ ├── digest.rs │ │ │ ├── mod.rs │ │ │ └── tests.rs │ │ └── rp64_256 │ │ │ ├── digest.rs │ │ │ ├── mod.rs │ │ │ └── tests.rs │ └── sha │ │ └── mod.rs │ ├── lib.rs │ ├── merkle │ ├── concurrent.rs │ ├── mod.rs │ ├── proofs.rs │ └── tests.rs │ └── random │ └── mod.rs ├── fri ├── Cargo.toml ├── 
README.md ├── benches │ ├── folding.rs │ └── prover.rs └── src │ ├── errors.rs │ ├── folding │ └── mod.rs │ ├── lib.rs │ ├── options.rs │ ├── proof.rs │ ├── prover │ ├── channel.rs │ ├── mod.rs │ └── tests.rs │ ├── utils.rs │ └── verifier │ ├── channel.rs │ └── mod.rs ├── math ├── .cargo │ └── katex-header.html ├── Cargo.toml ├── README.md ├── benches │ ├── fft.rs │ ├── field.rs │ └── polynom.rs └── src │ ├── fft │ ├── concurrent.rs │ ├── mod.rs │ ├── serial.rs │ └── tests.rs │ ├── field │ ├── extensions │ │ ├── cubic.rs │ │ ├── mod.rs │ │ └── quadratic.rs │ ├── f128 │ │ ├── mod.rs │ │ └── tests.rs │ ├── f256 │ │ ├── mod.rs │ │ ├── tests.rs │ │ ├── u256.rs │ │ └── u512.rs │ ├── f62 │ │ ├── mod.rs │ │ └── tests.rs │ ├── f64 │ │ ├── mod.rs │ │ └── tests.rs │ ├── mod.rs │ └── traits.rs │ ├── lib.rs │ ├── polynom │ ├── mod.rs │ └── tests.rs │ └── utils │ └── mod.rs ├── prover ├── .cargo │ └── katex-header.html ├── Cargo.toml ├── README.md └── src │ ├── channel.rs │ ├── composer │ └── mod.rs │ ├── constraints │ ├── boundary.rs │ ├── commitment.rs │ ├── composition_poly.rs │ ├── evaluation_table.rs │ ├── evaluator.rs │ ├── mod.rs │ └── periodic_table.rs │ ├── domain.rs │ ├── errors.rs │ ├── lib.rs │ ├── matrix.rs │ ├── tests │ └── mod.rs │ └── trace │ ├── commitment.rs │ ├── mod.rs │ ├── poly_table.rs │ ├── tests.rs │ ├── trace_lde.rs │ └── trace_table.rs ├── utils ├── core │ ├── Cargo.toml │ ├── README.md │ └── src │ │ ├── collections.rs │ │ ├── errors.rs │ │ ├── iterators.rs │ │ ├── lib.rs │ │ ├── string.rs │ │ └── tests.rs └── rand │ ├── Cargo.toml │ ├── README.md │ └── src │ └── lib.rs ├── verifier ├── .cargo │ └── katex-header.html ├── Cargo.toml ├── README.md └── src │ ├── channel.rs │ ├── composer.rs │ ├── errors.rs │ ├── evaluator.rs │ └── lib.rs └── winterfell ├── .cargo └── katex-header.html ├── Cargo.toml ├── README.md └── src └── lib.rs /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # 
will have compiled files and executables 3 | **/target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | # macOS attribute files 13 | **/.DS_store 14 | 15 | # circom & snarkjs 16 | target/ 17 | *.ptau 18 | 19 | # VS Code settings 20 | **/.vscode 21 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "iden3_circom"] 2 | path = iden3/circom 3 | url = https://github.com/iden3/circom 4 | branch = master 5 | [submodule "iden3/snarkjs"] 6 | path = iden3/snarkjs 7 | url = https://github.com/iden3/snarkjs 8 | branch = master 9 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "winterfell/utils/core", 4 | "winterfell/utils/rand", 5 | "winterfell/math", 6 | "winterfell/crypto", 7 | "winterfell/fri", 8 | "winterfell/air", 9 | "winterfell/prover", 10 | "winterfell/verifier", 11 | "winterfell/winterfell", 12 | "circom-prover", 13 | "examples/sum" 14 | ] 15 | 16 | 17 | [profile.release] 18 | codegen-units = 1 19 | lto = true 20 | 21 | [profile.bench] 22 | codegen-units = 1 23 | lto = true 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, 
merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. -------------------------------------------------------------------------------- /circom-prover/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-circom-prover" 3 | version = "0.1.0" 4 | edition = "2021" 5 | rust-version = "1.60" 6 | 7 | [features] 8 | std = ["winterfell/std", "serde/std", "serde_json/std"] 9 | default = ["std"] 10 | concurrent = ["std", "winterfell/concurrent"] 11 | 12 | [dependencies] 13 | rug = "1.16" 14 | winterfell = { version = "0.4.0", default-features = false, path = "../winterfell/winterfell" } 15 | serde = { version = "1.0", default-features = false } 16 | serde_json = { version = "1.0", default-features = false } 17 | colored = "2.0" 18 | -------------------------------------------------------------------------------- /circom-prover/build.rs: -------------------------------------------------------------------------------- 1 | use std::{env, path::Path, process::Command}; 2 | 3 | // TODO: switch to cargo binary dependency when available 4 | // see https://rust-lang.github.io/rfcs/3028-cargo-binary-dependencies.html 5 | pub fn main() { 6 | 
println!("cargo:rerun-if-changed=../iden3/circom/"); 7 | println!("cargo:rerun-if-changed=../iden3/snarkjs/build/"); 8 | let cargo = env::var("CARGO").unwrap(); 9 | 10 | // initialize and update git submodules 11 | if !(Path::new("iden3/circom/.git").exists() && Path::new("iden3/snarkjs/.git").exists()) { 12 | assert!( 13 | Command::new("git") 14 | .arg("submodule") 15 | .arg("update") 16 | .arg("--init") 17 | .arg("--recursive") 18 | .status() 19 | .unwrap() 20 | .success(), 21 | "Git submodule initialization failed." 22 | ); 23 | } 24 | 25 | // build circom 26 | assert!( 27 | Command::new(cargo) 28 | .arg("build") 29 | .arg("--release") 30 | .current_dir("../iden3/circom") 31 | .status() 32 | .unwrap() 33 | .success(), 34 | "Circom build failed." 35 | ); 36 | 37 | // npm clean install 38 | assert!( 39 | Command::new("npm") 40 | .arg("ci") 41 | .current_dir("../iden3/snarkjs") 42 | .status() 43 | .unwrap() 44 | .success(), 45 | "Npm SnarkJS clean install failed." 46 | ); 47 | } 48 | -------------------------------------------------------------------------------- /circom-prover/src/verification.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | 3 | use winterfell::{ 4 | math::{ 5 | fields::f256::{BaseElement, U256}, 6 | FieldElement, 7 | }, 8 | Air, EvaluationFrame, 9 | }; 10 | 11 | /// Check that the out-of-domain (OOD) trace frame corresponds to the given [Air] 12 | /// and the OOD constraint evaluations. 13 | /// 14 | /// The OOD trace frame is guaranteed correct by the Circom Groth16 proof. Indeed, 15 | /// should it have been modified, the pseudo-randomly generated query positions 16 | /// would be different and the Merkle commitment verifications would fail. This 17 | /// function therefore guarantees that the OOD constraint evaluations are correct. 
18 | /// 19 | /// This function requires the `public.json` file in the 20 | /// `target/circom//` directory to contain `t` ood constraint 21 | /// evaluations and 2`t` ood trace frame elements, in that order, where `t` is 22 | /// the trace width. This should be correct if the Circom proof was generated 23 | /// with the [circom_prove](crate::circom_prove) function. 24 | pub fn check_ood_frame(circuit_name: &str) 25 | where 26 | AIR: Air + Default, 27 | { 28 | // public.json parsing 29 | let data = fs::read_to_string(format!("target/circom/{}/public.json", circuit_name)) 30 | .expect("Unable to read file"); 31 | let json: serde_json::Value = 32 | serde_json::from_str(&data).expect("public.json format incorrect!"); 33 | 34 | let pub_inputs = json.as_array().unwrap(); 35 | 36 | // public.json contains 3 * trace_width elements : 37 | // - trace_width ood_constraint_evaluation 38 | // - 2 * trace_width elements for the OOD trace frame 39 | let trace_width = pub_inputs.len() / 3; 40 | 41 | let mut channel_ood_constraint_evaluation = Vec::::with_capacity(trace_width); 42 | 43 | for i in 0..trace_width { 44 | channel_ood_constraint_evaluation.push(BaseElement::new( 45 | U256::from_str_radix(pub_inputs[i].as_str().unwrap(), 10).unwrap(), 46 | )); 47 | } 48 | 49 | let mut frame = EvaluationFrame::new(trace_width); 50 | 51 | for i in 0..trace_width { 52 | frame.current_mut()[i] = BaseElement::new( 53 | U256::from_str_radix(pub_inputs[trace_width + i].as_str().unwrap(), 10).unwrap(), 54 | ); 55 | frame.next_mut()[i] = BaseElement::new( 56 | U256::from_str_radix(pub_inputs[2 * trace_width + i].as_str().unwrap(), 10).unwrap(), 57 | ); 58 | } 59 | 60 | // We only need to access the 'evaluate_constraints' method which doesn't depend on the air. 61 | // A default implementation of a Workair is sufficient here. 
62 | let air = AIR::default(); 63 | let mut ood_frame_constraint_evaluation = BaseElement::zeroed_vector(trace_width); 64 | air.evaluate_transition::(&frame, &[], &mut ood_frame_constraint_evaluation); 65 | 66 | for i in 0..trace_width { 67 | assert!( 68 | ood_frame_constraint_evaluation[i] == channel_ood_constraint_evaluation[i], 69 | "\x1b[33m{}\x1b[0m", 70 | "Proof invalid: OOD not correct!" 71 | ); 72 | } 73 | 74 | println!( 75 | "\x1b[32m{}\x1b[0m", 76 | "OOD constraint evaluations are correct!" 77 | ); 78 | } 79 | -------------------------------------------------------------------------------- /circuits/air/sum.circom: -------------------------------------------------------------------------------- 1 | pragma circom 2.0.0; 2 | 3 | include "../utils.circom"; 4 | 5 | /** 6 | * Define the degree for the transitions constraints. 7 | * 8 | * INPUTS: 9 | * - frame: Out Of Domain frame on which we will check the 10 | * the consistency with the channel. 11 | * 12 | * OUTPUTS: 13 | * - transition_degree : degree of the transition, will be used for degree 14 | * adjustment. Should be set to the number of trace columns multiplied in 15 | * during the transition. 16 | */ 17 | template AIRTransitions(trace_width) { 18 | signal output transition_degree[trace_width]; 19 | 20 | // transition 0 21 | transition_degree[0] <== 1; 22 | 23 | // transition 1 24 | transition_degree[1] <== 1; 25 | } 26 | 27 | /** 28 | * Define the assertions that will tie your public inputs to the calculation. 29 | * These assertions will then be transformed into boundray constraints. 30 | * For now only single assertions are supported : 31 | * --> Assigning a value to a fixed step for a fixed trace column. 
32 | * 33 | * INPUTS: 34 | * - public_inputs: inputs used for the calculation 35 | * - frame: Out Of Domain evaluation frame 36 | * 37 | * OUTPUTS: 38 | * - out: evaluation of the boundary constraints against each trace column 39 | * - divisor_degree: degree of the polynomial used as divisor, need for degree 40 | * adjustment 41 | * 42 | * TODO: 43 | * - Add support for cyclic and sequence constraints. 44 | * - for now divisor_degree is always 1 as we only use signel constraints. See 45 | * https://docs.rs/winter-air/0.4.0/winter_air/struct.ConstraintDivisor.html for 46 | * for other types of divisors. 47 | */ 48 | template AIRAssertions( 49 | num_assertions, 50 | num_public_inputs, 51 | trace_length, 52 | trace_width 53 | ) { 54 | signal input frame[2][trace_width]; 55 | signal input g_trace; 56 | signal input public_inputs[num_public_inputs]; 57 | signal input z; 58 | 59 | signal output out[num_assertions]; 60 | signal output divisor_degree[num_assertions]; 61 | 62 | signal numerator[num_assertions]; 63 | signal value[num_assertions]; 64 | signal output step[num_assertions]; 65 | signal register[num_assertions]; 66 | 67 | /* HERE YOUR ASSERTIONS HERE */ 68 | 69 | value[0] <== public_inputs[0]; 70 | step[0] <== 0; 71 | register[0] <== 0; 72 | 73 | value[1] <== public_inputs[0]; 74 | step[1] <== 0; 75 | register[1] <== 1; 76 | 77 | value[2] <== public_inputs[1]; 78 | step[2] <== trace_length - 1; 79 | register[2] <== 1; 80 | 81 | /* ------------------------------------- */ 82 | 83 | // boundary constraints evaluation 84 | component sel[num_assertions]; 85 | for (var i = 0; i < num_assertions; i++) { 86 | sel[i] = Selector(trace_width); 87 | for (var j = 0; j < trace_width; j++) { 88 | sel[i].in[j] <== frame[0][j]; 89 | } 90 | sel[i].index <== register[i]; 91 | 92 | out[i] <== sel[i].out - value[i]; 93 | divisor_degree[i] <== 1; 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /examples/sum/Cargo.toml: 
-------------------------------------------------------------------------------- 1 | [package] 2 | name = "example-sum" 3 | version = "0.1.0" 4 | edition = "2021" 5 | rust-version = "1.60" 6 | default-run = "prove" 7 | 8 | [features] 9 | std = ["serde/std", "winter-circom-prover/std"] 10 | default = ["std"] 11 | concurrent = ["std", "winter-circom-prover/concurrent"] 12 | 13 | [dependencies] 14 | winter-circom-prover = { version = "0.1.0", default-features = false, path = "../../circom-prover" } 15 | serde = { version = "1.0", default-features = false } 16 | 17 | [[bin]] 18 | name = "compile" 19 | path = "src/compile.rs" 20 | 21 | [[bin]] 22 | name = "prove" 23 | path = "src/prove.rs" 24 | 25 | [[bin]] 26 | name = "verify" 27 | path = "src/verify.rs" 28 | -------------------------------------------------------------------------------- /examples/sum/src/air.rs: -------------------------------------------------------------------------------- 1 | use serde::{ser::SerializeTuple, Serialize}; 2 | use winter_circom_prover::{winterfell::{ 3 | math::{fields::f256::BaseElement, FieldElement}, 4 | Air, AirContext, Assertion, ByteWriter, EvaluationFrame, FieldExtension, HashFunction, 5 | ProofOptions, Serializable, TraceInfo, 6 | }, WinterCircomProofOptions}; 7 | use winter_circom_prover::WinterPublicInputs; 8 | 9 | pub(crate) const PROOF_OPTIONS: WinterCircomProofOptions<2> = 10 | WinterCircomProofOptions::new(128, 2, 3, [1, 1], 32, 8, 0, 8, 128); 11 | 12 | #[derive(Clone, Default)] 13 | pub struct PublicInputs { 14 | pub start: BaseElement, 15 | pub result: BaseElement, 16 | } 17 | 18 | impl WinterPublicInputs for PublicInputs { 19 | const NUM_PUB_INPUTS: usize = 2; 20 | } 21 | 22 | impl Serialize for PublicInputs { 23 | fn serialize(&self, serializer: S) -> Result 24 | where 25 | S: serde::Serializer, 26 | { 27 | let mut state = serializer.serialize_tuple(2)?; 28 | state.serialize_element(&self.start)?; 29 | state.serialize_element(&self.result)?; 30 | state.end() 31 | } 
32 | } 33 | 34 | impl Serializable for PublicInputs { 35 | fn write_into(&self, target: &mut W) { 36 | target.write(self.start); 37 | target.write(self.result); 38 | } 39 | } 40 | 41 | pub struct WorkAir { 42 | context: AirContext, 43 | start: BaseElement, 44 | result: BaseElement, 45 | } 46 | 47 | impl Air for WorkAir { 48 | type BaseField = BaseElement; 49 | type PublicInputs = PublicInputs; 50 | 51 | fn new(trace_info: TraceInfo, pub_inputs: PublicInputs, options: ProofOptions) -> Self { 52 | let degrees = PROOF_OPTIONS.transition_constraint_degrees(); 53 | 54 | let num_assertions = PROOF_OPTIONS.num_assertions(); 55 | 56 | WorkAir { 57 | context: AirContext::new(trace_info, degrees, num_assertions, options), 58 | start: pub_inputs.start, 59 | result: pub_inputs.result, 60 | } 61 | } 62 | 63 | fn evaluate_transition>( 64 | &self, 65 | frame: &EvaluationFrame, 66 | _periodic_values: &[E], 67 | result: &mut [E], 68 | ) { 69 | let current = &frame.current(); 70 | let next = &frame.next(); 71 | 72 | result[0] = next[0] - (current[0] + E::ONE); 73 | result[1] = next[1] - (current[1] + current[0] + E::ONE); 74 | } 75 | 76 | fn get_assertions(&self) -> Vec> { 77 | let last_step = self.trace_length() - 1; 78 | vec![ 79 | Assertion::single(0, 0, self.start), 80 | Assertion::single(1, 0, self.start), 81 | Assertion::single(1, last_step, self.result), 82 | ] 83 | } 84 | 85 | fn context(&self) -> &AirContext { 86 | &self.context 87 | } 88 | } 89 | 90 | impl Default for WorkAir { 91 | fn default() -> Self { 92 | WorkAir::new( 93 | TraceInfo::new(2, 8), 94 | PublicInputs::default(), 95 | ProofOptions::new( 96 | 32, 97 | 8, 98 | 0, 99 | HashFunction::Poseidon, 100 | FieldExtension::None, 101 | 8, 102 | 128, 103 | ), 104 | ) 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /examples/sum/src/compile.rs: -------------------------------------------------------------------------------- 1 | #[path ="prove.rs"] 2 | 
#[allow(dead_code)] 3 | mod prove; 4 | 5 | use prove::{PROOF_OPTIONS, WorkProver}; 6 | use winter_circom_prover::{circom_compile, utils::{LoggingLevel, WinterCircomError}}; 7 | 8 | fn main() -> Result<(), WinterCircomError> { 9 | circom_compile::(PROOF_OPTIONS, "sum", LoggingLevel::Default) 10 | } 11 | -------------------------------------------------------------------------------- /examples/sum/src/prove.rs: -------------------------------------------------------------------------------- 1 | use winter_circom_prover::{ 2 | circom_prove, 3 | utils::{LoggingLevel, WinterCircomError}, 4 | winterfell::math::{fields::f256::BaseElement, FieldElement}, 5 | }; 6 | 7 | mod air; 8 | pub(crate) use air::PROOF_OPTIONS; 9 | 10 | mod prover; 11 | pub use prover::WorkProver; 12 | 13 | fn main() -> Result<(), WinterCircomError> { 14 | // parameters 15 | let start = BaseElement::ONE; 16 | 17 | // build proof 18 | let options = PROOF_OPTIONS.get_proof_options(); 19 | let prover = WorkProver::new(options.clone()); 20 | let trace = prover.build_trace(start, PROOF_OPTIONS.trace_length); 21 | 22 | circom_prove(prover, trace, "sum", LoggingLevel::Default) 23 | } 24 | -------------------------------------------------------------------------------- /examples/sum/src/prover.rs: -------------------------------------------------------------------------------- 1 | use super::air::{PublicInputs, WorkAir, PROOF_OPTIONS}; 2 | use winter_circom_prover::winterfell::{ 3 | math::{fields::f256::BaseElement, FieldElement}, 4 | ProofOptions, Prover, Trace, TraceTable, 5 | }; 6 | 7 | pub struct WorkProver { 8 | options: ProofOptions, 9 | } 10 | 11 | impl WorkProver { 12 | pub fn new(options: ProofOptions) -> Self { 13 | Self { options } 14 | } 15 | 16 | pub fn build_trace(&self, start: BaseElement, n: usize) -> TraceTable { 17 | let trace_width = PROOF_OPTIONS.trace_width; 18 | let mut trace = TraceTable::new(trace_width, n); 19 | 20 | trace.fill( 21 | |state| { 22 | state[0] = start; 23 | state[1] = 
start; 24 | }, 25 | |_, state| { 26 | state[0] += BaseElement::ONE; 27 | state[1] += state[0]; 28 | }, 29 | ); 30 | 31 | trace 32 | } 33 | } 34 | 35 | impl Prover for WorkProver { 36 | type BaseField = BaseElement; 37 | type Air = WorkAir; 38 | type Trace = TraceTable; 39 | 40 | fn get_pub_inputs(&self, trace: &Self::Trace) -> PublicInputs { 41 | let last_step = trace.length() - 1; 42 | PublicInputs { 43 | start: trace.get(0, 0), 44 | result: trace.get(1, last_step), 45 | } 46 | } 47 | 48 | fn options(&self) -> &ProofOptions { 49 | &self.options 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /examples/sum/src/verify.rs: -------------------------------------------------------------------------------- 1 | use winter_circom_prover::{ 2 | check_ood_frame, circom_verify, 3 | utils::{LoggingLevel, WinterCircomError}, 4 | }; 5 | 6 | mod air; 7 | use air::WorkAir; 8 | 9 | fn main() -> Result<(), WinterCircomError> { 10 | check_ood_frame::("sum"); 11 | circom_verify("sum", LoggingLevel::Verbose)?; 12 | 13 | Ok(()) 14 | } 15 | -------------------------------------------------------------------------------- /winterfell/.cargo/katex-header.html: -------------------------------------------------------------------------------- 1 | 3 | 6 | 9 | -------------------------------------------------------------------------------- /winterfell/.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | branches: 5 | - main 6 | pull_request: 7 | types: [opened, repoened, synchronize] 8 | 9 | jobs: 10 | test: 11 | name: Test Rust ${{matrix.toolchain}} on ${{matrix.os}} 12 | runs-on: ${{matrix.os}}-latest 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | toolchain: [stable, nightly] 17 | os: [ubuntu] 18 | steps: 19 | - uses: actions/checkout@main 20 | - name: Install rust 21 | uses: actions-rs/toolchain@v1 22 | with: 23 | toolchain: ${{matrix.toolchain}} 
24 | override: true 25 | - name: Test 26 | uses: actions-rs/cargo@v1 27 | with: 28 | command: test 29 | 30 | clippy: 31 | name: Clippy 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@main 35 | - name: Install minimal nightly with clippy 36 | uses: actions-rs/toolchain@v1 37 | with: 38 | profile: minimal 39 | toolchain: nightly 40 | components: clippy 41 | override: true 42 | 43 | - name: Clippy 44 | uses: actions-rs/cargo@v1 45 | with: 46 | command: clippy 47 | args: --all -- -D clippy::all -D warnings 48 | 49 | rustfmt: 50 | name: rustfmt 51 | runs-on: ubuntu-latest 52 | steps: 53 | - uses: actions/checkout@main 54 | - name: Install minimal nightly with rustfmt 55 | uses: actions-rs/toolchain@v1 56 | with: 57 | profile: minimal 58 | toolchain: nightly 59 | components: rustfmt 60 | override: true 61 | 62 | - name: rustfmt 63 | uses: actions-rs/cargo@v1 64 | with: 65 | command: fmt 66 | args: --all -- --check 67 | 68 | no-std: 69 | name: no-std 70 | runs-on: ubuntu-latest 71 | strategy: 72 | fail-fast: false 73 | matrix: 74 | toolchain: [stable, nightly] 75 | target: 76 | - wasm32-unknown-unknown 77 | steps: 78 | - uses: actions/checkout@main 79 | - name: Install rust 80 | uses: actions-rs/toolchain@v1 81 | with: 82 | toolchain: ${{matrix.toolchain}} 83 | override: true 84 | - run: rustup target add wasm32-unknown-unknown 85 | - name: Build 86 | uses: actions-rs/cargo@v1 87 | with: 88 | command: build 89 | args: --verbose --no-default-features --target ${{ matrix.target }} 90 | -------------------------------------------------------------------------------- /winterfell/CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## 0.4.0 (2022-04-29) 4 | * Added support for Randomized AIR (with example). 5 | * Added support for custom number of transition constraint exemptions. 6 | * Enabled transition constraints of degree *n + 1* when blowup factor is *n*. 
7 | * Moved trace and constraint commitment construction into separate functions in the `Prover` trait. 8 | * Introduced `Matrix` struct in the prover which is used as a backing type for trace and constraint evaluations. 9 | * Added `ExtensionOf` trait and implemented it for all supported fields. 10 | * Sped up inversion in `f64` field by using inversion method based on Fermat’s little theorem. 11 | * Implemented `Randomizable` trait for `u32`, `u16`, and `u8` types. 12 | * [BREAKING] `AirContext::new()` now requires `num_assertions` parameter. 13 | * [BREAKING] Various interface changes in the `Air` trait to support multi-segment traces. 14 | * Increased min version of `rustc` to 1.60. 15 | 16 | ## 0.3.2 (2022-01-20) - crypto 17 | * Implemented into byte conversion for Rp64_256 digest. 18 | * Moved capacity elements to the front of the state for Rp64_256. 19 | 20 | ## 0.3.1 (2022-01-13) - crypto 21 | * Implemented digest to array conversion for Rp64_256 digest. 22 | * Exposed some internal functions of Rp64_256 publicly. 23 | 24 | ## 0.3.0 (2022-01-04) 25 | * Added `f64` field. 26 | * Added support for cubic field extensions. 27 | * Added an implementation of Rescue Prime hash function in `f64` field. 28 | * Switched to Rust 2021 and increased min version of `rustc` to 1.57. 29 | * [BREAKING] Renamed `Air::BaseElement` to `Air::BaseField`. 30 | * [BREAKING] Replaced `prover::prove()` function with `Prover` trait. 31 | * [BREAKING] Split `ExecutionTrace` struct into `Trace` trait and `TraceTable` struct. 32 | 33 | ## 0.2.0 (2021-08-23) 34 | * Added `Blake3_192` as hash function option. 35 | * Implemented high-performance version of Rescue Prime hash function. 36 | * Removed `alloc` feature in favor of turning on `no_std` via `--no-default-features` flag only. 37 | * Moved `rand` dependency to `dev-dependencies` only and removed `hashbrown` dependency. 38 | * Increased min version of `rustc` to 1.54. 
39 | 40 | ## 0.1.0 (2021-08-03) 41 | * Initial release -------------------------------------------------------------------------------- /winterfell/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as 6 | contributors and maintainers pledge to make participation in our project and 7 | our community a harassment-free experience for everyone, regardless of age, body 8 | size, disability, ethnicity, sex characteristics, gender identity and expression, 9 | level of experience, education, socio-economic status, nationality, personal 10 | appearance, race, religion, or sexual identity and orientation. 11 | 12 | ## Our Standards 13 | 14 | Examples of behavior that contributes to creating a positive environment 15 | include: 16 | 17 | * Using welcoming and inclusive language 18 | * Being respectful of differing viewpoints and experiences 19 | * Gracefully accepting constructive criticism 20 | * Focusing on what is best for the community 21 | * Showing empathy towards other community members 22 | 23 | Examples of unacceptable behavior by participants include: 24 | 25 | * The use of sexualized language or imagery and unwelcome sexual attention or 26 | advances 27 | * Trolling, insulting/derogatory comments, and personal or political attacks 28 | * Public or private harassment 29 | * Publishing others' private information, such as a physical or electronic 30 | address, without explicit permission 31 | * Other conduct which could reasonably be considered inappropriate in a 32 | professional setting 33 | 34 | ## Our Responsibilities 35 | 36 | Project maintainers are responsible for clarifying the standards of acceptable 37 | behavior and are expected to take appropriate and fair corrective action in 38 | response to any instances of unacceptable behavior. 
39 | 40 | Project maintainers have the right and responsibility to remove, edit, or 41 | reject comments, commits, code, wiki edits, issues, and other contributions 42 | that are not aligned to this Code of Conduct, or to ban temporarily or 43 | permanently any contributor for other behaviors that they deem inappropriate, 44 | threatening, offensive, or harmful. 45 | 46 | ## Scope 47 | 48 | This Code of Conduct applies within all project spaces, and it also applies when 49 | an individual is representing the project or its community in public spaces. 50 | Examples of representing a project or community include using an official 51 | project e-mail address, posting via an official social media account, or acting 52 | as an appointed representative at an online or offline event. Representation of 53 | a project may be further defined and clarified by project maintainers. 54 | 55 | ## Enforcement 56 | 57 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 58 | reported by contacting the project team at . All 59 | complaints will be reviewed and investigated and will result in a response that 60 | is deemed necessary and appropriate to the circumstances. The project team is 61 | obligated to maintain confidentiality with regard to the reporter of an incident. 62 | Further details of specific enforcement policies may be posted separately. 63 | 64 | Project maintainers who do not follow or enforce the Code of Conduct in good 65 | faith may face temporary or permanent repercussions as determined by other 66 | members of the project's leadership. 
67 | 68 | ## Attribution 69 | 70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, 71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html 72 | 73 | [homepage]: https://www.contributor-covenant.org 74 | 75 | For answers to common questions about this code of conduct, see 76 | https://www.contributor-covenant.org/faq -------------------------------------------------------------------------------- /winterfell/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to this library 2 | We want to make contributing to this project as easy and transparent as 3 | possible. 4 | 5 | ## Pull Requests 6 | We actively welcome your pull requests. 7 | 8 | 1. Fork the repo and create your branch from `main`. 9 | 2. If you've added code that should be tested, add tests. 10 | 3. If you've changed APIs, update the documentation. 11 | 4. Ensure the test suite passes. 12 | 5. If you haven't already, complete the Contributor License Agreement ("CLA"). 13 | 14 | ## Contributor License Agreement ("CLA") 15 | In order to accept your pull request, we need you to submit a CLA. You only need 16 | to do this once to work on any of Facebook's open source projects. 17 | 18 | Complete your CLA here: 19 | 20 | ## Issues 21 | We use GitHub issues to track public bugs. Please ensure your description is 22 | clear and has sufficient instructions to be able to reproduce the issue. 23 | 24 | Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe 25 | disclosure of security bugs. In those cases, please go through the process 26 | outlined on that page and do not file a public issue. 27 | 28 | ## License 29 | By contributing to Winterfell, you agree that your contributions will be 30 | licensed under the LICENSE file in the root directory of this source tree. 
-------------------------------------------------------------------------------- /winterfell/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Facebook, Inc. and its affiliates. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /winterfell/air/.cargo/katex-header.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VictorColomb/stark-snark-recursive-proofs/185076895b7339daa8dd45b9160ed19a1de9ce3f/winterfell/air/.cargo/katex-header.html -------------------------------------------------------------------------------- /winterfell/air/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-air" 3 | version = "0.4.0" 4 | description = "AIR components for the Winterfell STARK prover/verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-air/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "arithmetization", "air"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [features] 19 | default = ["std"] 20 | std = ["crypto/std", "fri/std", "math/std", "utils/std"] 21 | 22 | [dependencies] 23 | crypto = { version = "0.4", path = "../crypto", package = "winter-crypto", default-features = false } 24 | fri = { version = "0.4", path = "../fri", package = "winter-fri", default-features = false } 25 | math = { version = "0.4", path = "../math", package = "winter-math", default-features = false } 26 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 27 | 28 | [dev-dependencies] 29 | rand-utils = { version = "0.4", path = "../utils/rand", package = "winter-rand-utils" } 30 | 31 | # Allow math in docs 32 | [package.metadata.docs.rs] 33 | rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] 34 | -------------------------------------------------------------------------------- 
/winterfell/air/src/air/transition/frame.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::{FieldElement, Vec}; 7 | 8 | // EVALUATION FRAME 9 | // ================================================================================================ 10 | /// A set of execution trace rows required for evaluation of transition constraints. 11 | /// 12 | /// In the current implementation, an evaluation frame always contains two consecutive rows of the 13 | /// execution trace. It is passed in as one of the parameters into 14 | /// [Air::evaluate_transition()](crate::Air::evaluate_transition) function. 15 | #[derive(Debug, Clone)] 16 | pub struct EvaluationFrame { 17 | current: Vec, 18 | next: Vec, 19 | } 20 | 21 | impl EvaluationFrame { 22 | // CONSTRUCTORS 23 | // -------------------------------------------------------------------------------------------- 24 | 25 | /// Returns a new evaluation frame instantiated with the specified number of columns. 26 | /// 27 | /// # Panics 28 | /// Panics if `num_columns` is zero. 29 | pub fn new(num_columns: usize) -> Self { 30 | assert!( 31 | num_columns > 0, 32 | "number of columns must be greater than zero" 33 | ); 34 | EvaluationFrame { 35 | current: E::zeroed_vector(num_columns), 36 | next: E::zeroed_vector(num_columns), 37 | } 38 | } 39 | 40 | /// Returns a new evaluation frame instantiated from the provided rows. 41 | /// 42 | /// # Panics 43 | /// Panics if: 44 | /// * Lengths of the provided rows are zero. 45 | /// * Lengths of the provided rows are not the same. 
46 | pub fn from_rows(current: Vec, next: Vec) -> Self { 47 | assert!(!current.is_empty(), "a row must contain at least one value"); 48 | assert_eq!( 49 | current.len(), 50 | next.len(), 51 | "number of values in the rows must be the same" 52 | ); 53 | Self { current, next } 54 | } 55 | 56 | // ROW ACCESSORS 57 | // -------------------------------------------------------------------------------------------- 58 | 59 | /// Returns a reference to the current row. 60 | #[inline(always)] 61 | pub fn current(&self) -> &[E] { 62 | &self.current 63 | } 64 | 65 | /// Returns a mutable reference to the current row. 66 | #[inline(always)] 67 | pub fn current_mut(&mut self) -> &mut [E] { 68 | &mut self.current 69 | } 70 | 71 | /// Returns a reference to the next row. 72 | #[inline(always)] 73 | pub fn next(&self) -> &[E] { 74 | &self.next 75 | } 76 | 77 | /// Returns a mutable reference to the next row. 78 | #[inline(always)] 79 | pub fn next_mut(&mut self) -> &mut [E] { 80 | &mut self.next 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /winterfell/air/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use core::fmt; 7 | 8 | // ASSERTION ERROR 9 | // ================================================================================================ 10 | /// Represents an error returned during assertion evaluation. 11 | #[derive(Debug, PartialEq)] 12 | pub enum AssertionError { 13 | /// This error occurs when an assertion is evaluated against an execution trace which does not 14 | /// contain a column specified by the assertion. 
15 | TraceWidthTooShort(usize, usize), 16 | /// This error occurs when an assertion is evaluated against an execution trace with length 17 | /// which is not a power of two. 18 | TraceLengthNotPowerOfTwo(usize), 19 | /// This error occurs when an assertion is evaluated against an execution trace which does not 20 | /// contain a step against which the assertion is placed. 21 | TraceLengthTooShort(usize, usize), 22 | /// This error occurs when a `Sequence` assertion is placed against an execution trace with 23 | /// length which conflicts with the trace length implied by the assertion. 24 | TraceLengthNotExact(usize, usize), 25 | } 26 | 27 | impl fmt::Display for AssertionError { 28 | #[rustfmt::skip] 29 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 30 | match self { 31 | Self::TraceWidthTooShort(expected, actual) => { 32 | write!(f, "expected trace width to be at least {}, but was {}", expected, actual) 33 | } 34 | Self::TraceLengthNotPowerOfTwo(actual) => { 35 | write!(f, "expected trace length to be a power of two, but was {}", actual) 36 | } 37 | Self::TraceLengthTooShort(expected, actual) => { 38 | write!(f, "expected trace length to be at least {}, but was {}", expected, actual) 39 | } 40 | Self::TraceLengthNotExact(expected, actual) => { 41 | write!(f, "expected trace length to be exactly {}, but was {}", expected, actual) 42 | } 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /winterfell/air/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! This crate contains components need to describe arbitrary computations in a STARK-specific 7 | //! format. 8 | //! 9 | //! 
Before we can generate proofs attesting that some computations were executed correctly, we 10 | //! need to describe these computations in a way that can be understood by the Winterfell prover 11 | //! and verifier. 12 | //! 13 | //! More formally, we need to reduce our computations to algebraic statements involving a set of 14 | //! bounded-degree polynomials. This step is usually called *arithmetization*. STARK arithmetization 15 | //! reduces computations to an *algebraic intermediate representation* or AIR for short. For basics 16 | //! of AIR arithmetization please refer to the excellent posts from StarkWare: 17 | //! 18 | //! * [Arithmetization I](https://medium.com/starkware/arithmetization-i-15c046390862) 19 | //! * [Arithmetization II](https://medium.com/starkware/arithmetization-ii-403c3b3f4355) 20 | //! * [StarkDEX Deep Dive: the STARK Core Engine](https://medium.com/starkware/starkdex-deep-dive-the-stark-core-engine-497942d0f0ab) 21 | //! 22 | //! Coming up with efficient arithmetizations for computations is highly non-trivial, and 23 | //! describing arithmetizations could be tedious and error-prone. The [Air] trait aims to help 24 | //! with the latter, which, hopefully, also makes the former a little simpler. For additional 25 | //! details, please refer to the documentation of the [Air] trait itself. 26 | //! 27 | //! This crate also contains components describing STARK protocol parameters ([ProofOptions]) and 28 | //! proof structure ([StarkProof](proof::StarkProof)). 
29 | 30 | #![cfg_attr(not(feature = "std"), no_std)] 31 | 32 | #[cfg(not(feature = "std"))] 33 | #[macro_use] 34 | extern crate alloc; 35 | 36 | pub mod proof; 37 | 38 | mod errors; 39 | pub use errors::AssertionError; 40 | 41 | mod options; 42 | pub use options::{FieldExtension, HashFunction, ProofOptions}; 43 | 44 | mod air; 45 | pub use air::{ 46 | Air, AirContext, Assertion, AuxTraceRandElements, BoundaryConstraint, BoundaryConstraintGroup, 47 | BoundaryConstraints, ConstraintCompositionCoefficients, ConstraintDivisor, 48 | DeepCompositionCoefficients, EvaluationFrame, TraceInfo, TraceLayout, 49 | TransitionConstraintDegree, TransitionConstraintGroup, TransitionConstraints, 50 | }; 51 | -------------------------------------------------------------------------------- /winterfell/air/src/proof/commitments.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use crypto::Hasher; 7 | use utils::{ 8 | collections::Vec, ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, 9 | SliceReader, 10 | }; 11 | 12 | // COMMITMENTS 13 | // ================================================================================================ 14 | /// Commitments made by the prover during commit phase of the protocol. 15 | /// 16 | /// These commitments include: 17 | /// * Commitment to the extended execution trace, which may include commitments to one or more 18 | /// execution trace segments. 19 | /// * Commitment to the evaluations of constraint composition polynomial over LDE domain. 20 | /// * Commitments to the evaluations of polynomials at all FRI layers. 21 | /// 22 | /// Internally, the commitments are stored as a sequence of bytes. 
Thus, to retrieve the 23 | /// commitments, [parse()](Commitments::parse) function should be used. 24 | #[derive(Debug, Clone, Default, Eq, PartialEq)] 25 | pub struct Commitments(Vec); 26 | 27 | impl Commitments { 28 | // CONSTRUCTOR 29 | // -------------------------------------------------------------------------------------------- 30 | /// Returns a new Commitments struct initialized with the provided commitments. 31 | pub fn new( 32 | trace_roots: Vec, 33 | constraint_root: H::Digest, 34 | fri_roots: Vec, 35 | ) -> Self { 36 | let mut bytes = Vec::new(); 37 | bytes.write(trace_roots); 38 | bytes.write(constraint_root); 39 | bytes.write(fri_roots); 40 | Commitments(bytes) 41 | } 42 | 43 | // PUBLIC METHODS 44 | // -------------------------------------------------------------------------------------------- 45 | 46 | /// Adds the specified commitment to the list of commitments. 47 | pub fn add(&mut self, commitment: &H::Digest) { 48 | commitment.write_into(&mut self.0); 49 | } 50 | 51 | // PARSING 52 | // -------------------------------------------------------------------------------------------- 53 | 54 | /// Parses the serialized commitments into distinct parts. 55 | /// 56 | /// The parts are (in the order in which they appear in the tuple): 57 | /// 1. Extended execution trace commitments. 58 | /// 2. Constraint composition polynomial evaluation commitment. 59 | /// 3. FRI layer commitments. 60 | /// 61 | /// # Errors 62 | /// Returns an error if the bytes stored in self could not be parsed into the requested number 63 | /// of commitments, or if there are any unconsumed bytes remaining after the parsing completes. 
64 | #[allow(clippy::type_complexity)] 65 | pub fn parse( 66 | self, 67 | num_trace_segments: usize, 68 | num_fri_layers: usize, 69 | ) -> Result<(Vec, H::Digest, Vec), DeserializationError> { 70 | let mut reader = SliceReader::new(&self.0); 71 | 72 | // parse trace commitments 73 | let trace_commitments = H::Digest::read_batch_from(&mut reader, num_trace_segments)?; 74 | 75 | // parse constraint evaluation commitment: 76 | let constraint_commitment = H::Digest::read_from(&mut reader)?; 77 | 78 | // read FRI commitments (+1 is for FRI remainder commitment) 79 | let fri_commitments = H::Digest::read_batch_from(&mut reader, num_fri_layers + 1)?; 80 | 81 | // make sure we consumed all available commitment bytes 82 | if reader.has_more_bytes() { 83 | return Err(DeserializationError::UnconsumedBytes); 84 | } 85 | Ok((trace_commitments, constraint_commitment, fri_commitments)) 86 | } 87 | } 88 | 89 | impl Serializable for Commitments { 90 | /// Serializes `self` and writes the resulting bytes into the `target`. 91 | fn write_into(&self, target: &mut W) { 92 | assert!(self.0.len() < u16::MAX as usize); 93 | target.write_u16(self.0.len() as u16); 94 | target.write_u8_slice(&self.0); 95 | } 96 | } 97 | 98 | impl Deserializable for Commitments { 99 | /// Reads commitments from the specified `source` and returns the result. 100 | /// 101 | /// # Errors 102 | /// Returns an error of a valid Commitments struct could not be read from the specified 103 | /// `source`. 104 | fn read_from(source: &mut R) -> Result { 105 | let num_bytes = source.read_u16()? 
as usize; 106 | let result = source.read_u8_vec(num_bytes)?; 107 | Ok(Commitments(result)) 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /winterfell/air/src/proof/table.rs: -------------------------------------------------------------------------------- 1 | use super::{DeserializationError, SliceReader, Vec}; 2 | use core::iter::FusedIterator; 3 | use math::FieldElement; 4 | 5 | // CONSTANTS 6 | // ================================================================================================ 7 | 8 | const MAX_ROWS: usize = 255; 9 | const MAX_COLS: usize = 255; 10 | 11 | // TABLE 12 | // ================================================================================================ 13 | 14 | /// A two-dimensional table of field elements arranged in row-major order. 15 | /// 16 | /// This struct is used primarily to hold queried values of execution trace segments and constraint 17 | /// evaluations. In such cases, each row in the table corresponds to a single query, and each 18 | /// column corresponds to a trace segment column or a constraint evaluation column. 19 | #[derive(Debug, Clone)] 20 | pub struct Table { 21 | data: Vec, 22 | row_width: usize, 23 | } 24 | 25 | impl Table { 26 | // CONSTRUCTORS 27 | // -------------------------------------------------------------------------------------------- 28 | /// Returns a new [Table] instantiated with data from the provided bytes. 29 | /// 30 | /// # Panics 31 | /// Panics if: 32 | /// * Specified number of rows is 0 or greater than 255. 33 | /// * Specified number of columns is 0 or greater than 255. 34 | /// * Provided bytes do not encode valid field elements required to fill the table. 
35 | pub fn from_bytes( 36 | bytes: &[u8], 37 | num_rows: usize, 38 | num_cols: usize, 39 | ) -> Result { 40 | assert!(num_rows > 0, "number of rows must be greater than 0"); 41 | assert!( 42 | num_rows < MAX_ROWS, 43 | "number of rows cannot exceed {}, but was {}", 44 | MAX_ROWS, 45 | num_rows 46 | ); 47 | assert!(num_cols > 0, "number of columns must be greater than 0"); 48 | assert!( 49 | num_cols < MAX_ROWS, 50 | "number of columns cannot exceed {}, but was {}", 51 | MAX_COLS, 52 | num_cols 53 | ); 54 | 55 | let mut reader = SliceReader::new(bytes); 56 | let num_elements = num_rows * num_cols; 57 | Ok(Self { 58 | data: E::read_batch_from(&mut reader, num_elements)?, 59 | row_width: num_cols, 60 | }) 61 | } 62 | 63 | // PUBLIC ACCESSORS 64 | // -------------------------------------------------------------------------------------------- 65 | 66 | /// Returns number of rows in this table. 67 | pub fn num_rows(&self) -> usize { 68 | self.data.len() / self.row_width 69 | } 70 | 71 | /// Returns number of columns in this table. 72 | pub fn num_columns(&self) -> usize { 73 | self.row_width 74 | } 75 | 76 | /// Returns a reference to a row at the specified index. 77 | pub fn get_row(&self, row_idx: usize) -> &[E] { 78 | let row_offset = row_idx * self.row_width; 79 | &self.data[row_offset..row_offset + self.row_width] 80 | } 81 | 82 | /// Returns an iterator over rows of this table. 83 | pub fn rows(&self) -> RowIterator { 84 | RowIterator::new(self) 85 | } 86 | 87 | // TABLE PROCESSING 88 | // -------------------------------------------------------------------------------------------- 89 | 90 | /// Combines multiple tables together into a single table by stacking tables column-wise (e.g. 91 | /// the number of rows remains the same but the number of columns changes). 92 | /// 93 | /// Currently, this method does not support inputs containing more than one table. 94 | /// 95 | /// # Panics 96 | /// Panics if the list of tables is empty. 
97 | pub fn merge(mut tables: Vec>) -> Table { 98 | assert!(!tables.is_empty(), "cannot merge an empty set of tables"); 99 | if tables.len() == 1 { 100 | tables.remove(0) 101 | } else { 102 | unimplemented!("merging of multiple tables is not yet implemented") 103 | } 104 | } 105 | } 106 | 107 | // COLUMN ITERATOR 108 | // ================================================================================================ 109 | 110 | pub struct RowIterator<'a, E: FieldElement> { 111 | table: &'a Table, 112 | cursor: usize, 113 | } 114 | 115 | impl<'a, E: FieldElement> RowIterator<'a, E> { 116 | pub fn new(table: &'a Table) -> Self { 117 | Self { table, cursor: 0 } 118 | } 119 | } 120 | 121 | impl<'a, E: FieldElement> Iterator for RowIterator<'a, E> { 122 | type Item = &'a [E]; 123 | 124 | fn next(&mut self) -> Option { 125 | match self.table.num_rows() - self.cursor { 126 | 0 => None, 127 | _ => { 128 | let row = self.table.get_row(self.cursor); 129 | self.cursor += 1; 130 | Some(row) 131 | } 132 | } 133 | } 134 | } 135 | 136 | impl<'a, E: FieldElement> ExactSizeIterator for RowIterator<'a, E> { 137 | fn len(&self) -> usize { 138 | self.table.num_rows() 139 | } 140 | } 141 | 142 | impl<'a, E: FieldElement> FusedIterator for RowIterator<'a, E> {} 143 | -------------------------------------------------------------------------------- /winterfell/crypto/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-crypto" 3 | version = "0.4.0" 4 | description = "Cryptographic library for the Winterfell STARK prover/verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-crypto/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "merkle-tree", "hash"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 
18 | [[bench]] 19 | name = "hash" 20 | harness = false 21 | 22 | [[bench]] 23 | name = "merkle" 24 | harness = false 25 | required-features = ["concurrent"] 26 | 27 | [features] 28 | default = ["std"] 29 | concurrent = ["utils/concurrent", "std"] 30 | std = ["blake3/std", "math/std", "sha3/std", "utils/std"] 31 | 32 | [dependencies] 33 | blake3 = { version = "1.0", default-features = false } 34 | math = { version = "0.4", path = "../math", package = "winter-math", default-features = false } 35 | sha3 = { version = "0.10", default-features = false } 36 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 37 | 38 | [dev-dependencies] 39 | criterion = "0.3" 40 | proptest = "1.0" 41 | rand-utils = { version = "0.4", path = "../utils/rand", package = "winter-rand-utils" } 42 | -------------------------------------------------------------------------------- /winterfell/crypto/benches/hash.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use criterion::{black_box, criterion_group, criterion_main, BatchSize, Criterion}; 7 | use math::fields::f128; 8 | use rand_utils::rand_value; 9 | use winter_crypto::{ 10 | hashers::{Blake3_256, Poseidon, Rp62_248, Rp64_256, Sha3_256}, 11 | Hasher, 12 | }; 13 | 14 | type Blake3 = Blake3_256; 15 | type Blake3Digest = ::Digest; 16 | 17 | type Sha3 = Sha3_256; 18 | type Sha3Digest = ::Digest; 19 | 20 | type Rp62_248Digest = ::Digest; 21 | type Rp64_256Digest = ::Digest; 22 | 23 | type PoseidonHash = Poseidon; 24 | type PoseidonDigest = ::Digest; 25 | 26 | fn blake3(c: &mut Criterion) { 27 | let v: [Blake3Digest; 2] = [Blake3::hash(&[1u8]), Blake3::hash(&[2u8])]; 28 | c.bench_function("hash_blake3 (cached)", |bench| { 29 | bench.iter(|| Blake3::merge(black_box(&v))) 30 | }); 31 | 32 | c.bench_function("hash_blake3 (random)", |b| { 33 | b.iter_batched( 34 | || { 35 | [ 36 | Blake3::hash(&rand_value::().to_le_bytes()), 37 | Blake3::hash(&rand_value::().to_le_bytes()), 38 | ] 39 | }, 40 | |state| Blake3::merge(&state), 41 | BatchSize::SmallInput, 42 | ) 43 | }); 44 | } 45 | 46 | fn sha3(c: &mut Criterion) { 47 | let v: [Sha3Digest; 2] = [Sha3::hash(&[1u8]), Sha3::hash(&[2u8])]; 48 | c.bench_function("hash_sha3 (cached)", |bench| { 49 | bench.iter(|| Sha3::merge(black_box(&v))) 50 | }); 51 | 52 | c.bench_function("hash_sha3 (random)", |b| { 53 | b.iter_batched( 54 | || { 55 | [ 56 | Sha3::hash(&rand_value::().to_le_bytes()), 57 | Sha3::hash(&rand_value::().to_le_bytes()), 58 | ] 59 | }, 60 | |state| Sha3::merge(&state), 61 | BatchSize::SmallInput, 62 | ) 63 | }); 64 | } 65 | 66 | fn rescue248(c: &mut Criterion) { 67 | let v: [Rp62_248Digest; 2] = [Rp62_248::hash(&[1u8]), Rp62_248::hash(&[2u8])]; 68 | c.bench_function("hash_rp62_248 (cached)", |bench| { 69 | bench.iter(|| Rp62_248::merge(black_box(&v))) 70 | }); 71 | 72 | c.bench_function("hash_rp62_248 (random)", |b| { 73 | b.iter_batched( 74 | || { 75 | [ 76 | Rp62_248::hash(&rand_value::().to_le_bytes()), 77 | 
Rp62_248::hash(&rand_value::().to_le_bytes()), 78 | ] 79 | }, 80 | |state| Rp62_248::merge(&state), 81 | BatchSize::SmallInput, 82 | ) 83 | }); 84 | } 85 | 86 | fn rescue256(c: &mut Criterion) { 87 | let v: [Rp64_256Digest; 2] = [Rp64_256::hash(&[1u8]), Rp64_256::hash(&[2u8])]; 88 | c.bench_function("hash_Rp64_256 (cached)", |bench| { 89 | bench.iter(|| Rp64_256::merge(black_box(&v))) 90 | }); 91 | 92 | c.bench_function("hash_Rp64_256 (random)", |b| { 93 | b.iter_batched( 94 | || { 95 | [ 96 | Rp64_256::hash(&rand_value::().to_le_bytes()), 97 | Rp64_256::hash(&rand_value::().to_le_bytes()), 98 | ] 99 | }, 100 | |state| Rp64_256::merge(&state), 101 | BatchSize::SmallInput, 102 | ) 103 | }); 104 | } 105 | 106 | fn poseidon(c: &mut Criterion) { 107 | let v: [PoseidonDigest; 2] = [PoseidonHash::hash(&[1u8]), PoseidonHash::hash(&[2u8])]; 108 | c.bench_function("hash_Poseidon (cached)", |bench| { 109 | bench.iter(|| PoseidonHash::merge(black_box(&v))) 110 | }); 111 | 112 | c.bench_function("hash_Poseidon (random)", |b| { 113 | b.iter_batched( 114 | || { 115 | [ 116 | PoseidonHash::hash(&rand_value::().to_le_bytes()), 117 | PoseidonHash::hash(&rand_value::().to_le_bytes()), 118 | ] 119 | }, 120 | |state| PoseidonHash::merge(&state), 121 | BatchSize::SmallInput, 122 | ) 123 | }); 124 | } 125 | 126 | criterion_group!(hash_group, blake3, sha3, rescue248, rescue256, poseidon); 127 | criterion_main!(hash_group); 128 | -------------------------------------------------------------------------------- /winterfell/crypto/benches/merkle.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 7 | use math::fields::f128::BaseElement; 8 | use rand_utils::rand_value; 9 | use utils::uninit_vector; 10 | use winter_crypto::{build_merkle_nodes, concurrent, hashers::Blake3_256, Hasher}; 11 | 12 | type Blake3 = Blake3_256; 13 | type Blake3Digest = ::Digest; 14 | 15 | pub fn merkle_tree_construction(c: &mut Criterion) { 16 | let mut merkle_group = c.benchmark_group("merkle tree construction"); 17 | 18 | static BATCH_SIZES: [usize; 3] = [65536, 131072, 262144]; 19 | 20 | for size in &BATCH_SIZES { 21 | let data: Vec = { 22 | let mut res = unsafe { uninit_vector(*size) }; 23 | for i in 0..*size { 24 | res[i] = Blake3::hash(&rand_value::().to_le_bytes()); 25 | } 26 | res 27 | }; 28 | merkle_group.bench_with_input(BenchmarkId::new("sequential", size), &data, |b, i| { 29 | b.iter(|| build_merkle_nodes::(&i)) 30 | }); 31 | merkle_group.bench_with_input(BenchmarkId::new("concurrent", size), &data, |b, i| { 32 | b.iter(|| concurrent::build_merkle_nodes::(&i)) 33 | }); 34 | } 35 | } 36 | 37 | criterion_group!(merkle_group, merkle_tree_construction,); 38 | criterion_main!(merkle_group); 39 | -------------------------------------------------------------------------------- /winterfell/crypto/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use core::fmt; 7 | 8 | // MERKLE TREE ERROR 9 | // ================================================================================================ 10 | 11 | /// Defines errors which can occur when using Merkle trees. 12 | #[derive(Debug, PartialEq)] 13 | pub enum MerkleTreeError { 14 | /// Fewer than two leaves were used to construct a Merkle tree. 
15 | TooFewLeaves(usize, usize), 16 | /// Number of leaves for a Merkle tree was not a power of two. 17 | NumberOfLeavesNotPowerOfTwo(usize), 18 | /// A leaf index was greater than or equal to the number of leaves in the tree. 19 | LeafIndexOutOfBounds(usize, usize), 20 | /// A leaf index was included more than once in the list of indexes for a batch proof. 21 | DuplicateLeafIndex, 22 | /// No leaf indexes were provided for a batch Merkle proof. 23 | TooFewLeafIndexes, 24 | /// Too many leaf index were provided for a batch Merkle proof. 25 | TooManyLeafIndexes(usize, usize), 26 | /// Merkle proof is not valid for the specified position(s). 27 | InvalidProof, 28 | } 29 | 30 | impl fmt::Display for MerkleTreeError { 31 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 32 | match self { 33 | Self::TooFewLeaves(expected, actual) => { 34 | write!( 35 | f, 36 | "a Merkle tree must contain at least {} leaves, but {} were provided", 37 | expected, actual 38 | ) 39 | } 40 | Self::NumberOfLeavesNotPowerOfTwo(num_leaves) => { 41 | write!( 42 | f, 43 | "number of leaves must be a power of two, but {} were provided", 44 | num_leaves 45 | ) 46 | } 47 | Self::LeafIndexOutOfBounds(expected, actual) => { 48 | write!( 49 | f, 50 | "a leaf index cannot exceed {}, but was {}", 51 | expected, actual 52 | ) 53 | } 54 | Self::DuplicateLeafIndex => { 55 | write!(f, "repeating indexes detected") 56 | } 57 | Self::TooFewLeafIndexes => { 58 | write!(f, "at least one leaf index must be provided") 59 | } 60 | Self::TooManyLeafIndexes(max_indexes, num_indexes) => { 61 | write!( 62 | f, 63 | "number of leaf indexes cannot exceed {}, but was {} provided", 64 | max_indexes, num_indexes 65 | ) 66 | } 67 | Self::InvalidProof => { 68 | write!(f, "Merkle proof is invalid") 69 | } 70 | } 71 | } 72 | } 73 | 74 | // RANDOM COIN ERROR 75 | // ================================================================================================ 76 | 77 | /// Defines errors which can occur when drawing 
values from a random coin. 78 | #[derive(Debug, PartialEq)] 79 | pub enum RandomCoinError { 80 | /// A valid element could not be drawn from the field after the specified number of tries. 81 | FailedToDrawFieldElement(usize), 82 | /// The required number of integer values could not be drawn from the specified domain after 83 | /// the specified number of tries. 84 | FailedToDrawIntegers(usize, usize, usize), 85 | } 86 | 87 | impl fmt::Display for RandomCoinError { 88 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 89 | match self { 90 | Self::FailedToDrawFieldElement(num_tries) => { 91 | write!( 92 | f, 93 | "failed to generate a valid field element after {} tries", 94 | num_tries 95 | ) 96 | } 97 | Self::FailedToDrawIntegers(num_expected, num_actual, num_tries) => { 98 | write!( 99 | f, 100 | "needed to draw {} integers from a domain, but drew only {} after {} tries", 101 | num_expected, num_actual, num_tries 102 | ) 103 | } 104 | } 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/blake/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::{ByteDigest, ElementHasher, Hasher}; 7 | use core::{convert::TryInto, fmt::Debug, marker::PhantomData}; 8 | use math::{FieldElement, StarkField}; 9 | use utils::ByteWriter; 10 | 11 | #[cfg(test)] 12 | mod tests; 13 | 14 | // BLAKE3 256-BIT OUTPUT 15 | // ================================================================================================ 16 | 17 | /// Implementation of the [Hasher](super::Hasher) trait for BLAKE3 hash function with 256-bit 18 | /// output. 
19 | #[derive(Debug, PartialEq, Eq)] 20 | pub struct Blake3_256(PhantomData); 21 | 22 | impl Hasher for Blake3_256 { 23 | type Digest = ByteDigest<32>; 24 | 25 | fn hash(bytes: &[u8]) -> Self::Digest { 26 | ByteDigest(*blake3::hash(bytes).as_bytes()) 27 | } 28 | 29 | fn merge(values: &[Self::Digest; 2]) -> Self::Digest { 30 | ByteDigest(blake3::hash(ByteDigest::digests_as_bytes(values)).into()) 31 | } 32 | 33 | fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { 34 | let mut data = [0; 40]; 35 | data[..32].copy_from_slice(&seed.0); 36 | data[32..].copy_from_slice(&value.to_le_bytes()); 37 | ByteDigest(*blake3::hash(&data).as_bytes()) 38 | } 39 | } 40 | 41 | impl ElementHasher for Blake3_256 { 42 | type BaseField = B; 43 | 44 | fn hash_elements>(elements: &[E]) -> Self::Digest { 45 | if B::IS_CANONICAL { 46 | // when element's internal and canonical representations are the same, we can hash 47 | // element bytes directly 48 | let bytes = E::elements_as_bytes(elements); 49 | ByteDigest(*blake3::hash(bytes).as_bytes()) 50 | } else { 51 | // when elements' internal and canonical representations differ, we need to serialize 52 | // them before hashing 53 | let mut hasher = BlakeHasher::new(); 54 | hasher.write(elements); 55 | ByteDigest(hasher.finalize()) 56 | } 57 | } 58 | } 59 | 60 | // BLAKE3 192-BIT OUTPUT 61 | // ================================================================================================ 62 | 63 | /// Implementation of the [Hasher](super::Hasher) trait for BLAKE3 hash function with 192-bit 64 | /// output. 
65 | #[derive(Debug, PartialEq, Eq)] 66 | pub struct Blake3_192(PhantomData); 67 | 68 | impl Hasher for Blake3_192 { 69 | type Digest = ByteDigest<24>; 70 | 71 | fn hash(bytes: &[u8]) -> Self::Digest { 72 | let result = blake3::hash(bytes); 73 | ByteDigest(result.as_bytes()[..24].try_into().unwrap()) 74 | } 75 | 76 | fn merge(values: &[Self::Digest; 2]) -> Self::Digest { 77 | let result = blake3::hash(ByteDigest::digests_as_bytes(values)); 78 | ByteDigest(result.as_bytes()[..24].try_into().unwrap()) 79 | } 80 | 81 | fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { 82 | let mut data = [0; 32]; 83 | data[..24].copy_from_slice(&seed.0); 84 | data[24..].copy_from_slice(&value.to_le_bytes()); 85 | 86 | let result = blake3::hash(&data); 87 | ByteDigest(result.as_bytes()[..24].try_into().unwrap()) 88 | } 89 | } 90 | 91 | impl ElementHasher for Blake3_192 { 92 | type BaseField = B; 93 | 94 | fn hash_elements>(elements: &[E]) -> Self::Digest { 95 | if B::IS_CANONICAL { 96 | // when element's internal and canonical representations are the same, we can hash 97 | // element bytes directly 98 | let bytes = E::elements_as_bytes(elements); 99 | let result = blake3::hash(bytes); 100 | ByteDigest(result.as_bytes()[..24].try_into().unwrap()) 101 | } else { 102 | // when elements' internal and canonical representations differ, we need to serialize 103 | // them before hashing 104 | let mut hasher = BlakeHasher::new(); 105 | hasher.write(elements); 106 | let result = hasher.finalize(); 107 | ByteDigest(result[..24].try_into().unwrap()) 108 | } 109 | } 110 | } 111 | 112 | // BLAKE HASHER 113 | // ================================================================================================ 114 | 115 | /// Wrapper around BLAKE3 hasher to implement [ByteWriter] trait for it. 
116 | struct BlakeHasher(blake3::Hasher); 117 | 118 | impl BlakeHasher { 119 | pub fn new() -> Self { 120 | Self(blake3::Hasher::new()) 121 | } 122 | 123 | pub fn finalize(&self) -> [u8; 32] { 124 | *self.0.finalize().as_bytes() 125 | } 126 | } 127 | 128 | impl ByteWriter for BlakeHasher { 129 | fn write_u8(&mut self, value: u8) { 130 | self.0.update(&[value]); 131 | } 132 | 133 | fn write_u8_slice(&mut self, values: &[u8]) { 134 | self.0.update(values); 135 | } 136 | } 137 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/blake/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::{Blake3_256, ElementHasher, Hasher}; 7 | use math::{fields::f62::BaseElement, FieldElement}; 8 | use rand_utils::rand_array; 9 | 10 | #[test] 11 | fn hash_padding() { 12 | let b1 = [1_u8, 2, 3]; 13 | let b2 = [1_u8, 2, 3, 0]; 14 | 15 | // adding a zero bytes at the end of a byte string should result in a different hash 16 | let r1 = Blake3_256::::hash(&b1); 17 | let r2 = Blake3_256::::hash(&b2); 18 | assert_ne!(r1, r2); 19 | } 20 | 21 | #[test] 22 | fn hash_elements_padding() { 23 | let e1: [BaseElement; 2] = rand_array(); 24 | let e2 = [e1[0], e1[1], BaseElement::ZERO]; 25 | 26 | // adding a zero element at the end of a list of elements should result in a different hash 27 | let r1 = Blake3_256::hash_elements(&e1); 28 | let r2 = Blake3_256::hash_elements(&e2); 29 | assert_ne!(r1, r2); 30 | } 31 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 
2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use core::{fmt::Debug, slice}; 7 | use math::{FieldElement, StarkField}; 8 | use utils::{ByteReader, Deserializable, DeserializationError, Serializable}; 9 | 10 | mod blake; 11 | pub use blake::{Blake3_192, Blake3_256}; 12 | 13 | mod sha; 14 | pub use sha::Sha3_256; 15 | 16 | mod rescue; 17 | pub use rescue::{Rp62_248, Rp64_256}; 18 | 19 | mod poseidon; 20 | pub use poseidon::Poseidon; 21 | // HASHER TRAITS 22 | // ================================================================================================ 23 | 24 | /// Defines a cryptographic hash function. 25 | /// 26 | /// This trait defined hash procedures for the following inputs: 27 | /// * A sequence of bytes. 28 | /// * Two digests - this is intended for use in Merkle tree constructions. 29 | /// * A digests and a u64 value - this intended for use in PRNG or PoW contexts. 30 | pub trait Hasher { 31 | /// Specifies a digest type returned by this hasher. 32 | type Digest: Digest; 33 | 34 | /// Returns a hash of the provided sequence of bytes. 35 | fn hash(bytes: &[u8]) -> Self::Digest; 36 | 37 | /// Returns a hash of two digests. This method is intended for use in construction of 38 | /// Merkle trees. 39 | fn merge(values: &[Self::Digest; 2]) -> Self::Digest; 40 | 41 | /// Returns hash(`seed` || `value`). This method is intended for use in PRNG and PoW contexts. 42 | fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest; 43 | } 44 | 45 | /// Defines a cryptographic hash function for hashing field elements. 46 | /// 47 | /// This trait defines a hash procedure for a sequence of field elements. The elements can be 48 | /// either in the base field specified for this hasher, or in an extension of the base field. 49 | pub trait ElementHasher: Hasher { 50 | /// Specifies a base field for elements which can be hashed with this hasher. 
51 | type BaseField: StarkField; 52 | 53 | /// Returns a hash of the provided field elements. 54 | fn hash_elements(elements: &[E]) -> Self::Digest 55 | where 56 | E: FieldElement; 57 | } 58 | 59 | // DIGEST TRAIT 60 | // ================================================================================================ 61 | 62 | /// Defines output type for a cryptographic hash function. 63 | pub trait Digest: 64 | Debug + Default + Copy + Clone + Eq + PartialEq + Send + Sync + Serializable + Deserializable 65 | { 66 | /// Returns this digest serialized into an array of bytes. 67 | /// 68 | /// Ideally, the length of the returned array should be defined by an associated constant, but 69 | /// using associated constants in const generics is not supported by Rust yet. Thus, we put an 70 | /// upper limit on the possible digest size. For digests which are smaller than 32 bytes, the 71 | /// unused bytes should be set to 0. 72 | fn as_bytes(&self) -> [u8; 32]; 73 | } 74 | 75 | // BYTE DIGEST 76 | // ================================================================================================ 77 | 78 | #[derive(Debug, Copy, Clone, Eq, PartialEq)] 79 | pub struct ByteDigest([u8; N]); 80 | 81 | impl ByteDigest { 82 | pub fn new(value: [u8; N]) -> Self { 83 | Self(value) 84 | } 85 | 86 | #[inline(always)] 87 | pub fn bytes_as_digests(bytes: &[[u8; N]]) -> &[ByteDigest] { 88 | let p = bytes.as_ptr(); 89 | let len = bytes.len(); 90 | unsafe { slice::from_raw_parts(p as *const ByteDigest, len) } 91 | } 92 | 93 | #[inline(always)] 94 | pub fn digests_as_bytes(digests: &[ByteDigest]) -> &[u8] { 95 | let p = digests.as_ptr(); 96 | let len = digests.len() * N; 97 | unsafe { slice::from_raw_parts(p as *const u8, len) } 98 | } 99 | } 100 | 101 | impl Digest for ByteDigest { 102 | fn as_bytes(&self) -> [u8; 32] { 103 | let mut result = [0; 32]; 104 | result[..N].copy_from_slice(&self.0); 105 | result 106 | } 107 | } 108 | 109 | impl Default for ByteDigest { 110 | fn default() -> 
Self { 111 | ByteDigest([0; N]) 112 | } 113 | } 114 | 115 | impl Serializable for ByteDigest { 116 | fn write_into(&self, target: &mut W) { 117 | target.write_u8_slice(&self.0); 118 | } 119 | } 120 | 121 | impl Deserializable for ByteDigest { 122 | fn read_from(source: &mut R) -> Result { 123 | Ok(ByteDigest(source.read_u8_array()?)) 124 | } 125 | } 126 | 127 | #[cfg(test)] 128 | mod tests { 129 | use super::{ByteDigest, Digest}; 130 | 131 | #[test] 132 | fn byte_digest_as_bytes() { 133 | let d = ByteDigest::new([255_u8; 32]); 134 | assert_eq!([255_u8; 32], d.as_bytes()); 135 | 136 | let d = ByteDigest::new([255_u8; 31]); 137 | let mut expected = [255_u8; 32]; 138 | expected[31] = 0; 139 | assert_eq!(expected, d.as_bytes()); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/poseidon/Makefile: -------------------------------------------------------------------------------- 1 | bls12-381 := 52435875175126190479447740508185965837690552500527637822603658699938581184513 2 | bn-128 := 21888242871839275222246405745257275088548364400416034343698204186575808495617 3 | dbg_p := 18446744073709551359 4 | 5 | bn_constants: 6 | python3 generate_parameters_grain.sage.py 1 0 254 4 8 58 ${bn-128} > param.rs 7 | 8 | bls_constants: 9 | python3 generate_parameters_grain.sage.py 1 0 255 5 8 60 ${bls12-381} > param.rs 10 | 11 | dbg: 12 | python3 generate_parameters_grain.sage.py 1 0 64 24 8 42 ${dbg_p} > param.txt 13 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/poseidon/mod.rs: -------------------------------------------------------------------------------- 1 | mod param; 2 | use core::marker::PhantomData; 3 | 4 | // Optimized version of poseidon with same output as the basic permutation 5 | mod poseidon; 6 | 7 | #[cfg(test)] 8 | mod tests; 9 | 10 | use super::{ByteDigest, ElementHasher, Hasher}; 11 | use math::{FieldElement, StarkField}; 12 
| 13 | // POSEIDON WITH 256-BIT OUTPUT 14 | // =============================================================================================== 15 | /// Implementation of the [Hasher](super::Hasher) trait for POSEIDON hash function with 256-bit 16 | /// output. 17 | 18 | pub struct Poseidon(PhantomData); 19 | 20 | impl Hasher for Poseidon { 21 | type Digest = ByteDigest<32>; 22 | 23 | fn hash(bytes: &[u8]) -> Self::Digest { 24 | // return the first [RATE] elements of the state as hash result 25 | poseidon::digest(bytes) 26 | } 27 | 28 | fn merge(values: &[Self::Digest; 2]) -> Self::Digest { 29 | let mut data = [0; 64]; 30 | data[..32].copy_from_slice(values[0].0.as_slice()); 31 | data[32..].copy_from_slice(values[1].0.as_slice()); 32 | poseidon::digest(&data) 33 | } 34 | 35 | fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { 36 | let mut data = [0; 40]; 37 | data[..32].copy_from_slice(&seed.0); 38 | data[32..].copy_from_slice(&value.to_le_bytes()); 39 | poseidon::digest(&data) 40 | } 41 | } 42 | 43 | impl ElementHasher for Poseidon { 44 | type BaseField = B; 45 | 46 | fn hash_elements>(elements: &[E]) -> Self::Digest { 47 | assert!(B::IS_CANONICAL); 48 | 49 | let bytes = E::elements_as_bytes(elements); 50 | poseidon::digest(bytes) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/poseidon/poseidon.rs: -------------------------------------------------------------------------------- 1 | use crate::hash::ByteDigest; 2 | 3 | //padding with 0s and a single one 4 | use super::param::*; 5 | use math::{fields::f256::BaseElement, FieldElement}; 6 | use std::vec::Vec; 7 | 8 | pub(crate) fn digest(input: &[u8]) -> ByteDigest<32> { 9 | let mut formatted_input: Vec = vec![]; 10 | 11 | for chunk in input.chunks(32) { 12 | formatted_input.push(BaseElement::from_le_bytes(chunk)); 13 | } 14 | 15 | let mut output = formatted_input.clone(); 16 | 17 | padder(&mut output); 18 | 19 | 
ByteDigest(hash(&mut output)) 20 | } 21 | 22 | pub(crate) fn padder(input: &mut Vec) { 23 | let l = input.len(); 24 | let padded_length = (l / RATE + 1) * RATE; 25 | 26 | input.push(BaseElement::ONE); 27 | for _i in l + 1..padded_length { 28 | input.push(BaseElement::ZERO) 29 | } 30 | } 31 | 32 | pub(crate) fn hash(input: &mut Vec) -> [u8; 32 * DIGEST_SIZE] { 33 | let ref mut state = [BaseElement::ZERO; T].to_vec(); 34 | 35 | for i in 0..input.len() / RATE { 36 | //absorbtion 37 | for j in 0..RATE { 38 | state[j] += input[i * RATE + j] 39 | } 40 | permutation(state); 41 | } 42 | 43 | let mut output = [0_u8; 32 * DIGEST_SIZE]; 44 | for i in 0..DIGEST_SIZE { 45 | output[i..i + 32].copy_from_slice(&state[i].to_le_bytes()) 46 | } 47 | 48 | output 49 | } 50 | 51 | pub(crate) fn permutation(input: &mut Vec) { 52 | let ref mut state = input.clone()[..T].to_vec(); 53 | 54 | for j in 0..R_F / 2 { 55 | full_round(state, j); 56 | } 57 | 58 | for j in 0..R_P { 59 | partial_round(state, j + R_F / 2); 60 | } 61 | 62 | for j in 0..R_F / 2 { 63 | full_round(state, j + R_F / 2 + R_P); 64 | } 65 | 66 | input[..T].copy_from_slice(&state); 67 | } 68 | 69 | pub(crate) fn full_round(state: &mut Vec, i: usize) { 70 | add_constants(state, i); 71 | apply_sbox(state); 72 | apply_mds(state); 73 | } 74 | 75 | pub(crate) fn partial_round(state: &mut Vec, i: usize) { 76 | if i == R_F / 2 { 77 | add_constants(state, i); 78 | matrix_mul(state, MP); 79 | } 80 | 81 | state[0] = state[0].exp(ALPHA.into()); 82 | 83 | if i < R_F / 2 + R_P - 1 { 84 | state[0] += ROUND_CONSTANTS_OPTI[i+1][0]; 85 | } 86 | 87 | sparse_matrix(state, R_P - 1 - (i - R_F / 2)); 88 | } 89 | 90 | pub(crate) fn add_constants(state: &mut [BaseElement], round: usize) { 91 | for i in 0..T { 92 | state[i] += ROUND_CONSTANTS_OPTI[round][i]; 93 | } 94 | } 95 | 96 | pub(crate) fn apply_sbox(state: &mut [E]) { 97 | for i in 0..T { 98 | state[i] = state[i].exp(ALPHA.into()); 99 | } 100 | } 101 | 102 | pub(crate) fn matrix_mul>(state: 
&mut [E],m: [[BaseElement; T]; T] ) { 103 | let mut result = [E::ZERO; T]; 104 | let mut temp = [E::ZERO; T]; 105 | for i in 0..T { 106 | for j in 0..T { 107 | temp[j] = E::from(m[j][i]) * state[j]; 108 | } 109 | 110 | for j in 0..T { 111 | result[i] += temp[j]; 112 | } 113 | } 114 | state.copy_from_slice(&result); 115 | } 116 | 117 | pub(crate) fn sparse_matrix(state: &mut [BaseElement],i: usize) { 118 | let _v = V_COLLECTION[i]; 119 | let _w = W_HAT_COLLECTION[i]; 120 | let s0 = state[0]; 121 | 122 | state[0] *= M_0_0; 123 | 124 | for j in 1..T { 125 | state[0] += W_HAT_COLLECTION[i][j - 1] * state[j]; 126 | } 127 | 128 | for j in 1..T { 129 | state[j] += s0 * V_COLLECTION[i][j - 1]; 130 | } 131 | } 132 | 133 | pub(crate) fn apply_mds>(state: &mut [E]) { 134 | let mut result = [E::ZERO; T]; 135 | let mut temp = [E::ZERO; T]; 136 | for i in 0..T { 137 | for j in 0..T { 138 | temp[j] = E::from(MDS[i][j]) * state[j]; 139 | } 140 | 141 | for j in 0..T { 142 | result[i] += temp[j]; 143 | } 144 | } 145 | state.copy_from_slice(&result); 146 | } 147 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/poseidon/tests.rs: -------------------------------------------------------------------------------- 1 | use super::param::*; 2 | use super::poseidon; 3 | use math::fields::f256::{BaseElement, U256}; 4 | use math::FieldElement; 5 | use rand_utils::rand_array; 6 | 7 | #[test] 8 | fn test_sbox() { 9 | let state: [BaseElement; T] = rand_array(); 10 | 11 | let mut expected = state.clone(); 12 | expected.iter_mut().for_each(|v| *v = v.exp(ALPHA.into())); 13 | 14 | let mut actual = state; 15 | poseidon::apply_sbox(&mut actual); 16 | println!("{:?}", actual); 17 | assert_eq!(expected, actual); 18 | } 19 | 20 | #[test] 21 | fn test_mds() { 22 | let mut state = element_vec(T, &|i| i); 23 | 24 | poseidon::apply_mds(&mut state); 25 | 26 | // expected values are obtained by executing sage reference implementation code 27 | 28 | 
let expected = [ 29 | BaseElement(U256::from( 30 | "0xf47a5b360b59a13893b68e1e358334e084358abf3aaa900833200a2c6d1d52d", 31 | )), 32 | BaseElement(U256::from( 33 | "0x19d0095e2c71ff7b76f932791481c7d71ef535a8ed962b52c7648d8171253db8", 34 | )), 35 | BaseElement(U256::from( 36 | "0x3213e05466d7d030b09f48ef6964560bda50374ac5457abac46b3423e3a1e571", 37 | )), 38 | BaseElement(U256::from( 39 | "0x3c962d6b4732a622c8fab64d4b246e851cf3521329c157b99d44bc416fd63f1b", 40 | )), 41 | BaseElement(U256::from( 42 | "0x5e5b33996c55d24b440db9eeae3ec2ca620f0a4774f0091fc5014d7b8ee26a82", 43 | )), 44 | ]; 45 | assert_eq!(state, expected) 46 | } 47 | 48 | #[test] 49 | fn test_constants() { 50 | let mut state = element_vec(T, &|i| i); 51 | 52 | poseidon::add_constants(&mut state, 0); 53 | 54 | // expected values are obtained by executing sage reference implementation code 55 | let expected = [ 56 | BaseElement(U256::from( 57 | "0x5ee52b2f39e240a4006e97a15a7609dce42fa9aa510d11586a56db98fa925158", 58 | )), 59 | BaseElement(U256::from( 60 | "0x3e92829ce321755f769c6fd0d51e98262d7747ad553b028dbbe98b5274b9c8e2", 61 | )), 62 | BaseElement(U256::from( 63 | "0x7067b2b9b65af0519cef530217d4563543852399c2af1557fcd9eb325b5365e6", 64 | )), 65 | BaseElement(U256::from( 66 | "0x725e66aa00e406f247f00002487d092328c526f2f5a3c456004a71cea83845d8", 67 | )), 68 | BaseElement(U256::from( 69 | "0x72bf92303a9d433709d29979a296d98f147e8e7b8ed0cb452bd9f9508f6e4715", 70 | )), 71 | ]; 72 | 73 | assert_eq!(state, expected); 74 | } 75 | 76 | #[test] 77 | fn test_permutation() { 78 | let mut state = element_vec(T, &|i| i); 79 | 80 | // expected values are obtained by executing sage reference implementation code 81 | let expected = [ 82 | BaseElement(U256::from( 83 | "0x2a918b9c9f9bd7bb509331c81e297b5707f6fc7393dcee1b13901a0b22202e18", 84 | )), 85 | BaseElement(U256::from( 86 | "0x65ebf8671739eeb11fb217f2d5c5bf4a0c3f210e3f3cd3b08b5db75675d797f7", 87 | )), 88 | BaseElement(U256::from( 89 | 
"0x2cc176fc26bc70737a696a9dfd1b636ce360ee76926d182390cdb7459cf585ce", 90 | )), 91 | BaseElement(U256::from( 92 | "0x4dc4e29d283afd2a491fe6aef122b9a968e74eff05341f3cc23fda1781dcb566", 93 | )), 94 | BaseElement(U256::from( 95 | "0x3ff622da276830b9451b88b85e6184fd6ae15c8ab3ee25a5667be8592cce3b1", 96 | )), 97 | ]; 98 | poseidon::permutation(&mut state); 99 | dbg!(&state); 100 | assert_eq!(state, expected); 101 | } 102 | 103 | #[test] 104 | fn test_hash() { 105 | let mut state = element_vec(T, &|i| i); 106 | 107 | poseidon::padder(&mut state); 108 | 109 | let output = poseidon::hash(&mut state); 110 | 111 | // expected values are obtained by executing sage reference implementation code 112 | let expected: [u8; 32] = [ 113 | 9, 86, 3, 12, 160, 105, 236, 249, 54, 3, 34, 207, 252, 122, 39, 91, 21, 156, 202, 4, 107, 114 | 88, 95, 45, 61, 24, 40, 254, 16, 78, 58, 42, 115 | ]; 116 | assert_eq!(expected, output); 117 | } 118 | 119 | //HELPER FUNCTION 120 | 121 | fn element_vec(n: usize, f: &dyn Fn(usize) -> usize) -> Vec { 122 | let mut vec = vec![]; 123 | for i in 0usize..n { 124 | vec.push(BaseElement::from(f(i) as u128)); 125 | } 126 | return vec; 127 | } 128 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/rescue/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{Digest, ElementHasher, Hasher, StarkField}; 7 | 8 | mod rp62_248; 9 | pub use rp62_248::Rp62_248; 10 | 11 | mod rp64_256; 12 | pub use rp64_256::Rp64_256; 13 | 14 | // HELPER FUNCTIONS 15 | // ================================================================================================ 16 | 17 | #[inline(always)] 18 | fn exp_acc(base: [B; N], tail: [B; N]) -> [B; N] { 19 | let mut result = base; 20 | for _ in 0..M { 21 | result.iter_mut().for_each(|r| *r = r.square()); 22 | } 23 | result.iter_mut().zip(tail).for_each(|(r, t)| *r *= t); 24 | result 25 | } 26 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/rescue/rp62_248/digest.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{Digest, DIGEST_SIZE}; 7 | use core::slice; 8 | use math::{fields::f62::BaseElement, StarkField}; 9 | use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; 10 | 11 | // DIGEST TRAIT IMPLEMENTATIONS 12 | // ================================================================================================ 13 | 14 | #[derive(Debug, Copy, Clone, Eq, PartialEq)] 15 | pub struct ElementDigest([BaseElement; DIGEST_SIZE]); 16 | 17 | impl ElementDigest { 18 | pub fn new(value: [BaseElement; DIGEST_SIZE]) -> Self { 19 | Self(value) 20 | } 21 | 22 | pub fn as_elements(&self) -> &[BaseElement] { 23 | &self.0 24 | } 25 | 26 | pub fn digests_as_elements(digests: &[Self]) -> &[BaseElement] { 27 | let p = digests.as_ptr(); 28 | let len = digests.len() * DIGEST_SIZE; 29 | unsafe { slice::from_raw_parts(p as *const BaseElement, len) } 30 | } 31 | } 32 | 33 | impl Digest for ElementDigest { 34 | fn as_bytes(&self) -> [u8; 32] { 35 | let v1 = self.0[0].as_int(); 36 | let v2 = self.0[1].as_int(); 37 | let v3 = self.0[2].as_int(); 38 | let v4 = self.0[3].as_int(); 39 | 40 | let mut result = [0; 32]; 41 | result[..8].copy_from_slice(&(v1 | (v2 << 62)).to_le_bytes()); 42 | result[8..16].copy_from_slice(&((v2 >> 2) | (v3 << 60)).to_le_bytes()); 43 | result[16..24].copy_from_slice(&((v3 >> 4) | (v4 << 58)).to_le_bytes()); 44 | result[24..].copy_from_slice(&(v4 >> 6).to_le_bytes()); 45 | 46 | result 47 | } 48 | } 49 | 50 | impl Default for ElementDigest { 51 | fn default() -> Self { 52 | ElementDigest([BaseElement::default(); DIGEST_SIZE]) 53 | } 54 | } 55 | 56 | impl Serializable for ElementDigest { 57 | fn write_into(&self, target: &mut W) { 58 | target.write_u8_slice(&self.as_bytes()[..31]); 59 | } 60 | } 61 | 62 | impl Deserializable for ElementDigest { 63 | fn read_from(source: &mut R) -> Result { 64 | let v1 = source.read_u64()?; 65 | let v2 = source.read_u64()?; 66 | let v3 = source.read_u64()?; 67 | let v4 = source.read_u32()?; 68 | 
let v5 = source.read_u16()?; 69 | let v6 = source.read_u8()?; 70 | 71 | let e1 = BaseElement::new(v1 & 0x3FFFFFFFFFFFFFFF); 72 | let e2 = BaseElement::new(((v2 << 4) >> 2) | (v1 >> 62) & 0x3FFFFFFFFFFFFFFF); 73 | let e3 = BaseElement::new(((v3 << 6) >> 2) | (v2 >> 60) & 0x3FFFFFFFFFFFFFFF); 74 | let e4 = 75 | BaseElement::new(v3 >> 58 | (v4 as u64) << 6 | (v5 as u64) << 38 | (v6 as u64) << 54); 76 | 77 | Ok(Self([e1, e2, e3, e4])) 78 | } 79 | } 80 | 81 | // TESTS 82 | // ================================================================================================ 83 | 84 | #[cfg(test)] 85 | mod tests { 86 | 87 | use super::ElementDigest; 88 | use rand_utils::rand_array; 89 | use utils::{Deserializable, Serializable, SliceReader}; 90 | 91 | #[test] 92 | fn digest_serialization() { 93 | let d1 = ElementDigest(rand_array()); 94 | 95 | let mut bytes = vec![]; 96 | d1.write_into(&mut bytes); 97 | assert_eq!(31, bytes.len()); 98 | 99 | let mut reader = SliceReader::new(&bytes); 100 | let d2 = ElementDigest::read_from(&mut reader).unwrap(); 101 | 102 | assert_eq!(d1, d2); 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/rescue/rp62_248/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{ 7 | BaseElement, ElementDigest, ElementHasher, FieldElement, Hasher, Rp62_248, ALPHA, INV_ALPHA, 8 | STATE_WIDTH, 9 | }; 10 | use core::convert::TryInto; 11 | use math::StarkField; 12 | use rand_utils::{rand_array, rand_value}; 13 | 14 | #[test] 15 | fn test_alphas() { 16 | let e: BaseElement = rand_value(); 17 | let e_exp = e.exp(ALPHA.into()); 18 | assert_eq!(e, e_exp.exp(INV_ALPHA)); 19 | } 20 | 21 | #[test] 22 | fn test_inv_sbox() { 23 | let state: [BaseElement; STATE_WIDTH] = rand_array(); 24 | 25 | let mut expected = state; 26 | expected.iter_mut().for_each(|v| *v = v.exp(INV_ALPHA)); 27 | 28 | let mut actual = state; 29 | super::apply_inv_sbox(&mut actual); 30 | 31 | assert_eq!(expected, actual); 32 | } 33 | 34 | #[test] 35 | fn apply_permutation() { 36 | let mut state: [BaseElement; STATE_WIDTH] = [ 37 | BaseElement::new(0), 38 | BaseElement::new(1), 39 | BaseElement::new(2), 40 | BaseElement::new(3), 41 | BaseElement::new(4), 42 | BaseElement::new(5), 43 | BaseElement::new(6), 44 | BaseElement::new(7), 45 | BaseElement::new(8), 46 | BaseElement::new(9), 47 | BaseElement::new(10), 48 | BaseElement::new(11), 49 | ]; 50 | 51 | super::apply_permutation(&mut state); 52 | 53 | // expected values are obtained by executing sage reference implementation code 54 | let expected = vec![ 55 | BaseElement::new(2176593392043442589), 56 | BaseElement::new(3663362000910009411), 57 | BaseElement::new(2446978550600442325), 58 | BaseElement::new(4214718471639678996), 59 | BaseElement::new(4179776369445579812), 60 | BaseElement::new(2274316532403536457), 61 | BaseElement::new(2336761070419368662), 62 | BaseElement::new(3192888412646553651), 63 | BaseElement::new(4092565229845701133), 64 | BaseElement::new(753437048204208885), 65 | BaseElement::new(4067414342325289862), 66 | BaseElement::new(3516613610105678931), 67 | ]; 68 | 69 | assert_eq!(expected, state); 70 | } 71 | 72 | #[test] 73 | fn hash_elements_vs_merge() { 74 | let elements: [BaseElement; 8] = 
rand_array(); 75 | 76 | let digests: [ElementDigest; 2] = [ 77 | ElementDigest::new(elements[..4].try_into().unwrap()), 78 | ElementDigest::new(elements[4..].try_into().unwrap()), 79 | ]; 80 | 81 | let m_result = Rp62_248::merge(&digests); 82 | let h_result = Rp62_248::hash_elements(&elements); 83 | assert_eq!(m_result, h_result); 84 | } 85 | 86 | #[test] 87 | fn hash_elements_vs_merge_with_int() { 88 | let seed = ElementDigest::new(rand_array()); 89 | 90 | // ----- value fits into a field element ------------------------------------------------------ 91 | let val: BaseElement = rand_value(); 92 | let m_result = Rp62_248::merge_with_int(seed, val.as_int()); 93 | 94 | let mut elements = seed.as_elements().to_vec(); 95 | elements.push(val); 96 | let h_result = Rp62_248::hash_elements(&elements); 97 | 98 | assert_eq!(m_result, h_result); 99 | 100 | // ----- value does not fit into a field element ---------------------------------------------- 101 | let val = BaseElement::MODULUS + 2; 102 | let m_result = Rp62_248::merge_with_int(seed, val); 103 | 104 | let mut elements = seed.as_elements().to_vec(); 105 | elements.push(BaseElement::new(val)); 106 | elements.push(BaseElement::new(1)); 107 | let h_result = Rp62_248::hash_elements(&elements); 108 | 109 | assert_eq!(m_result, h_result); 110 | } 111 | 112 | #[test] 113 | fn hash_padding() { 114 | // adding a zero bytes at the end of a byte string should result in a different hash 115 | let r1 = Rp62_248::hash(&[1_u8, 2, 3]); 116 | let r2 = Rp62_248::hash(&[1_u8, 2, 3, 0]); 117 | assert_ne!(r1, r2); 118 | 119 | // same as above but with bigger inputs 120 | let r1 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6]); 121 | let r2 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6, 0]); 122 | assert_ne!(r1, r2); 123 | 124 | // same as above but with input splitting over two elements 125 | let r1 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6, 7]); 126 | let r2 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0]); 127 | assert_ne!(r1, r2); 128 | 129 | // same as 
above but with multiple zeros 130 | let r1 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0]); 131 | let r2 = Rp62_248::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0, 0, 0]); 132 | assert_ne!(r1, r2); 133 | } 134 | 135 | #[test] 136 | fn hash_elements_padding() { 137 | let e1: [BaseElement; 2] = rand_array(); 138 | let e2 = [e1[0], e1[1], BaseElement::ZERO]; 139 | 140 | let r1 = Rp62_248::hash_elements(&e1); 141 | let r2 = Rp62_248::hash_elements(&e2); 142 | assert_ne!(r1, r2); 143 | } 144 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/rescue/rp64_256/digest.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::{Digest, DIGEST_SIZE}; 7 | use core::slice; 8 | use math::{fields::f64::BaseElement, StarkField}; 9 | use utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable}; 10 | 11 | // DIGEST TRAIT IMPLEMENTATIONS 12 | // ================================================================================================ 13 | 14 | #[derive(Debug, Copy, Clone, Eq, PartialEq)] 15 | pub struct ElementDigest([BaseElement; DIGEST_SIZE]); 16 | 17 | impl ElementDigest { 18 | pub fn new(value: [BaseElement; DIGEST_SIZE]) -> Self { 19 | Self(value) 20 | } 21 | 22 | pub fn as_elements(&self) -> &[BaseElement] { 23 | &self.0 24 | } 25 | 26 | pub fn digests_as_elements(digests: &[Self]) -> &[BaseElement] { 27 | let p = digests.as_ptr(); 28 | let len = digests.len() * DIGEST_SIZE; 29 | unsafe { slice::from_raw_parts(p as *const BaseElement, len) } 30 | } 31 | } 32 | 33 | impl Digest for ElementDigest { 34 | fn as_bytes(&self) -> [u8; 32] { 35 | let mut result = [0; 32]; 36 | 37 | result[..8].copy_from_slice(&self.0[0].as_int().to_le_bytes()); 38 | 
result[8..16].copy_from_slice(&self.0[1].as_int().to_le_bytes()); 39 | result[16..24].copy_from_slice(&self.0[2].as_int().to_le_bytes()); 40 | result[24..].copy_from_slice(&self.0[3].as_int().to_le_bytes()); 41 | 42 | result 43 | } 44 | } 45 | 46 | impl Default for ElementDigest { 47 | fn default() -> Self { 48 | ElementDigest([BaseElement::default(); DIGEST_SIZE]) 49 | } 50 | } 51 | 52 | impl Serializable for ElementDigest { 53 | fn write_into(&self, target: &mut W) { 54 | target.write_u8_slice(&self.as_bytes()); 55 | } 56 | } 57 | 58 | impl Deserializable for ElementDigest { 59 | fn read_from(source: &mut R) -> Result { 60 | // TODO: check if the field elements are valid? 61 | let e1 = BaseElement::new(source.read_u64()?); 62 | let e2 = BaseElement::new(source.read_u64()?); 63 | let e3 = BaseElement::new(source.read_u64()?); 64 | let e4 = BaseElement::new(source.read_u64()?); 65 | 66 | Ok(Self([e1, e2, e3, e4])) 67 | } 68 | } 69 | 70 | impl From<[BaseElement; DIGEST_SIZE]> for ElementDigest { 71 | fn from(value: [BaseElement; DIGEST_SIZE]) -> Self { 72 | Self(value) 73 | } 74 | } 75 | 76 | impl From for [BaseElement; DIGEST_SIZE] { 77 | fn from(value: ElementDigest) -> Self { 78 | value.0 79 | } 80 | } 81 | 82 | impl From for [u8; 32] { 83 | fn from(value: ElementDigest) -> Self { 84 | value.as_bytes() 85 | } 86 | } 87 | 88 | // TESTS 89 | // ================================================================================================ 90 | 91 | #[cfg(test)] 92 | mod tests { 93 | 94 | use super::ElementDigest; 95 | use rand_utils::rand_array; 96 | use utils::{Deserializable, Serializable, SliceReader}; 97 | 98 | #[test] 99 | fn digest_serialization() { 100 | let d1 = ElementDigest(rand_array()); 101 | 102 | let mut bytes = vec![]; 103 | d1.write_into(&mut bytes); 104 | assert_eq!(32, bytes.len()); 105 | 106 | let mut reader = SliceReader::new(&bytes); 107 | let d2 = ElementDigest::read_from(&mut reader).unwrap(); 108 | 109 | assert_eq!(d1, d2); 110 | } 111 | } 
112 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/rescue/rp64_256/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::{ 7 | BaseElement, ElementDigest, ElementHasher, FieldElement, Hasher, Rp64_256, StarkField, ALPHA, 8 | INV_ALPHA, STATE_WIDTH, 9 | }; 10 | use core::convert::TryInto; 11 | 12 | use rand_utils::{rand_array, rand_value}; 13 | 14 | #[test] 15 | fn test_alphas() { 16 | let e: BaseElement = rand_value(); 17 | let e_exp = e.exp(ALPHA.into()); 18 | assert_eq!(e, e_exp.exp(INV_ALPHA)); 19 | } 20 | 21 | #[test] 22 | fn test_sbox() { 23 | let state: [BaseElement; STATE_WIDTH] = rand_array(); 24 | 25 | let mut expected = state; 26 | expected.iter_mut().for_each(|v| *v = v.exp(ALPHA)); 27 | 28 | let mut actual = state; 29 | Rp64_256::apply_sbox(&mut actual); 30 | 31 | assert_eq!(expected, actual); 32 | } 33 | 34 | #[test] 35 | fn test_inv_sbox() { 36 | let state: [BaseElement; STATE_WIDTH] = rand_array(); 37 | 38 | let mut expected = state; 39 | expected.iter_mut().for_each(|v| *v = v.exp(INV_ALPHA)); 40 | 41 | let mut actual = state; 42 | Rp64_256::apply_inv_sbox(&mut actual); 43 | 44 | assert_eq!(expected, actual); 45 | } 46 | 47 | #[test] 48 | fn apply_permutation() { 49 | let mut state: [BaseElement; STATE_WIDTH] = [ 50 | BaseElement::new(0), 51 | BaseElement::new(1), 52 | BaseElement::new(2), 53 | BaseElement::new(3), 54 | BaseElement::new(4), 55 | BaseElement::new(5), 56 | BaseElement::new(6), 57 | BaseElement::new(7), 58 | BaseElement::new(8), 59 | BaseElement::new(9), 60 | BaseElement::new(10), 61 | BaseElement::new(11), 62 | ]; 63 | 64 | Rp64_256::apply_permutation(&mut state); 65 | 66 | // expected values are obtained by executing sage 
reference implementation code 67 | let expected = vec![ 68 | BaseElement::new(10809974140050983728), 69 | BaseElement::new(6938491977181280539), 70 | BaseElement::new(8834525837561071698), 71 | BaseElement::new(6854417192438540779), 72 | BaseElement::new(4476630872663101667), 73 | BaseElement::new(6292749486700362097), 74 | BaseElement::new(18386622366690620454), 75 | BaseElement::new(10614098972800193173), 76 | BaseElement::new(7543273285584849722), 77 | BaseElement::new(9490898458612615694), 78 | BaseElement::new(9030271581669113292), 79 | BaseElement::new(10101107035874348250), 80 | ]; 81 | 82 | assert_eq!(expected, state); 83 | } 84 | 85 | #[test] 86 | fn hash_elements_vs_merge() { 87 | let elements: [BaseElement; 8] = rand_array(); 88 | 89 | let digests: [ElementDigest; 2] = [ 90 | ElementDigest::new(elements[..4].try_into().unwrap()), 91 | ElementDigest::new(elements[4..].try_into().unwrap()), 92 | ]; 93 | 94 | let m_result = Rp64_256::merge(&digests); 95 | let h_result = Rp64_256::hash_elements(&elements); 96 | assert_eq!(m_result, h_result); 97 | } 98 | 99 | #[test] 100 | fn hash_elements_vs_merge_with_int() { 101 | let seed = ElementDigest::new(rand_array()); 102 | 103 | // ----- value fits into a field element ------------------------------------------------------ 104 | let val: BaseElement = rand_value(); 105 | let m_result = Rp64_256::merge_with_int(seed, val.as_int()); 106 | 107 | let mut elements = seed.as_elements().to_vec(); 108 | elements.push(val); 109 | let h_result = Rp64_256::hash_elements(&elements); 110 | 111 | assert_eq!(m_result, h_result); 112 | 113 | // ----- value does not fit into a field element ---------------------------------------------- 114 | let val = BaseElement::MODULUS + 2; 115 | let m_result = Rp64_256::merge_with_int(seed, val); 116 | 117 | let mut elements = seed.as_elements().to_vec(); 118 | elements.push(BaseElement::new(val)); 119 | elements.push(BaseElement::new(1)); 120 | let h_result = 
Rp64_256::hash_elements(&elements); 121 | 122 | assert_eq!(m_result, h_result); 123 | } 124 | 125 | #[test] 126 | fn hash_padding() { 127 | // adding a zero bytes at the end of a byte string should result in a different hash 128 | let r1 = Rp64_256::hash(&[1_u8, 2, 3]); 129 | let r2 = Rp64_256::hash(&[1_u8, 2, 3, 0]); 130 | assert_ne!(r1, r2); 131 | 132 | // same as above but with bigger inputs 133 | let r1 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6]); 134 | let r2 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6, 0]); 135 | assert_ne!(r1, r2); 136 | 137 | // same as above but with input splitting over two elements 138 | let r1 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6, 7]); 139 | let r2 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0]); 140 | assert_ne!(r1, r2); 141 | 142 | // same as above but with multiple zeros 143 | let r1 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0]); 144 | let r2 = Rp64_256::hash(&[1_u8, 2, 3, 4, 5, 6, 7, 0, 0, 0, 0]); 145 | assert_ne!(r1, r2); 146 | } 147 | 148 | #[test] 149 | fn hash_elements_padding() { 150 | let e1: [BaseElement; 2] = rand_array(); 151 | let e2 = [e1[0], e1[1], BaseElement::ZERO]; 152 | 153 | let r1 = Rp64_256::hash_elements(&e1); 154 | let r2 = Rp64_256::hash_elements(&e2); 155 | assert_ne!(r1, r2); 156 | } 157 | -------------------------------------------------------------------------------- /winterfell/crypto/src/hash/sha/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{ByteDigest, ElementHasher, Hasher}; 7 | use core::marker::PhantomData; 8 | use math::{FieldElement, StarkField}; 9 | use sha3::Digest; 10 | use utils::ByteWriter; 11 | 12 | // SHA3 WITH 256-BIT OUTPUT 13 | // ================================================================================================ 14 | 15 | /// Implementation of the [Hasher](super::Hasher) trait for SHA3 hash function with 256-bit 16 | /// output. 17 | pub struct Sha3_256(PhantomData); 18 | 19 | impl Hasher for Sha3_256 { 20 | type Digest = ByteDigest<32>; 21 | 22 | fn hash(bytes: &[u8]) -> Self::Digest { 23 | ByteDigest(sha3::Sha3_256::digest(bytes).into()) 24 | } 25 | 26 | fn merge(values: &[Self::Digest; 2]) -> Self::Digest { 27 | ByteDigest(sha3::Sha3_256::digest(ByteDigest::digests_as_bytes(values)).into()) 28 | } 29 | 30 | fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest { 31 | let mut data = [0; 40]; 32 | data[..32].copy_from_slice(&seed.0); 33 | data[32..].copy_from_slice(&value.to_le_bytes()); 34 | ByteDigest(sha3::Sha3_256::digest(&data).into()) 35 | } 36 | } 37 | 38 | impl ElementHasher for Sha3_256 { 39 | type BaseField = B; 40 | 41 | fn hash_elements>(elements: &[E]) -> Self::Digest { 42 | if B::IS_CANONICAL { 43 | // when element's internal and canonical representations are the same, we can hash 44 | // element bytes directly 45 | let bytes = E::elements_as_bytes(elements); 46 | ByteDigest(sha3::Sha3_256::digest(bytes).into()) 47 | } else { 48 | // when elements' internal and canonical representations differ, we need to serialize 49 | // them before hashing 50 | let mut hasher = ShaHasher::new(); 51 | hasher.write(elements); 52 | ByteDigest(hasher.finalize()) 53 | } 54 | } 55 | } 56 | 57 | // SHA HASHER 58 | // ================================================================================================ 59 | 60 | /// Wrapper around SHA3 hasher to implement [ByteWriter] trait for it. 
61 | struct ShaHasher(sha3::Sha3_256); 62 | 63 | impl ShaHasher { 64 | pub fn new() -> Self { 65 | Self(sha3::Sha3_256::new()) 66 | } 67 | 68 | pub fn finalize(self) -> [u8; 32] { 69 | self.0.finalize().into() 70 | } 71 | } 72 | 73 | impl ByteWriter for ShaHasher { 74 | fn write_u8(&mut self, value: u8) { 75 | self.0.update(&[value]); 76 | } 77 | 78 | fn write_u8_slice(&mut self, values: &[u8]) { 79 | self.0.update(values); 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /winterfell/crypto/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! This crate contains cryptographic primitives used in STARK proof generation and verification. 7 | //! These include: 8 | //! 9 | //! * **Hash functions** - which are defined using the [Hasher] trait. The crate also contains two 10 | //! implementations of the trait for BLAKE3 and SHA3 hash functions. 11 | //! * **Merkle trees** - which are used as a commitment scheme in the STARK protocol. The 12 | //! [MerkleTree] implementation supports concurrent tree construction as well as compact 13 | //! aggregation of Merkle paths implemented using a variation of the 14 | //! [Octopus](https://eprint.iacr.org/2017/933) algorithm. 15 | //! * **PRNG** - which is used to generate pseudo-random elements in a finite field. The 16 | //! [RandomCoin] implementation uses a cryptographic hash function to generate pseudo-random 17 | //! elements form a seed. 18 | 19 | #![cfg_attr(not(feature = "std"), no_std)] 20 | 21 | #[cfg(not(feature = "std"))] 22 | #[macro_use] 23 | extern crate alloc; 24 | 25 | mod hash; 26 | pub use hash::{Digest, ElementHasher, Hasher}; 27 | pub mod hashers { 28 | //! 
Contains implementations of currently supported hash functions.

    pub use super::hash::Blake3_192;
    pub use super::hash::Blake3_256;
    pub use super::hash::Rp62_248;
    pub use super::hash::Rp64_256;
    pub use super::hash::Sha3_256;
    pub use super::hash::Poseidon;
}

mod merkle;
pub use merkle::{build_merkle_nodes, BatchMerkleProof, MerkleTree};

#[cfg(feature = "concurrent")]
pub use merkle::concurrent;

mod random;
pub use random::RandomCoin;

mod errors;
pub use errors::{MerkleTreeError, RandomCoinError};

--------------------------------------------------------------------------------
/winterfell/crypto/src/merkle/concurrent.rs:
--------------------------------------------------------------------------------
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.

use crate::Hasher;
use core::slice;
use utils::{collections::Vec, iterators::*, rayon};

// CONSTANTS
// ================================================================================================

pub const MIN_CONCURRENT_LEAVES: usize = 1024;

// PUBLIC FUNCTIONS
// ================================================================================================

/// Builds all internal nodes of the Merkle tree using all available threads and stores the
/// results in a single vector such that root of the tree is at position 1, nodes immediately
/// under the root are at positions 2 and 3 etc.
21 | pub fn build_merkle_nodes(leaves: &[H::Digest]) -> Vec { 22 | let n = leaves.len() / 2; 23 | 24 | // create un-initialized array to hold all intermediate nodes 25 | let mut nodes = unsafe { utils::uninit_vector::(2 * n) }; 26 | nodes[0] = H::Digest::default(); 27 | 28 | // re-interpret leaves as an array of two leaves fused together and use it to 29 | // build first row of internal nodes (parents of leaves) 30 | let two_leaves = unsafe { slice::from_raw_parts(leaves.as_ptr() as *const [H::Digest; 2], n) }; 31 | nodes[n..] 32 | .par_iter_mut() 33 | .zip(two_leaves.par_iter()) 34 | .for_each(|(target, source)| *target = H::merge(source)); 35 | 36 | // calculate all other tree nodes, we can't use regular iterators here because 37 | // access patterns are rather complicated - so, we use regular threads instead 38 | 39 | // number of sub-trees must always be a power of 2 40 | let num_subtrees = rayon::current_num_threads().next_power_of_two(); 41 | let batch_size = n / num_subtrees; 42 | 43 | // re-interpret nodes as an array of two nodes fused together 44 | let two_nodes = unsafe { slice::from_raw_parts(nodes.as_ptr() as *const [H::Digest; 2], n) }; 45 | 46 | // process each subtree in a separate thread 47 | rayon::scope(|s| { 48 | for i in 0..num_subtrees { 49 | let nodes = unsafe { &mut *(&mut nodes[..] 
as *mut [H::Digest]) }; 50 | s.spawn(move |_| { 51 | let mut batch_size = batch_size / 2; 52 | let mut start_idx = n / 2 + batch_size * i; 53 | while start_idx >= num_subtrees { 54 | for k in (start_idx..(start_idx + batch_size)).rev() { 55 | nodes[k] = H::merge(&two_nodes[k]); 56 | } 57 | start_idx /= 2; 58 | batch_size /= 2; 59 | } 60 | }); 61 | } 62 | }); 63 | 64 | // finish the tip of the tree 65 | for i in (1..num_subtrees).rev() { 66 | nodes[i] = H::merge(&two_nodes[i]); 67 | } 68 | 69 | nodes 70 | } 71 | 72 | // TESTS 73 | // ================================================================================================ 74 | 75 | #[cfg(test)] 76 | mod tests { 77 | use crate::hash::{ByteDigest, Sha3_256}; 78 | use math::fields::f128::BaseElement; 79 | use proptest::collection::vec; 80 | use proptest::prelude::*; 81 | 82 | proptest! { 83 | #[test] 84 | fn build_merkle_nodes_concurrent(ref data in vec(any::<[u8; 32]>(), 256..257).no_shrink()) { 85 | let leaves = ByteDigest::bytes_as_digests(&data).to_vec(); 86 | let sequential = super::super::build_merkle_nodes::>(&leaves); 87 | let concurrent = super::build_merkle_nodes::>(&leaves); 88 | assert_eq!(concurrent, sequential); 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /winterfell/fri/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-fri" 3 | version = "0.4.0" 4 | description = "Implementation of FRI protocol for the Winterfell STARK prover/verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-fri/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "polynomial", "commitments"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [[bench]] 19 | name = "prover" 20 
| harness = false 21 | 22 | [[bench]] 23 | name = "folding" 24 | harness = false 25 | 26 | [features] 27 | concurrent = ["crypto/concurrent", "math/concurrent", "utils/concurrent", "std"] 28 | default = ["std"] 29 | std = ["crypto/std", "math/std", "utils/std"] 30 | 31 | [dependencies] 32 | crypto = { version = "0.4", path = "../crypto", package = "winter-crypto", default-features = false } 33 | math = { version = "0.4", path = "../math", package = "winter-math", default-features = false } 34 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 35 | 36 | [dev-dependencies] 37 | criterion = "0.3" 38 | rand-utils = { version = "0.4", path = "../utils/rand", package = "winter-rand-utils" } 39 | -------------------------------------------------------------------------------- /winterfell/fri/README.md: -------------------------------------------------------------------------------- 1 | # Winter FRI 2 | This crate contains an implementation of FRI prover and verifier used by the Winterfell STARK prover and verifier. 3 | 4 | FRI stands for Fast Reed-Solomon Interactive Oracle Proof of Proximity, and is used in the STARK protocol for low-degree testing. Specifically, given a commitment to a set of evaluations of some function over domain *D*, the verifier can be convinced that the function is a polynomial of degree at most *d*, by making a small number of queries to the commitment. 5 | 6 | ## Prover 7 | FRI proofs are generated by a [FRI prover](src/prover/mod.rs) in two steps: 8 | 9 | 1. First, the commit phase of the protocol is executed via `build_layers()` function. During this phase, the degree of the polynomial is repeatedly reduced by applying a degree-respecting projection, until the size of the domain over which the polynomial is evaluated falls under `max_remainder_size` parameter. While performing the reduction, the prover writes a set of layer commitments into the `ProverChannel`. 
These commitments should be recorded and sent to the verifier as they will be needed during the proof verification procedure. 10 | 2. Then, the query phase of the protocol is executed via `build_proof()` function. The output of this function is an instance of the `FriProof` struct. When FRI is executed as a part of the STARK protocol, FRI proof is included into a STARK proof. 11 | 12 | ## Verifier 13 | FRI proofs are verified by a [FriVerifier](src/verifier/mod.rs) as follows: 14 | 1. First, a FRI proof needs to be converted into a `VerifierChannel`. This crate provides a default implementation of the verifier channel, but when FRI proof verification is executed as a part of the larger STARK protocol, STARK verifier handles this conversion. 15 | 2. Then, a `FriVerifier` should be instantiated (via `new()` function). This will execute the commit phase of the FRI protocol from the verifier's perspective - i.e., the verifier will read FRI layer commitments from the channel, and generates random values needed for layer folding. 16 | 3. Finally, the query phase of the FRI protocol should be executed via `verify()` function. Note that query values at the first FRI layer are provided to the `verify()` function directly. The values at remaining layers, the verifier reads from the specified verifier channel. 17 | 18 | ## Protocol parameters 19 | This crates supports executing FRI protocol with dynamically configurable parameters including: 20 | 21 | * Base STARK field, 22 | * Extension field, 23 | * Domain blowup factor, 24 | * Hash function (used for Merkle tree commitments), 25 | * Folding factor (used for degree reduction for each FRI layer), 26 | * Maximum size of the last FRI layer. 27 | 28 | ## Crate features 29 | This crate can be compiled with the following features: 30 | 31 | * `std` - enabled by default and relies on the Rust standard library. 32 | * `concurrent` - implies `std` and also enables multi-threaded proof generation. 
33 | * `no_std` - does not rely on the Rust standard library and enables compilation to WebAssembly. 34 | 35 | To compile with `no_std`, disable default features via `--no-default-features` flag. 36 | 37 | ### Concurrent execution 38 | When this crate is compiled with `concurrent` feature enabled, `FriProver` will build FRI layers using multiple threads. The number of threads can be configured via `RAYON_NUM_THREADS` environment variable, and usually defaults to the number of logical cores on the machine. 39 | 40 | ## References 41 | 42 | * StarkWare's blog post on [Low Degree Testing](https://medium.com/starkware/low-degree-testing-f7614f5172db) 43 | * [Fast Reed-Solomon Interactive Oracle Proofs of Proximity](https://eccc.weizmann.ac.il/report/2017/134/) 44 | * [DEEP-FRI: Sampling Outside the Box Improves Soundness](https://eprint.iacr.org/2019/336) 45 | * Swastik Kooparty's [talk on DEEP-FRI](https://www.youtube.com/watch?v=txo_kPSn59Y&list=PLcIyXLwiPilWvjvNkhMn283LV370Pk5CT&index=6) 46 | 47 | 48 | License 49 | ------- 50 | 51 | This project is [MIT licensed](../LICENSE). -------------------------------------------------------------------------------- /winterfell/fri/benches/folding.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use criterion::{criterion_group, criterion_main, BenchmarkId, Criterion}; 7 | use math::{fields::f128::BaseElement, get_power_series, log2, polynom, StarkField}; 8 | use rand_utils::{rand_value, rand_vector}; 9 | use utils::group_vector_elements; 10 | use winter_fri::folding; 11 | 12 | static BATCH_SIZES: [usize; 3] = [65536, 131072, 262144]; 13 | 14 | pub fn interpolate_batch(c: &mut Criterion) { 15 | let mut group = c.benchmark_group("interpolate batch"); 16 | 17 | for &size in &BATCH_SIZES { 18 | let (xs, ys) = build_coordinate_batches(size); 19 | group.bench_function(BenchmarkId::new("generic", size), |b| { 20 | b.iter(|| polynom::interpolate_batch(&xs, &ys)) 21 | }); 22 | } 23 | } 24 | 25 | pub fn apply_drp(c: &mut Criterion) { 26 | let mut group = c.benchmark_group("drp"); 27 | 28 | for &size in &BATCH_SIZES { 29 | let (_, ys) = build_coordinate_batches(size); 30 | let alpha: BaseElement = rand_value(); 31 | group.bench_function(BenchmarkId::new("base field", size), |b| { 32 | b.iter(|| folding::apply_drp(&ys, BaseElement::GENERATOR, alpha)) 33 | }); 34 | } 35 | } 36 | 37 | criterion_group!(quartic_group, interpolate_batch, apply_drp); 38 | criterion_main!(quartic_group); 39 | 40 | // HELPER FUNCTIONS 41 | // ================================================================================================ 42 | 43 | fn build_coordinate_batches(batch_size: usize) -> (Vec<[BaseElement; 4]>, Vec<[BaseElement; 4]>) { 44 | let r = BaseElement::get_root_of_unity(log2(batch_size)); 45 | let xs = group_vector_elements(get_power_series(r, batch_size)); 46 | let ys = group_vector_elements(rand_vector::(batch_size)); 47 | (xs, ys) 48 | } 49 | -------------------------------------------------------------------------------- /winterfell/fri/benches/prover.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 
2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; 7 | use crypto::hashers::Blake3_256; 8 | use math::{fft, fields::f128::BaseElement, FieldElement}; 9 | use rand_utils::rand_vector; 10 | use std::time::Duration; 11 | use winter_fri::{DefaultProverChannel, FriOptions, FriProver}; 12 | 13 | static BATCH_SIZES: [usize; 3] = [65536, 131072, 262144]; 14 | static BLOWUP_FACTOR: usize = 8; 15 | 16 | pub fn build_layers(c: &mut Criterion) { 17 | let mut fri_group = c.benchmark_group("FRI prover"); 18 | fri_group.sample_size(10); 19 | fri_group.measurement_time(Duration::from_secs(10)); 20 | 21 | let options = FriOptions::new(BLOWUP_FACTOR, 4, 256); 22 | 23 | for &domain_size in &BATCH_SIZES { 24 | let evaluations = build_evaluations(domain_size); 25 | 26 | fri_group.bench_with_input( 27 | BenchmarkId::new("build_layers", domain_size), 28 | &evaluations, 29 | |b, e| { 30 | let mut prover = FriProver::new(options.clone()); 31 | b.iter_batched( 32 | || e.clone(), 33 | |evaluations| { 34 | let mut channel = DefaultProverChannel::< 35 | BaseElement, 36 | BaseElement, 37 | Blake3_256, 38 | >::new(domain_size, 32); 39 | prover.build_layers(&mut channel, evaluations); 40 | prover.reset(); 41 | }, 42 | BatchSize::LargeInput, 43 | ); 44 | }, 45 | ); 46 | } 47 | } 48 | 49 | criterion_group!(fri_prover_group, build_layers); 50 | criterion_main!(fri_prover_group); 51 | 52 | // HELPER FUNCTIONS 53 | // ================================================================================================ 54 | 55 | fn build_evaluations(domain_size: usize) -> Vec { 56 | let mut p: Vec = rand_vector(domain_size / BLOWUP_FACTOR); 57 | p.resize(domain_size, BaseElement::ZERO); 58 | let twiddles = fft::get_twiddles::(domain_size); 59 | fft::evaluate_poly(&mut p, &twiddles); 60 | p 61 | } 62 | 
--------------------------------------------------------------------------------
/winterfell/fri/src/errors.rs:
--------------------------------------------------------------------------------
// Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.

use core::fmt;
use utils::string::String;

use crypto::RandomCoinError;

// VERIFIER ERROR
// ================================================================================================

/// Defines errors which can occur during FRI proof verification.
#[derive(Debug, PartialEq)]
pub enum VerifierError {
    /// Attempt to draw a random value from a public coin failed.
    PublicCoinError(RandomCoinError),
    /// Folding factor specified for the protocol is not supported. Currently, supported folding
    /// factors are: 4, 8, and 16.
    UnsupportedFoldingFactor(usize),
    /// Number of query positions does not match the number of provided evaluations.
    NumPositionEvaluationMismatch(usize, usize),
    /// Evaluations at queried positions did not match layer commitment made by the prover.
    LayerCommitmentMismatch,
    /// Degree-respecting projection was not performed correctly at one of the layers.
    InvalidLayerFolding(usize),
    /// Failed to construct a Merkle tree out of FRI remainder values.
    RemainderTreeConstructionFailed(String),
    /// FRI remainder did not match the commitment.
    RemainderCommitmentMismatch,
    /// Degree-respecting projection was not performed correctly at the last layer.
    InvalidRemainderFolding,
    /// FRI remainder expected degree is greater than number of remainder values.
    RemainderDegreeNotValid,
    /// FRI remainder degree is greater than the polynomial degree expected for the last layer.
    RemainderDegreeMismatch(usize),
    /// Polynomial degree at one of the FRI layers could not be divided evenly by the folding factor.
    DegreeTruncation(usize, usize, usize),
}

impl fmt::Display for VerifierError {
    #[rustfmt::skip]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::PublicCoinError(err) => {
                write!(f, "failed to draw a random value from the public coin: {}", err)
            }
            Self::UnsupportedFoldingFactor(value) => {
                write!(f, "folding factor {} is not currently supported", value)
            }
            Self::NumPositionEvaluationMismatch(num_positions, num_evaluations) => write!(f,
                "the number of query positions must be the same as the number of polynomial evaluations, but {} and {} were provided",
                num_positions, num_evaluations
            ),
            Self::LayerCommitmentMismatch => {
                write!(f, "FRI queries did not match layer commitment made by the prover")
            }
            Self::InvalidLayerFolding(layer) => {
                write!(f, "degree-respecting projection is not consistent at layer {}", layer)
            }
            Self::RemainderTreeConstructionFailed(err_msg) => {
                write!(f, "FRI remainder Merkle tree could not be constructed: {}", err_msg)
            }
            Self::RemainderCommitmentMismatch => {
                write!(f, "FRI remainder did not match the commitment")
            }
            Self::InvalidRemainderFolding => {
                write!(f, "degree-respecting projection is inconsistent at the last FRI layer")
            }
            Self::RemainderDegreeNotValid => {
                write!(f, "FRI remainder expected degree is greater than number of remainder values")
            }
            Self::RemainderDegreeMismatch(degree) => {
                write!(f, "FRI remainder is not a valid degree {} polynomial", degree)
            }
            Self::DegreeTruncation(degree, folding, layer) => {
                write!(f, "degree reduction from {} by {} at layer {} results in degree truncation", degree, folding, layer)
            }
        }
    }
}

-------------------------------------------------------------------------------- /winterfell/fri/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! This crate contains an implementation of the FRI protocol used by the Winterfell STARK prover 7 | //! and verifier. 8 | //! 9 | //! FRI stands for Fast Reed-Solomon Interactive Oracle Proof of Proximity, and is used in the 10 | //! STARK protocol for low-degree testing. Specifically, given a commitment to a set of evaluations 11 | //! of some function over domain *D*, the verifier can be convinced that the function is a 12 | //! polynomial of degree at most *d*, by making a small number of queries to the commitment. 13 | //! 14 | //! # Proof generation 15 | //! FRI proofs are generated by a [FriProver] in two steps: 16 | //! 17 | //! 1. First, the commit phase of the protocol is executed via 18 | //! [build_layers()](prover::FriProver::build_layers()) function. During this phase, the degree 19 | //! of the polynomial is repeatedly reduced by applying a degree-respecting projection, until 20 | //! the size of the domain over which the polynomial is evaluated falls under 21 | //! `max_remainder_size` parameter. While performing the reduction, the prover writes a set of 22 | //! layer commitments into the [ProverChannel]. These commitments should be recorded and sent 23 | //! to the verifier as they will be needed during the proof verification procedure. 24 | //! 2. Then, the query phase of the protocol is executed via 25 | //! [build_proof()](prover::FriProver::build_proof()) function. The output of this function is 26 | //! an instance of the [FriProof] struct. When FRI is executed as a part of the STARK protocol, 27 | //! FRI proof is included into a STARK proof. 28 | //! 
29 | //! When the crate is compiled with `concurrent` feature enabled, proof generation will be 30 | //! performed in multiple threads (usually, as many threads as there are logical cores on the 31 | //! machine). The number of threads can be configured via `RAYON_NUM_THREADS` environment variable. 32 | //! 33 | //! # Proof verification 34 | //! FRI proofs are verified by a [FriVerifier] as follows: 35 | //! 1. First, a FRI proof needs to be converted into a [VerifierChannel]. This crate provides a 36 | //! default implementation of the verifier channel, but when FRI proof verification is executed 37 | //! as a part of the larger STARK protocol, STARK verifier handles this conversion. 38 | //! 2. Then, a [FriVerifier] should be instantiated (via [new()](FriVerifier::new()) function). 39 | //! This will execute the commit phase of the FRI protocol from the verifier's perspective - 40 | //! i.e., the verifier will read FRI layer commitments from the channel, and generates 41 | //! random values needed for layer folding. 42 | //! 3. Finally, the query phase of the FRI protocol should be executed via 43 | //! [verify()](FriVerifier::verify()) function. Note that query values at the first FRI layer 44 | //! are provided to the [verify()](FriVerifier::verify()) function directly. The values at 45 | //! remaining layers, the verifier reads from the specified verifier channel. 46 | //! 47 | //! # Protocol parameters 48 | //! The current implementation supports executing FRI protocol with dynamically configurable 49 | //! parameters including: 50 | //! 51 | //! * Base STARK field, 52 | //! * Extension field, 53 | //! * Domain blowup factor, 54 | //! * Hash function (used for Merkle tree commitments), 55 | //! * Folding factor (used for degree reduction for each FRI layer), 56 | //! * Maximum size of the last FRI layer. 57 | //! 58 | //! # References 59 | //! 
* StarkWare's blog post on [Low Degree Testing](https://medium.com/starkware/low-degree-testing-f7614f5172db) 60 | //! * [Fast Reed-Solomon Interactive Oracle Proofs of Proximity](https://eccc.weizmann.ac.il/report/2017/134/) 61 | //! * [DEEP-FRI: Sampling Outside the Box Improves Soundness](https://eprint.iacr.org/2019/336) 62 | //! * Swastik Kooparty's [talk on DEEP-FRI](https://www.youtube.com/watch?v=txo_kPSn59Y&list=PLcIyXLwiPilWvjvNkhMn283LV370Pk5CT&index=6) 63 | 64 | #![cfg_attr(not(feature = "std"), no_std)] 65 | 66 | #[cfg(not(feature = "std"))] 67 | #[macro_use] 68 | extern crate alloc; 69 | 70 | pub mod folding; 71 | 72 | mod prover; 73 | pub use prover::{DefaultProverChannel, FriProver, ProverChannel}; 74 | 75 | mod verifier; 76 | pub use verifier::{DefaultVerifierChannel, FriVerifier, VerifierChannel}; 77 | 78 | mod options; 79 | pub use options::FriOptions; 80 | 81 | mod proof; 82 | pub use proof::FriProof; 83 | 84 | mod errors; 85 | pub use errors::VerifierError; 86 | 87 | mod utils; 88 | -------------------------------------------------------------------------------- /winterfell/fri/src/options.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use math::StarkField; 7 | 8 | // FRI OPTIONS 9 | // ================================================================================================ 10 | 11 | /// FRI protocol config options for proof generation and verification. 12 | #[derive(Clone, PartialEq, Eq)] 13 | pub struct FriOptions { 14 | folding_factor: usize, 15 | max_remainder_size: usize, 16 | blowup_factor: usize, 17 | } 18 | 19 | impl FriOptions { 20 | /// Returns a new [FriOptions] struct instantiated with the specified parameters. 
21 | /// 22 | /// # Panics 23 | /// Panics if: 24 | /// * `blowup_factor` is not a power of two. 25 | /// * `folding_factor` is not 4, 8, or 16. 26 | /// * `max_remainder_size` is not at least twice the size of the `blowup_factor`. 27 | pub fn new(blowup_factor: usize, folding_factor: usize, max_remainder_size: usize) -> Self { 28 | // TODO: change panics to errors 29 | assert!( 30 | blowup_factor.is_power_of_two(), 31 | "blowup factor must be a power of two, but was {}", 32 | blowup_factor 33 | ); 34 | assert!( 35 | folding_factor == 4 || folding_factor == 8 || folding_factor == 16, 36 | "folding factor {} is not supported", 37 | folding_factor 38 | ); 39 | assert!( 40 | max_remainder_size >= folding_factor * 2, 41 | "expected max remainder size to be at least {}, but was {}", 42 | folding_factor * 2, 43 | max_remainder_size 44 | ); 45 | FriOptions { 46 | folding_factor, 47 | max_remainder_size, 48 | blowup_factor, 49 | } 50 | } 51 | 52 | /// Returns the offset by which the evaluation domain is shifted. 53 | /// 54 | /// The domain is shifted by multiplying every element in the domain by this offset. 55 | /// 56 | /// Currently, the offset is hard-coded to be the primitive element in the field specified by 57 | /// type parameter `B`. 58 | pub fn domain_offset(&self) -> B { 59 | B::GENERATOR 60 | } 61 | 62 | /// Returns the factor by which the degree of a polynomial is reduced with each FRI layer. 63 | /// 64 | /// In combination with `max_remainder_size` this property defines how many FRI layers are 65 | /// needed for an evaluation domain of a given size. 66 | pub fn folding_factor(&self) -> usize { 67 | self.folding_factor 68 | } 69 | 70 | /// Returns maximum allowed remainder (last FRI layer) size. 71 | /// 72 | /// In combination with `folding_factor` this property defines how many FRI layers are needed 73 | /// for an evaluation domain of a given size. 
74 | pub fn max_remainder_size(&self) -> usize { 75 | self.max_remainder_size 76 | } 77 | 78 | /// Returns a blowup factor of the evaluation domain. 79 | /// 80 | /// Specifically, if the polynomial for which the FRI protocol is executed is of degree `d` 81 | /// where `d` is one less than a power of two, then the evaluation domain size will be 82 | /// equal to `(d + 1) * blowup_factor`. 83 | pub fn blowup_factor(&self) -> usize { 84 | self.blowup_factor 85 | } 86 | 87 | /// Computes and returns the number of FRI layers required for a domain of the specified size. 88 | /// 89 | /// The remainder layer (the last FRI layer) is not included in the returned value. 90 | /// 91 | /// The number of layers for a given domain size is defined by the `folding_factor` and 92 | /// `max_remainder_size` settings. 93 | pub fn num_fri_layers(&self, mut domain_size: usize) -> usize { 94 | let mut result = 0; 95 | while domain_size > self.max_remainder_size { 96 | domain_size /= self.folding_factor; 97 | result += 1; 98 | } 99 | result 100 | } 101 | 102 | /// Computes and returns the size of the remainder layer (the last FRI layer) for a domain of 103 | /// the specified size. 104 | /// 105 | /// The size of the remainder layer for a given domain size is defined by the `folding_factor` 106 | /// and `max_remainder_size` settings. 107 | pub fn fri_remainder_size(&self, mut domain_size: usize) -> usize { 108 | while domain_size > self.max_remainder_size { 109 | domain_size /= self.folding_factor; 110 | } 111 | domain_size 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /winterfell/fri/src/prover/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{DefaultProverChannel, FriProver}; 7 | use crate::{ 8 | verifier::{DefaultVerifierChannel, FriVerifier}, 9 | FriOptions, FriProof, VerifierError, 10 | }; 11 | use crypto::{hashers::Blake3_256, Hasher, RandomCoin}; 12 | use math::{fft, fields::f128::BaseElement, FieldElement}; 13 | use utils::{collections::Vec, Deserializable, Serializable, SliceReader}; 14 | 15 | type Blake3 = Blake3_256; 16 | 17 | // PROVE/VERIFY TEST 18 | // ================================================================================================ 19 | 20 | #[test] 21 | fn fri_prove_verify() { 22 | let trace_length = 4096; 23 | let lde_blowup = 8; 24 | 25 | let options = FriOptions::new(lde_blowup, 4, 256); 26 | let mut channel = build_prover_channel(trace_length, &options); 27 | let evaluations = build_evaluations(trace_length, lde_blowup); 28 | 29 | // instantiate the prover and generate the proof 30 | let mut prover = FriProver::new(options.clone()); 31 | prover.build_layers(&mut channel, evaluations.clone()); 32 | let positions = channel.draw_query_positions(); 33 | let proof = prover.build_proof(&positions); 34 | 35 | // make sure the proof can be verified 36 | let commitments = channel.layer_commitments().to_vec(); 37 | let max_degree = trace_length - 1; 38 | let result = verify_proof( 39 | proof.clone(), 40 | commitments.clone(), 41 | &evaluations, 42 | max_degree, 43 | trace_length * lde_blowup, 44 | &positions, 45 | &options, 46 | ); 47 | assert!(result.is_ok(), "{:}", result.err().unwrap()); 48 | 49 | // make sure proof fails for invalid degree 50 | let result = verify_proof( 51 | proof, 52 | commitments, 53 | &evaluations, 54 | max_degree - 256, 55 | trace_length * lde_blowup, 56 | &positions, 57 | &options, 58 | ); 59 | assert!(result.is_err()); 60 | } 61 | 62 | // TEST UTILS 63 | // ================================================================================================ 64 | 65 | pub fn build_prover_channel( 66 | trace_length: usize, 67 | options: 
&FriOptions, 68 | ) -> DefaultProverChannel { 69 | DefaultProverChannel::new(trace_length * options.blowup_factor(), 32) 70 | } 71 | 72 | pub fn build_evaluations(trace_length: usize, lde_blowup: usize) -> Vec { 73 | let mut p = (0..trace_length as u128) 74 | .map(BaseElement::new) 75 | .collect::>(); 76 | let domain_size = trace_length * lde_blowup; 77 | p.resize(domain_size, BaseElement::ZERO); 78 | 79 | let twiddles = fft::get_twiddles::(domain_size); 80 | 81 | fft::evaluate_poly(&mut p, &twiddles); 82 | p 83 | } 84 | 85 | pub fn verify_proof( 86 | proof: FriProof, 87 | commitments: Vec<::Digest>, 88 | evaluations: &[BaseElement], 89 | max_degree: usize, 90 | domain_size: usize, 91 | positions: &[usize], 92 | options: &FriOptions, 93 | ) -> Result<(), VerifierError> { 94 | // test proof serialization / deserialization 95 | let mut proof_bytes = Vec::new(); 96 | proof.write_into(&mut proof_bytes); 97 | 98 | let mut reader = SliceReader::new(&proof_bytes); 99 | let proof = FriProof::read_from(&mut reader).unwrap(); 100 | 101 | // verify the proof 102 | let mut channel = DefaultVerifierChannel::::new( 103 | proof, 104 | commitments, 105 | domain_size, 106 | options.folding_factor(), 107 | ) 108 | .unwrap(); 109 | let mut coin = RandomCoin::::new(&[]); 110 | let verifier = FriVerifier::new(&mut channel, &mut coin, options.clone(), max_degree).unwrap(); 111 | let queried_evaluations = positions 112 | .iter() 113 | .map(|&p| evaluations[p]) 114 | .collect::>(); 115 | verifier.verify(&mut channel, &queried_evaluations, &positions) 116 | } 117 | -------------------------------------------------------------------------------- /winterfell/fri/src/utils.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use crypto::ElementHasher; 7 | use math::FieldElement; 8 | use utils::{collections::Vec, iter_mut, uninit_vector}; 9 | 10 | #[cfg(feature = "concurrent")] 11 | use utils::iterators::*; 12 | 13 | /// Maps positions in the evaluation domain to indexes of commitment Merkle tree. 14 | pub fn map_positions_to_indexes( 15 | positions: &[usize], 16 | source_domain_size: usize, 17 | folding_factor: usize, 18 | num_partitions: usize, 19 | ) -> Vec { 20 | // if there was only 1 partition, order of elements in the commitment tree 21 | // is the same as the order of elements in the evaluation domain 22 | if num_partitions == 1 { 23 | return positions.to_vec(); 24 | } 25 | 26 | let target_domain_size = source_domain_size / folding_factor; 27 | let partition_size = target_domain_size / num_partitions; 28 | 29 | let mut result = Vec::new(); 30 | for position in positions { 31 | let partition_idx = position % num_partitions; 32 | let local_idx = (position - partition_idx) / num_partitions; 33 | let position = partition_idx * partition_size + local_idx; 34 | result.push(position); 35 | } 36 | 37 | result 38 | } 39 | 40 | /// Hashes each of the arrays in the provided slice and returns a vector of resulting hashes. 
41 | pub fn hash_values(values: &[[E; N]]) -> Vec 42 | where 43 | E: FieldElement, 44 | H: ElementHasher, 45 | { 46 | let mut result: Vec = unsafe { uninit_vector(values.len()) }; 47 | iter_mut!(result, 1024).zip(values).for_each(|(r, v)| { 48 | *r = H::hash_elements(v); 49 | }); 50 | result 51 | } 52 | -------------------------------------------------------------------------------- /winterfell/math/.cargo/katex-header.html: -------------------------------------------------------------------------------- 1 | ../../.cargo/katex-header.html -------------------------------------------------------------------------------- /winterfell/math/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-math" 3 | version = "0.4.0" 4 | description = "Math library for the Winterfell STARK prover/verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-math/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "finite-fields", "polynomials", "fft"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [[bench]] 19 | name = "fft" 20 | harness = false 21 | 22 | [[bench]] 23 | name = "field" 24 | harness = false 25 | 26 | [[bench]] 27 | name = "polynom" 28 | harness = false 29 | 30 | [features] 31 | concurrent = ["utils/concurrent", "std"] 32 | default = ["std"] 33 | std = ["utils/std", "serde/std"] 34 | 35 | [dependencies] 36 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 37 | uint = "0.9.3" 38 | serde = { version = "1.0", default-features = false } 39 | 40 | [dev-dependencies] 41 | criterion = "0.3" 42 | num-bigint = "0.4" 43 | proptest = "1.0" 44 | rand-utils = { version = "0.4", path = "../utils/rand", package = "winter-rand-utils" } 45 | 46 | # 
Allow math in docs 47 | [package.metadata.docs.rs] 48 | rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] 49 | -------------------------------------------------------------------------------- /winterfell/math/README.md: -------------------------------------------------------------------------------- 1 | # Winter math 2 | This crate contains modules with mathematical operations needed in STARK proof generation and verification. 3 | 4 | ## Finite field 5 | [Finite field](src/field) module implements arithmetic operations in STARK-friendly finite fields. The operations include: 6 | 7 | * Basic arithmetic operations: addition, multiplication, subtraction, division, inversion. 8 | * Drawing random and pseudo-random elements from the field. 9 | * Computing roots of unity of a given order. 10 | 11 | Currently, there are three implementations of finite fields: 12 | 13 | * A 128-bit field with modulus 2^128 - 45 * 2^40 + 1. This field was not chosen with any significant thought given to performance, and the implementation of most operations is sub-optimal as well. Proofs generated in this field can support security level of ~100 bits. If higher level of security is desired, proofs must be generated in a quadratic extension of the field. 14 | * A 62-bit field with modulus 2^62 - 111 * 2^39 + 1. This field supports very fast modular arithmetic including branchless multiplication and addition. To achieve adequate security (i.e. ~100 bits), proofs must be generated in a quadratic extension of this field. For higher levels of security, a cubic extension field should be used. 15 | * A 64-bit field with modulus 2^64 - 2^32 + 1. This field is about 15% slower than the 62-bit field described above, but it has a number of other attractive properties. To achieve adequate security (i.e. ~100 bits), proofs must be generated in a quadratic extension of this field. For higher levels of security, a cubic extension field should be used. 
16 | 17 | ### Extension fields 18 | Currently, the library provides a generic way to create quadratic and cubic extensions of supported STARK fields. This can be done by implementing `ExtensibleField` trait for degrees 2 and 3. 19 | 20 | Quadratic extension fields are defined using the following irreducible polynomials: 21 | * For `f62` field, the polynomial is x^2 - x - 1. 22 | * For `f64` field, the polynomial is x^2 - x + 2. 23 | * For `f128` field, the polynomial is x^2 - x - 1. 24 | 25 | Cubic extension fields are defined using the following irreducible polynomials: 26 | * For `f62` field, the polynomial is x^3 + 2x + 2. 27 | * For `f64` field, the polynomial is x^3 - x - 1. 28 | * For `f128` field, cubic extensions are not supported. 29 | 30 | ## Polynomials 31 | [Polynomials](src/polynom) module implements basic polynomial operations such as: 32 | 33 | * Evaluation of a polynomial at a single point. 34 | * Interpolation of a polynomial from a set of points (using [Lagrange](https://en.wikipedia.org/wiki/Lagrange_polynomial) interpolation). 35 | * Addition, multiplication, subtraction, and division of polynomials. 36 | * Synthetic polynomial division (using [Ruffini's](https://en.wikipedia.org/wiki/Ruffini%27s_rule) method). 37 | 38 | ## Fast Fourier transform 39 | [FFT](src/fft) module contains operations for computing Fast Fourier transform in a prime field (also called [Number-theoretic transform](https://en.wikipedia.org/wiki/Discrete_Fourier_transform_(general)#Number-theoretic_transform)). This can be used to interpolate and evaluate polynomials in *O(n log n)* time as long as the domain of the polynomial is a multiplicative subgroup with size which is a power of 2. 40 | 41 | ## Crate features 42 | This crate can be compiled with the following features: 43 | 44 | * `std` - enabled by default and relies on the Rust standard library. 45 | * `concurrent` - implies `std` and also enables multi-threaded execution for some of the crate functions. 
46 | * `no_std` - does not rely on Rust's standard library and enables compilation to WebAssembly. 47 | 48 | To compile with `no_std`, disable default features via `--no-default-features` flag. 49 | 50 | ### Concurrent execution 51 | When compiled with `concurrent` feature enabled, the following operations will be executed in multiple threads: 52 | 53 | * fft module: 54 | - `evaluate_poly()` 55 | - `evaluate_poly_with_offset()` 56 | - `interpolate_poly()` 57 | - `interpolate_poly_with_offset()` 58 | - `get_twiddles()` 59 | - `get_inv_twiddles()` 60 | * utils module: 61 | - `get_power_series()` 62 | - `get_power_series_with_offset()` 63 | - `add_in_place()` 64 | - `mul_acc()` 65 | - `batch_inversion()` 66 | 67 | The number of threads can be configured via `RAYON_NUM_THREADS` environment variable, and usually defaults to the number of logical cores on the machine. 68 | 69 | License 70 | ------- 71 | 72 | This project is [MIT licensed](../LICENSE). -------------------------------------------------------------------------------- /winterfell/math/benches/fft.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; 7 | use rand_utils::rand_vector; 8 | use std::time::Duration; 9 | use winter_math::{ 10 | fft, 11 | fields::{f128, f62, f64, CubeExtension, QuadExtension}, 12 | FieldElement, StarkField, 13 | }; 14 | 15 | const SIZES: [usize; 3] = [262_144, 524_288, 1_048_576]; 16 | 17 | fn fft_evaluate_poly(c: &mut Criterion, field_name: &str) 18 | where 19 | B: StarkField, 20 | E: FieldElement, 21 | { 22 | let mut group = c.benchmark_group(format!("{}/fft_evaluate_poly", field_name)); 23 | group.sample_size(10); 24 | group.measurement_time(Duration::from_secs(10)); 25 | 26 | let blowup_factor = 8; 27 | 28 | for &size in SIZES.iter() { 29 | let p: Vec = rand_vector(size / blowup_factor); 30 | let twiddles: Vec = fft::get_twiddles(size); 31 | group.bench_function(BenchmarkId::new("simple", size), |bench| { 32 | bench.iter_with_large_drop(|| { 33 | let mut result = vec![E::ZERO; size]; 34 | result[..p.len()].copy_from_slice(&p); 35 | fft::evaluate_poly(&mut result, &twiddles); 36 | result 37 | }); 38 | }); 39 | } 40 | 41 | for &size in SIZES.iter() { 42 | let p: Vec = rand_vector(size / blowup_factor); 43 | let twiddles: Vec = fft::get_twiddles(size / blowup_factor); 44 | group.bench_function(BenchmarkId::new("with_offset", size), |bench| { 45 | bench.iter_with_large_drop(|| { 46 | let result = 47 | fft::evaluate_poly_with_offset(&p, &twiddles, B::GENERATOR, blowup_factor); 48 | result 49 | }); 50 | }); 51 | } 52 | 53 | group.finish(); 54 | } 55 | 56 | fn fft_interpolate_poly(c: &mut Criterion, field_name: &str) 57 | where 58 | B: StarkField, 59 | E: FieldElement, 60 | { 61 | let mut group = c.benchmark_group(format!("{}/fft_interpolate_poly", field_name)); 62 | group.sample_size(10); 63 | group.measurement_time(Duration::from_secs(10)); 64 | 65 | for &size in SIZES.iter() { 66 | let p: Vec = rand_vector(size); 67 | let inv_twiddles: Vec = fft::get_inv_twiddles(size); 68 | 
group.bench_function(BenchmarkId::new("simple", size), |bench| { 69 | bench.iter_batched_ref( 70 | || p.clone(), 71 | |mut p| fft::interpolate_poly(&mut p, &inv_twiddles), 72 | BatchSize::LargeInput, 73 | ); 74 | }); 75 | } 76 | 77 | for &size in SIZES.iter() { 78 | let p: Vec = rand_vector(size); 79 | let inv_twiddles: Vec = fft::get_inv_twiddles(size); 80 | group.bench_function(BenchmarkId::new("with_offset", size), |bench| { 81 | bench.iter_batched_ref( 82 | || p.clone(), 83 | |mut p| fft::interpolate_poly_with_offset(&mut p, &inv_twiddles, B::GENERATOR), 84 | BatchSize::LargeInput, 85 | ); 86 | }); 87 | } 88 | 89 | group.finish(); 90 | } 91 | 92 | fn get_twiddles(c: &mut Criterion) { 93 | let mut group = c.benchmark_group("fft_get_twiddles"); 94 | group.sample_size(10); 95 | for &size in SIZES.iter() { 96 | group.bench_with_input(BenchmarkId::from_parameter(size), &size, |bench, &size| { 97 | bench.iter(|| fft::get_twiddles::(size)); 98 | }); 99 | } 100 | group.finish(); 101 | } 102 | 103 | fn bench_fft(c: &mut Criterion) { 104 | fft_evaluate_poly::(c, "f62"); 105 | fft_evaluate_poly::(c, "f64"); 106 | fft_evaluate_poly::(c, "f128"); 107 | 108 | fft_evaluate_poly::>(c, "f62_quad"); 109 | fft_evaluate_poly::>(c, "f64_quad"); 110 | fft_evaluate_poly::>(c, "f128_quad"); 111 | 112 | fft_evaluate_poly::>(c, "f64_cube"); 113 | 114 | fft_interpolate_poly::(c, "f62"); 115 | fft_interpolate_poly::(c, "f64"); 116 | fft_interpolate_poly::(c, "f128"); 117 | } 118 | 119 | criterion_group!(fft_group, bench_fft, get_twiddles); 120 | criterion_main!(fft_group); 121 | -------------------------------------------------------------------------------- /winterfell/math/benches/polynom.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; 7 | use rand_utils::rand_vector; 8 | use std::time::Duration; 9 | use winter_math::{fft, fields::f128::BaseElement, polynom, FieldElement}; 10 | 11 | const SIZES: [usize; 3] = [262_144, 524_288, 1_048_576]; 12 | 13 | fn syn_div(c: &mut Criterion) { 14 | let mut group = c.benchmark_group("syn_div"); 15 | group.sample_size(10); 16 | group.measurement_time(Duration::from_secs(10)); 17 | 18 | for &size in SIZES.iter() { 19 | let stride = 8; 20 | let mut values: Vec = rand_vector(size); 21 | for v in values.iter_mut().skip(stride) { 22 | *v = BaseElement::ZERO; 23 | } 24 | let inv_twiddles = fft::get_inv_twiddles::(size); 25 | fft::interpolate_poly(&mut values, &inv_twiddles); 26 | let p = values; 27 | let z_power = size / stride; 28 | 29 | group.bench_function(BenchmarkId::new("high_degree", size), |bench| { 30 | bench.iter_batched_ref( 31 | || p.clone(), 32 | |mut p| polynom::syn_div(&mut p, z_power, BaseElement::ONE), 33 | BatchSize::LargeInput, 34 | ); 35 | }); 36 | } 37 | 38 | group.finish(); 39 | } 40 | 41 | criterion_group!(polynom_group, syn_div); 42 | criterion_main!(polynom_group); 43 | -------------------------------------------------------------------------------- /winterfell/math/src/fft/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use crate::{ 7 | field::{f128::BaseElement, StarkField}, 8 | polynom, 9 | utils::{get_power_series, log2}, 10 | }; 11 | use rand_utils::rand_vector; 12 | use utils::collections::Vec; 13 | 14 | // CORE ALGORITHMS 15 | // ================================================================================================ 16 | 17 | #[test] 18 | fn fft_in_place() { 19 | // degree 3 20 | let n = 4; 21 | let mut p = rand_vector(n); 22 | let domain = build_domain(n); 23 | let expected = polynom::eval_many(&p, &domain); 24 | let twiddles = super::get_twiddles::(n); 25 | super::serial::fft_in_place(&mut p, &twiddles, 1, 1, 0); 26 | super::permute(&mut p); 27 | assert_eq!(expected, p); 28 | 29 | // degree 7 30 | let n = 8; 31 | let mut p = rand_vector(n); 32 | let domain = build_domain(n); 33 | let twiddles = super::get_twiddles::(n); 34 | let expected = polynom::eval_many(&p, &domain); 35 | super::serial::fft_in_place(&mut p, &twiddles, 1, 1, 0); 36 | super::permute(&mut p); 37 | assert_eq!(expected, p); 38 | 39 | // degree 15 40 | let n = 16; 41 | let mut p = rand_vector(n); 42 | let domain = build_domain(n); 43 | let twiddles = super::get_twiddles::(16); 44 | let expected = polynom::eval_many(&p, &domain); 45 | super::serial::fft_in_place(&mut p, &twiddles, 1, 1, 0); 46 | super::permute(&mut p); 47 | assert_eq!(expected, p); 48 | 49 | // degree 1023 50 | let n = 1024; 51 | let mut p = rand_vector(n); 52 | let domain = build_domain(n); 53 | let expected = polynom::eval_many(&p, &domain); 54 | let twiddles = super::get_twiddles::(n); 55 | super::serial::fft_in_place(&mut p, &twiddles, 1, 1, 0); 56 | super::permute(&mut p); 57 | assert_eq!(expected, p); 58 | } 59 | 60 | #[test] 61 | fn fft_get_twiddles() { 62 | let n = super::MIN_CONCURRENT_SIZE * 2; 63 | let g = BaseElement::get_root_of_unity(log2(n)); 64 | 65 | let mut expected = get_power_series(g, n / 2); 66 | super::permute(&mut expected); 67 | 68 | let twiddles = super::get_twiddles::(n); 69 | assert_eq!(expected, 
twiddles); 70 | } 71 | 72 | // HELPER FUNCTIONS 73 | // ================================================================================================ 74 | 75 | fn build_domain(size: usize) -> Vec { 76 | let g = BaseElement::get_root_of_unity(log2(size)); 77 | get_power_series(g, size) 78 | } 79 | -------------------------------------------------------------------------------- /winterfell/math/src/field/extensions/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | mod quadratic; 7 | pub use quadratic::QuadExtension; 8 | 9 | mod cubic; 10 | pub use cubic::CubeExtension; 11 | 12 | use super::{ExtensibleField, ExtensionOf, FieldElement}; 13 | -------------------------------------------------------------------------------- /winterfell/math/src/field/f256/tests.rs: -------------------------------------------------------------------------------- 1 | // BASIC ALGEBRA 2 | // ================================================================================================ 3 | 4 | use num_bigint::BigUint; 5 | use rand_utils::{rand_value, rand_vector}; 6 | 7 | use super::{AsBytes, BaseElement, FieldElement, StarkField, M}; 8 | 9 | #[test] 10 | fn add() { 11 | // test identity 12 | let r: BaseElement = rand_value(); 13 | assert_eq!(r, r + BaseElement::ZERO); 14 | 15 | // test addition within bounds 16 | assert_eq!( 17 | BaseElement::from(5u8), 18 | BaseElement::from(2u8) + BaseElement::from(3u8) 19 | ); 20 | 21 | // test overflow 22 | let t = BaseElement::from(BaseElement::MODULUS - (1 as u64)); 23 | assert_eq!(BaseElement::ZERO, t + BaseElement::ONE); 24 | assert_eq!(BaseElement::ONE, t + BaseElement::from(2u8)); 25 | 26 | // test random values 27 | let r1: BaseElement = rand_value(); 28 | let r2: BaseElement = rand_value(); 29 | 30 | 
let mut bytes = [0u8; 32]; 31 | M.to_little_endian(&mut bytes); 32 | let big_m = BigUint::from_bytes_le(&bytes); 33 | 34 | let expected = (r1.to_biguint() + r2.to_biguint()) % big_m; 35 | let expected = BaseElement::from_biguint(expected); 36 | assert_eq!(expected, r1 + r2); 37 | } 38 | 39 | #[test] 40 | fn sub() { 41 | let r: BaseElement = rand_value(); 42 | assert_eq!(r, r - BaseElement::ZERO); 43 | 44 | // test sub within bounds 45 | assert_eq!( 46 | BaseElement::from(2u8), 47 | BaseElement::from(5u8) - BaseElement::from(3u8) 48 | ); 49 | 50 | // test underflow 51 | let expected = BaseElement::from(BaseElement::MODULUS - 2); 52 | assert_eq!(expected, BaseElement::from(3u8) - BaseElement::from(5u8)); 53 | } 54 | 55 | #[test] 56 | fn mul() { 57 | // identity 58 | let r: BaseElement = rand_value(); 59 | assert_eq!(BaseElement::ZERO, BaseElement::ZERO * r); 60 | assert_eq!(r, r * BaseElement::ONE); 61 | 62 | // test multiplication within bounds 63 | assert_eq!( 64 | BaseElement::from(15u8), 65 | BaseElement::from(3u8) * BaseElement::from(5u8) 66 | ); 67 | 68 | // test overflow 69 | let m = BaseElement::MODULUS; 70 | let t = BaseElement::from(m - 1); 71 | assert_eq!(BaseElement::ONE, t * t); 72 | assert_eq!(BaseElement::from(m - 2), t * BaseElement::from(2u8)); 73 | assert_eq!(BaseElement::from(m - 4), t * BaseElement::from(4u8)); 74 | 75 | let t = (m + 1) / 2; 76 | assert_eq!( 77 | BaseElement::ONE, 78 | BaseElement::from(t) * BaseElement::from(2u8) 79 | ); 80 | } 81 | 82 | #[test] 83 | fn inv() { 84 | // identity 85 | assert_eq!(BaseElement::ONE, BaseElement::inv(BaseElement::ONE)); 86 | assert_eq!(BaseElement::ZERO, BaseElement::inv(BaseElement::ZERO)); 87 | 88 | // test random values 89 | let x: Vec = rand_vector(1000); 90 | for i in 0..x.len() { 91 | let y = BaseElement::inv(x[i]); 92 | assert_eq!(BaseElement::ONE, x[i] * y); 93 | } 94 | } 95 | 96 | // HELPER FUNCTIONS 97 | // 
================================================================================================ 98 | 99 | impl BaseElement { 100 | fn to_biguint(&self) -> BigUint { 101 | BigUint::from_bytes_le(self.as_bytes()) 102 | } 103 | 104 | fn from_biguint(value: BigUint) -> Self { 105 | let bytes = value.to_bytes_le(); 106 | let mut buffer = [0u8; 32]; 107 | buffer[0..bytes.len()].copy_from_slice(&bytes); 108 | BaseElement::try_from(buffer).unwrap() 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /winterfell/math/src/field/f256/u256.rs: -------------------------------------------------------------------------------- 1 | use uint::construct_uint; 2 | 3 | construct_uint! { 4 | /// 256-bit unsigned integer 5 | pub struct U256(4); 6 | } 7 | 8 | impl U256 { 9 | #[inline(always)] 10 | pub fn is_even(&self) -> bool { 11 | *self & U256::one() != U256::one() 12 | } 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::U256; 18 | 19 | #[test] 20 | fn test_add() { 21 | // a = 2^256 - 1 22 | let a = U256::max_value(); 23 | // check overflowing add 24 | assert_eq!(a.overflowing_add(U256::from(1)).0, U256::zero()); 25 | } 26 | 27 | #[test] 28 | fn test_low() { 29 | let a = [1, 1, 1, 1]; 30 | let e = U256(a); 31 | 32 | assert_eq!( 33 | e.low_u128(), 34 | a[0] as u128 + ((a[1] as u128) << 64) 35 | ); 36 | 37 | assert_eq!( 38 | (e >> 128).low_u128(), 39 | a[2] as u128 + ((a[3] as u128) << 64) 40 | ) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /winterfell/math/src/field/f256/u512.rs: -------------------------------------------------------------------------------- 1 | use uint::construct_uint; 2 | use super::u256::U256; 3 | 4 | construct_uint! 
{ 5 | /// 512-bit unsigned integer 6 | pub struct U512(8); 7 | } 8 | 9 | impl U512 { 10 | pub fn low_u256(&self) -> U256 { 11 | let mut slice = [0u64; 4]; 12 | slice.copy_from_slice(&self.0[..4]); 13 | U256(slice) 14 | } 15 | } 16 | 17 | impl From for U512 { 18 | fn from(value: U256) -> Self { 19 | let mut slice = [0u64; 8]; 20 | slice[..4].copy_from_slice(&value.0); 21 | Self(slice) 22 | } 23 | } 24 | 25 | #[cfg(test)] 26 | mod tests { 27 | use super::{U256, U512}; 28 | 29 | #[test] 30 | fn low_u256() { 31 | let a = U512([1, 2, 3, 4, 5, 6, 7, 8]); 32 | assert_eq!( 33 | a.low_u256(), 34 | U256([1, 2, 3, 4]) 35 | ); 36 | } 37 | } -------------------------------------------------------------------------------- /winterfell/math/src/field/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | mod traits; 7 | pub use traits::{ExtensibleField, ExtensionOf, FieldElement, StarkField}; 8 | 9 | pub mod f256; 10 | pub mod f128; 11 | pub mod f62; 12 | pub mod f64; 13 | 14 | mod extensions; 15 | pub use extensions::{CubeExtension, QuadExtension}; 16 | -------------------------------------------------------------------------------- /winterfell/prover/.cargo/katex-header.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VictorColomb/stark-snark-recursive-proofs/185076895b7339daa8dd45b9160ed19a1de9ce3f/winterfell/prover/.cargo/katex-header.html -------------------------------------------------------------------------------- /winterfell/prover/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-prover" 3 | version = "0.4.0" 4 | description = "Winterfell STARK prover" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-prover/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "zkp", "stark", "prover"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [features] 19 | concurrent = ["crypto/concurrent", "math/concurrent", "fri/concurrent", "utils/concurrent", "std"] 20 | default = ["std"] 21 | std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] 22 | 23 | [dependencies] 24 | air = { version = "0.4", path = "../air", package = "winter-air", default-features = false } 25 | crypto = { version = "0.4", path = "../crypto", package = "winter-crypto", default-features = false } 26 | fri = { version = "0.4", path = '../fri', package = "winter-fri", default-features = false } 27 | log = { version = "0.4", default-features = false } 28 | math = { version = "0.4", path = "../math", package = "winter-math", 
default-features = false } 29 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 30 | 31 | # Allow math in docs 32 | [package.metadata.docs.rs] 33 | rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] 34 | -------------------------------------------------------------------------------- /winterfell/prover/src/constraints/commitment.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use super::Matrix; 7 | use air::proof::Queries; 8 | use crypto::{ElementHasher, MerkleTree}; 9 | use math::FieldElement; 10 | use utils::collections::Vec; 11 | 12 | // CONSTRAINT COMMITMENT 13 | // ================================================================================================ 14 | 15 | /// Constraint evaluation commitment. 16 | /// 17 | /// The commitment consists of two components: 18 | /// * Evaluations of composition polynomial columns over the LDE domain. 19 | /// * Merkle tree where each leaf in the tree corresponds to a row in the composition polynomial 20 | /// evaluation matrix. 21 | pub struct ConstraintCommitment> { 22 | evaluations: Matrix, 23 | commitment: MerkleTree, 24 | } 25 | 26 | impl> ConstraintCommitment { 27 | /// Creates a new constraint evaluation commitment from the provided composition polynomial 28 | /// evaluations and the corresponding Merkle tree commitment. 
29 | pub fn new(evaluations: Matrix, commitment: MerkleTree) -> ConstraintCommitment { 30 | assert_eq!( 31 | evaluations.num_rows(), 32 | commitment.leaves().len(), 33 | "number of rows in constraint evaluation matrix must be the same as number of leaves in constraint commitment" 34 | ); 35 | ConstraintCommitment { 36 | evaluations, 37 | commitment, 38 | } 39 | } 40 | 41 | /// Returns the root of the commitment Merkle tree. 42 | pub fn root(&self) -> H::Digest { 43 | *self.commitment.root() 44 | } 45 | 46 | /// Returns the depth of the commitment Merkle tree. 47 | #[allow(unused)] 48 | pub fn tree_depth(&self) -> usize { 49 | self.commitment.depth() 50 | } 51 | 52 | /// Returns constraint evaluations at the specified positions along with Merkle authentication 53 | /// paths from the root of the commitment to these evaluations. 54 | pub fn query(self, positions: &[usize]) -> Queries { 55 | // build Merkle authentication paths to the leaves specified by positions 56 | let merkle_proof = self 57 | .commitment 58 | .prove_batch(positions) 59 | .expect("failed to generate a Merkle proof for constraint queries"); 60 | 61 | // determine a set of evaluations corresponding to each position 62 | let mut evaluations = Vec::new(); 63 | for &position in positions { 64 | let mut row = vec![E::ZERO; self.evaluations.num_cols()]; 65 | self.evaluations.read_row_into(position, &mut row); 66 | evaluations.push(row); 67 | } 68 | 69 | Queries::new(merkle_proof, evaluations) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /winterfell/prover/src/constraints/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{ConstraintDivisor, Matrix, ProverError, StarkDomain}; 7 | 8 | mod boundary; 9 | use boundary::BoundaryConstraints; 10 | 11 | mod periodic_table; 12 | use periodic_table::PeriodicValueTable; 13 | 14 | mod evaluator; 15 | pub use evaluator::ConstraintEvaluator; 16 | 17 | mod composition_poly; 18 | pub use composition_poly::CompositionPoly; 19 | 20 | mod evaluation_table; 21 | pub use evaluation_table::ConstraintEvaluationTable; 22 | 23 | mod commitment; 24 | pub use commitment::ConstraintCommitment; 25 | -------------------------------------------------------------------------------- /winterfell/prover/src/domain.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use air::Air; 7 | use math::{fft, log2, StarkField}; 8 | use utils::collections::Vec; 9 | 10 | // TYPES AND INTERFACES 11 | // ================================================================================================ 12 | 13 | pub struct StarkDomain { 14 | /// Twiddles which can be used to evaluate polynomials in the trace domain. Length of this 15 | /// vector is half the length of the trace domain size. 16 | trace_twiddles: Vec, 17 | 18 | /// Size of the constraint evaluation domain. 19 | ce_domain_size: usize, 20 | 21 | /// LDE domain size / constraint evaluation domain size 22 | ce_to_lde_blowup: usize, 23 | 24 | /// Offset of the low-degree extension domain. 25 | domain_offset: B, 26 | } 27 | 28 | // STARK DOMAIN IMPLEMENTATION 29 | // ================================================================================================ 30 | 31 | impl StarkDomain { 32 | /// Returns a new STARK domain initialized with the provided `context`. 
33 | pub fn new>(air: &A) -> Self { 34 | let trace_twiddles = fft::get_twiddles(air.trace_length()); 35 | StarkDomain { 36 | trace_twiddles, 37 | ce_domain_size: air.ce_domain_size(), 38 | ce_to_lde_blowup: air.lde_domain_size() / air.ce_domain_size(), 39 | domain_offset: air.domain_offset(), 40 | } 41 | } 42 | 43 | // EXECUTION TRACE 44 | // -------------------------------------------------------------------------------------------- 45 | 46 | /// Returns length of the execution trace for this computation. 47 | pub fn trace_length(&self) -> usize { 48 | &self.trace_twiddles.len() * 2 49 | } 50 | 51 | /// Returns twiddles which can be used to evaluate trace polynomials. 52 | pub fn trace_twiddles(&self) -> &[B] { 53 | &self.trace_twiddles 54 | } 55 | 56 | /// Returns blowup factor from trace to constraint evaluation domain. 57 | pub fn trace_to_ce_blowup(&self) -> usize { 58 | self.ce_domain_size() / self.trace_length() 59 | } 60 | 61 | /// Returns blowup factor from trace to LDE domain. 62 | pub fn trace_to_lde_blowup(&self) -> usize { 63 | self.lde_domain_size() / self.trace_length() 64 | } 65 | 66 | // CONSTRAINT EVALUATION DOMAIN 67 | // -------------------------------------------------------------------------------------------- 68 | 69 | /// Returns the size of the constraint evaluation domain for this computation. 70 | pub fn ce_domain_size(&self) -> usize { 71 | self.ce_domain_size 72 | } 73 | 74 | /// Returns the generator of constraint evaluation domain. 75 | pub fn ce_domain_generator(&self) -> B { 76 | B::get_root_of_unity(log2(self.ce_domain_size())) 77 | } 78 | 79 | /// Returns blowup factor from constraint evaluation to LDE domain. 80 | pub fn ce_to_lde_blowup(&self) -> usize { 81 | self.ce_to_lde_blowup 82 | } 83 | 84 | // LOW-DEGREE EXTENSION DOMAIN 85 | // -------------------------------------------------------------------------------------------- 86 | 87 | /// Returns the size of the low-degree extension domain. 
88 | pub fn lde_domain_size(&self) -> usize { 89 | self.ce_domain_size() * self.ce_to_lde_blowup() 90 | } 91 | 92 | /// Returns LDE domain offset. 93 | pub fn offset(&self) -> B { 94 | self.domain_offset 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /winterfell/prover/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! Contains common error types for prover and verifier. 7 | 8 | use core::fmt; 9 | 10 | // PROVER ERROR 11 | // ================================================================================================ 12 | /// Represents an error returned by the prover during an execution of the protocol. 13 | #[derive(Debug, PartialEq)] 14 | pub enum ProverError { 15 | /// This error occurs when a transition constraint evaluated over a specific execution trace 16 | /// does not evaluate to zero at any of the steps. 17 | UnsatisfiedTransitionConstraintError(usize), 18 | /// This error occurs when polynomials built from the columns of a constraint evaluation 19 | /// table do not all have the same degree. 20 | MismatchedConstraintPolynomialDegree(usize, usize), 21 | /// This error occurs when the base field specified by the AIR does not support field extension 22 | /// of degree specified by proof options. 
23 | UnsupportedFieldExtension(usize), 24 | } 25 | 26 | impl fmt::Display for ProverError { 27 | #[rustfmt::skip] 28 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 29 | match self { 30 | Self::UnsatisfiedTransitionConstraintError(step) => { 31 | write!(f, "a transition constraint was not satisfied at step {}", step) 32 | } 33 | Self::MismatchedConstraintPolynomialDegree(expected, actual) => { 34 | write!(f, "the constraint polynomial's components do not all have the same degree; expected {}, but was {}", expected, actual) 35 | } 36 | Self::UnsupportedFieldExtension(degree) => { 37 | write!(f, "field extension of degree {} is not supported for the specified base field", degree) 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /winterfell/prover/src/tests/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use crate::TraceTable; 7 | use air::{ 8 | Air, AirContext, Assertion, EvaluationFrame, FieldExtension, HashFunction, ProofOptions, 9 | TraceInfo, TransitionConstraintDegree, 10 | }; 11 | use math::{fields::f128::BaseElement, FieldElement, StarkField}; 12 | use utils::collections::Vec; 13 | 14 | // FIBONACCI TRACE BUILDER 15 | // ================================================================================================ 16 | 17 | pub fn build_fib_trace(length: usize) -> TraceTable { 18 | assert!(length.is_power_of_two(), "length must be a power of 2"); 19 | 20 | let mut reg1 = vec![BaseElement::ONE]; 21 | let mut reg2 = vec![BaseElement::ONE]; 22 | 23 | for i in 0..(length / 2 - 1) { 24 | reg1.push(reg1[i] + reg2[i]); 25 | reg2.push(reg1[i] + BaseElement::from(2u8) * reg2[i]); 26 | } 27 | 28 | TraceTable::init(vec![reg1, reg2]) 29 | } 30 | 31 | // MOCK AIR 32 | // ================================================================================================ 33 | 34 | pub struct MockAir { 35 | context: AirContext, 36 | assertions: Vec>, 37 | periodic_columns: Vec>, 38 | } 39 | 40 | impl MockAir { 41 | pub fn with_trace_length(trace_length: usize) -> Self { 42 | Self::new( 43 | TraceInfo::new(4, trace_length), 44 | (), 45 | ProofOptions::new( 46 | 32, 47 | 8, 48 | 0, 49 | HashFunction::Blake3_256, 50 | FieldExtension::None, 51 | 4, 52 | 256, 53 | ), 54 | ) 55 | } 56 | 57 | pub fn with_periodic_columns( 58 | column_values: Vec>, 59 | trace_length: usize, 60 | ) -> Self { 61 | let mut result = Self::new( 62 | TraceInfo::new(4, trace_length), 63 | (), 64 | ProofOptions::new( 65 | 32, 66 | 8, 67 | 0, 68 | HashFunction::Blake3_256, 69 | FieldExtension::None, 70 | 4, 71 | 256, 72 | ), 73 | ); 74 | result.periodic_columns = column_values; 75 | result 76 | } 77 | 78 | pub fn with_assertions(assertions: Vec>, trace_length: usize) -> Self { 79 | let mut result = Self::new( 80 | TraceInfo::new(4, trace_length), 81 | (), 82 | ProofOptions::new( 83 | 32, 84 | 8, 85 
| 0, 86 | HashFunction::Blake3_256, 87 | FieldExtension::None, 88 | 4, 89 | 256, 90 | ), 91 | ); 92 | result.assertions = assertions; 93 | result 94 | } 95 | } 96 | 97 | impl Air for MockAir { 98 | type BaseField = BaseElement; 99 | type PublicInputs = (); 100 | 101 | fn new(trace_info: TraceInfo, _pub_inputs: (), _options: ProofOptions) -> Self { 102 | let context = build_context(trace_info, 8, 1); 103 | MockAir { 104 | context, 105 | assertions: Vec::new(), 106 | periodic_columns: Vec::new(), 107 | } 108 | } 109 | 110 | fn context(&self) -> &AirContext { 111 | &self.context 112 | } 113 | 114 | fn evaluate_transition>( 115 | &self, 116 | _frame: &EvaluationFrame, 117 | _periodic_values: &[E], 118 | _result: &mut [E], 119 | ) { 120 | } 121 | 122 | fn get_assertions(&self) -> Vec> { 123 | self.assertions.clone() 124 | } 125 | 126 | fn get_periodic_column_values(&self) -> Vec> { 127 | self.periodic_columns.clone() 128 | } 129 | } 130 | 131 | // HELPER FUNCTIONS 132 | // ================================================================================================ 133 | 134 | fn build_context( 135 | trace_info: TraceInfo, 136 | blowup_factor: usize, 137 | num_assertions: usize, 138 | ) -> AirContext { 139 | let options = ProofOptions::new( 140 | 32, 141 | blowup_factor, 142 | 0, 143 | HashFunction::Blake3_256, 144 | FieldExtension::None, 145 | 4, 146 | 256, 147 | ); 148 | let t_degrees = vec![TransitionConstraintDegree::new(2)]; 149 | AirContext::new(trace_info, t_degrees, num_assertions, options) 150 | } 151 | -------------------------------------------------------------------------------- /winterfell/prover/src/trace/poly_table.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use crate::{ 7 | matrix::{ColumnIter, MultiColumnIter}, 8 | Matrix, 9 | }; 10 | use math::{log2, FieldElement, StarkField}; 11 | use utils::collections::Vec; 12 | 13 | // TRACE POLYNOMIAL TABLE 14 | // ================================================================================================ 15 | 16 | /// Trace polynomials in coefficient from for all segments of the execution trace. 17 | /// 18 | /// Coefficients of the polynomials for the main trace segment are always in the base field. 19 | /// However, coefficients of the polynomials for the auxiliary trace segments may be either in the 20 | /// base field, or in the extension field, depending on whether extension field is being used. 21 | pub struct TracePolyTable { 22 | main_segment_polys: Matrix, 23 | aux_segment_polys: Vec>, 24 | } 25 | 26 | impl TracePolyTable { 27 | // CONSTRUCTOR 28 | // -------------------------------------------------------------------------------------------- 29 | /// Creates a new table of trace polynomials from the provided main trace segment polynomials. 30 | pub fn new(main_trace_polys: Matrix) -> Self { 31 | Self { 32 | main_segment_polys: main_trace_polys, 33 | aux_segment_polys: Vec::new(), 34 | } 35 | } 36 | 37 | // STATE MUTATORS 38 | // -------------------------------------------------------------------------------------------- 39 | 40 | /// Adds the provided auxiliary segment polynomials to this polynomial table. 41 | pub fn add_aux_segment(&mut self, aux_segment_polys: Matrix) { 42 | assert_eq!( 43 | self.main_segment_polys.num_rows(), 44 | aux_segment_polys.num_rows(), 45 | "polynomials in auxiliary segment must be of the same size as in the main segment" 46 | ); 47 | self.aux_segment_polys.push(aux_segment_polys); 48 | } 49 | 50 | // PUBLIC ACCESSORS 51 | // -------------------------------------------------------------------------------------------- 52 | 53 | /// Returns the size of each polynomial - i.e. size of a vector needed to hold a polynomial. 
54 | pub fn poly_size(&self) -> usize { 55 | self.main_segment_polys.num_rows() 56 | } 57 | 58 | /// Evaluates all trace polynomials (across all trace segments) at the specified point `x`. 59 | pub fn evaluate_at(&self, x: E) -> Vec { 60 | let mut result = self.main_segment_polys.evaluate_columns_at(x); 61 | for aux_polys in self.aux_segment_polys.iter() { 62 | result.append(&mut aux_polys.evaluate_columns_at(x)); 63 | } 64 | result 65 | } 66 | 67 | /// Returns an out-of-domain evaluation frame constructed by evaluating trace polynomials 68 | /// for all columns at points z and z * g, where g is the generator of the trace domain. 69 | pub fn get_ood_frame(&self, z: E) -> Vec> { 70 | let g = E::from(E::BaseField::get_root_of_unity(log2(self.poly_size()))); 71 | vec![self.evaluate_at(z), self.evaluate_at(z * g)] 72 | } 73 | 74 | /// Returns an iterator over the polynomials of the main trace segment. 75 | pub fn main_trace_polys(&self) -> ColumnIter { 76 | self.main_segment_polys.columns() 77 | } 78 | 79 | /// Returns an iterator over the polynomials of all auxiliary trace segments. 80 | pub fn aux_trace_polys(&self) -> MultiColumnIter { 81 | MultiColumnIter::new(self.aux_segment_polys.as_slice()) 82 | } 83 | 84 | // TEST HELPERS 85 | // -------------------------------------------------------------------------------------------- 86 | 87 | /// Returns the number of polynomials in the main segment of the trace. 88 | #[cfg(test)] 89 | pub fn num_main_trace_polys(&self) -> usize { 90 | self.main_segment_polys.num_cols() 91 | } 92 | 93 | /// Returns a polynomial from the main segment of the trace at the specified index. 
94 | #[cfg(test)] 95 | pub fn get_main_trace_poly(&self, idx: usize) -> &[E::BaseField] { 96 | &self.main_segment_polys.get_column(idx) 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /winterfell/prover/src/trace/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use crate::{ 7 | tests::{build_fib_trace, MockAir}, 8 | trace::TracePolyTable, 9 | StarkDomain, Trace, TraceCommitment, 10 | }; 11 | use crypto::{hashers::Blake3_256, ElementHasher, MerkleTree}; 12 | use math::{ 13 | fields::f128::BaseElement, get_power_series, get_power_series_with_offset, log2, polynom, 14 | FieldElement, StarkField, 15 | }; 16 | use utils::collections::Vec; 17 | 18 | type Blake3 = Blake3_256; 19 | 20 | #[test] 21 | fn new_trace_table() { 22 | let trace_length = 8; 23 | let trace = build_fib_trace(trace_length * 2); 24 | 25 | assert_eq!(2, trace.main_trace_width()); 26 | assert_eq!(8, trace.length()); 27 | 28 | let expected: Vec = vec![1u32, 2, 5, 13, 34, 89, 233, 610] 29 | .into_iter() 30 | .map(BaseElement::from) 31 | .collect(); 32 | assert_eq!(expected, trace.get_column(0)); 33 | 34 | let expected: Vec = vec![1u32, 3, 8, 21, 55, 144, 377, 987] 35 | .into_iter() 36 | .map(BaseElement::from) 37 | .collect(); 38 | assert_eq!(expected, trace.get_column(1)); 39 | } 40 | 41 | #[test] 42 | fn extend_trace_table() { 43 | // build the trace and the domain 44 | let trace_length = 8; 45 | let air = MockAir::with_trace_length(trace_length); 46 | let trace = build_fib_trace(trace_length * 2); 47 | let domain = StarkDomain::new(&air); 48 | 49 | // build extended trace commitment 50 | let trace_polys = trace.main_segment().interpolate_columns(); 51 | let trace_lde = 
trace_polys.evaluate_columns_over(&domain); 52 | let trace_tree = trace_lde.commit_to_rows::(); 53 | let trace_comm = TraceCommitment::::new( 54 | trace_lde, 55 | trace_tree, 56 | domain.trace_to_lde_blowup(), 57 | ); 58 | let trace_polys = TracePolyTable::::new(trace_polys); 59 | 60 | assert_eq!(2, trace_comm.trace_table().main_trace_width()); 61 | assert_eq!(64, trace_comm.trace_table().trace_len()); 62 | 63 | // make sure trace polynomials evaluate to Fibonacci trace 64 | let trace_root = BaseElement::get_root_of_unity(log2(trace_length)); 65 | let trace_domain = get_power_series(trace_root, trace_length); 66 | assert_eq!(2, trace_polys.num_main_trace_polys()); 67 | assert_eq!( 68 | vec![1u32, 2, 5, 13, 34, 89, 233, 610] 69 | .into_iter() 70 | .map(BaseElement::from) 71 | .collect::>(), 72 | polynom::eval_many(trace_polys.get_main_trace_poly(0), &trace_domain) 73 | ); 74 | assert_eq!( 75 | vec![1u32, 3, 8, 21, 55, 144, 377, 987] 76 | .into_iter() 77 | .map(BaseElement::from) 78 | .collect::>(), 79 | polynom::eval_many(trace_polys.get_main_trace_poly(1), &trace_domain) 80 | ); 81 | 82 | // make sure column values are consistent with trace polynomials 83 | let lde_domain = build_lde_domain(domain.lde_domain_size()); 84 | assert_eq!( 85 | trace_polys.get_main_trace_poly(0), 86 | polynom::interpolate(&lde_domain, trace_comm.get_main_trace_column(0), true) 87 | ); 88 | assert_eq!( 89 | trace_polys.get_main_trace_poly(1), 90 | polynom::interpolate(&lde_domain, trace_comm.get_main_trace_column(1), true) 91 | ); 92 | } 93 | 94 | #[test] 95 | fn commit_trace_table() { 96 | // build the trade and the domain 97 | let trace_length = 8; 98 | let air = MockAir::with_trace_length(trace_length); 99 | let trace = build_fib_trace(trace_length * 2); 100 | let domain = StarkDomain::new(&air); 101 | 102 | // build extended trace commitment 103 | let trace_polys = trace.main_segment().interpolate_columns(); 104 | let trace_lde = trace_polys.evaluate_columns_over(&domain); 105 | let 
trace_tree = trace_lde.commit_to_rows::(); 106 | let trace_comm = TraceCommitment::::new( 107 | trace_lde, 108 | trace_tree, 109 | domain.trace_to_lde_blowup(), 110 | ); 111 | 112 | // build Merkle tree from trace rows 113 | let trace_table = trace_comm.trace_table(); 114 | let mut hashed_states = Vec::new(); 115 | let mut trace_state = vec![BaseElement::ZERO; trace_table.main_trace_width()]; 116 | #[allow(clippy::needless_range_loop)] 117 | for i in 0..trace_table.trace_len() { 118 | for j in 0..trace_table.main_trace_width() { 119 | trace_state[j] = trace_table.get_main_segment().get(j, i); 120 | } 121 | let buf = Blake3::hash_elements(&trace_state); 122 | hashed_states.push(buf); 123 | } 124 | let expected_tree = MerkleTree::::new(hashed_states).unwrap(); 125 | 126 | // compare the result 127 | assert_eq!(*expected_tree.root(), trace_comm.main_trace_root()) 128 | } 129 | 130 | // HELPER FUNCTIONS 131 | // ================================================================================================ 132 | 133 | fn build_lde_domain(domain_size: usize) -> Vec { 134 | let g = B::get_root_of_unity(log2(domain_size)); 135 | get_power_series_with_offset(g, B::GENERATOR, domain_size) 136 | } 137 | -------------------------------------------------------------------------------- /winterfell/prover/src/trace/trace_lde.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use crate::Matrix; 7 | use air::EvaluationFrame; 8 | use math::FieldElement; 9 | use utils::collections::Vec; 10 | 11 | // TRACE LOW DEGREE EXTENSION 12 | // ================================================================================================ 13 | /// TODO: add docs 14 | pub struct TraceLde { 15 | main_segment_lde: Matrix, 16 | aux_segment_ldes: Vec>, 17 | blowup: usize, 18 | } 19 | 20 | impl TraceLde { 21 | // CONSTRUCTOR 22 | // -------------------------------------------------------------------------------------------- 23 | /// Creates a new trace low-degree extension table from the provided main trace segment LDE. 24 | pub fn new(main_trace_lde: Matrix, blowup: usize) -> Self { 25 | Self { 26 | main_segment_lde: main_trace_lde, 27 | aux_segment_ldes: Vec::new(), 28 | blowup, 29 | } 30 | } 31 | 32 | // STATE MUTATORS 33 | // -------------------------------------------------------------------------------------------- 34 | 35 | /// Adds the provided auxiliary segment LDE to this trace LDE. 36 | pub fn add_aux_segment(&mut self, aux_segment_lde: Matrix) { 37 | assert_eq!( 38 | self.main_segment_lde.num_rows(), 39 | aux_segment_lde.num_rows(), 40 | "number of rows in auxiliary segment must be of the same as in the main segment" 41 | ); 42 | self.aux_segment_ldes.push(aux_segment_lde); 43 | } 44 | 45 | // PUBLIC ACCESSORS 46 | // -------------------------------------------------------------------------------------------- 47 | 48 | /// Returns number of columns in the main segment of the execution trace. 49 | pub fn main_trace_width(&self) -> usize { 50 | self.main_segment_lde.num_cols() 51 | } 52 | 53 | /// Returns number of columns in the auxiliary segments of the execution trace. 54 | pub fn aux_trace_width(&self) -> usize { 55 | self.aux_segment_ldes 56 | .iter() 57 | .fold(0, |s, m| s + m.num_cols()) 58 | } 59 | 60 | /// Returns the number of rows in the execution trace. 
61 | pub fn trace_len(&self) -> usize { 62 | self.main_segment_lde.num_rows() 63 | } 64 | 65 | /// Returns blowup factor which was used to extend original execution trace into trace LDE. 66 | pub fn blowup(&self) -> usize { 67 | self.blowup 68 | } 69 | 70 | /// Reads current and next rows from the main trace segment into the specified frame. 71 | pub fn read_main_trace_frame_into( 72 | &self, 73 | lde_step: usize, 74 | frame: &mut EvaluationFrame, 75 | ) { 76 | // at the end of the trace, next state wraps around and we read the first step again 77 | let next_lde_step = (lde_step + self.blowup()) % self.trace_len(); 78 | 79 | // copy main trace segment values into the frame 80 | self.main_segment_lde 81 | .read_row_into(lde_step, frame.current_mut()); 82 | self.main_segment_lde 83 | .read_row_into(next_lde_step, frame.next_mut()); 84 | } 85 | 86 | /// Reads current and next rows from the auxiliary trace segment into the specified frame. 87 | pub fn read_aux_trace_frame_into(&self, lde_step: usize, frame: &mut EvaluationFrame) { 88 | // at the end of the trace, next state wraps around and we read the first step again 89 | let next_lde_step = (lde_step + self.blowup()) % self.trace_len(); 90 | 91 | //copy auxiliary trace segment values into the frame 92 | let mut offset = 0; 93 | for segment in self.aux_segment_ldes.iter() { 94 | segment.read_row_into(lde_step, &mut frame.current_mut()[offset..]); 95 | segment.read_row_into(next_lde_step, &mut frame.next_mut()[offset..]); 96 | offset += segment.num_cols(); 97 | } 98 | } 99 | 100 | /// Returns a reference to [Matrix] representing the main trace segment. 101 | pub fn get_main_segment(&self) -> &Matrix { 102 | &self.main_segment_lde 103 | } 104 | 105 | /// Returns a reference to a [Matrix] representing an auxiliary trace segment at the specified 106 | /// index. 
107 | pub fn get_aux_segment(&self, aux_segment_idx: usize) -> &Matrix { 108 | &self.aux_segment_ldes[aux_segment_idx] 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /winterfell/utils/core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-utils" 3 | version = "0.4.0" 4 | description = "Utilities for the Winterfell STARK prover/verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-utils/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["serialization", "transmute"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [features] 19 | concurrent = ["rayon", "std"] 20 | default = ["std"] 21 | std = [] 22 | 23 | [dependencies] 24 | rayon = { version = "1.5", optional = true } 25 | -------------------------------------------------------------------------------- /winterfell/utils/core/README.md: -------------------------------------------------------------------------------- 1 | # Winter utils 2 | This crate contains utilities used by the Winterfell STARK prover and verifier. These utilities fall into the following broad categories: 3 | 4 | * Traits used for serialization and deserialization. 5 | * Functions for transmuting vectors and slices. 6 | * Macros for easily switching between regular and parallel iterators. 7 | * Feature-based re-exports of collections and strings. 8 | 9 | ## Crate features 10 | This crate can be compiled with the following features: 11 | 12 | * `std` - enabled by default and relies on the Rust standard library. 13 | * `concurrent` - implies `std` and also re-exports `rayon` crate and enables multi-threaded execution for some of the crate functions. 
14 | * `no_std` - does not rely on Rust's standard library and enables compilation to WebAssembly. 15 | 16 | To compile with `no_std`, disable default features via `--no-default-features` flag. 17 | 18 | ### Concurrent execution 19 | 20 | When compiled with `concurrent` feature enabled, this crate re-exports `rayon` crate and executes the following functions using multiple threads: 21 | 22 | * `transpose_slice()` 23 | 24 | The number of threads can be configured via `RAYON_NUM_THREADS` environment variable, and usually defaults to the number of logical cores on the machine. 25 | 26 | License 27 | ------- 28 | 29 | This project is [MIT licensed](../LICENSE). -------------------------------------------------------------------------------- /winterfell/utils/core/src/collections.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! Feature-based re-export of common collection components. 7 | //! 8 | //! When `std` feature is enabled, this module exports collections from the Rust standard library. 9 | //! When `alloc` feature is enabled, same collected are provided without relying on the Rust 10 | //! standard library. 11 | 12 | #[cfg(not(feature = "std"))] 13 | pub use alloc::collections::{BTreeMap, BTreeSet}; 14 | 15 | #[cfg(not(feature = "std"))] 16 | pub use alloc::vec::{self as vec, Vec}; 17 | 18 | #[cfg(feature = "std")] 19 | pub use std::collections::{BTreeMap, BTreeSet}; 20 | 21 | #[cfg(feature = "std")] 22 | pub use std::vec::{self as vec, Vec}; 23 | -------------------------------------------------------------------------------- /winterfell/utils/core/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 
2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | use crate::string::String; 7 | use core::fmt; 8 | 9 | // DESERIALIZATION ERROR 10 | // ================================================================================================ 11 | 12 | /// Defines errors which can occur during deserialization. 13 | #[derive(Debug, PartialEq)] 14 | pub enum DeserializationError { 15 | /// Bytes in the input do not represent a valid value. 16 | InvalidValue(String), 17 | /// An end of input was reached before a valid value could be deserialized. 18 | UnexpectedEOF, 19 | /// Deserialization has finished but not all bytes have been consumed. 20 | UnconsumedBytes, 21 | /// An unknown error has occurred. 22 | UnknownError(String), 23 | } 24 | 25 | impl fmt::Display for DeserializationError { 26 | #[rustfmt::skip] 27 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 28 | match self { 29 | Self::InvalidValue(err_msg) => { 30 | write!(f, "{}", err_msg) 31 | } 32 | Self::UnexpectedEOF => { 33 | write!(f, "unexpected EOF") 34 | } 35 | Self::UnconsumedBytes => { 36 | write!(f, "not all bytes were consumed") 37 | } 38 | Self::UnknownError(err_msg) => { 39 | write!(f, "unknown error: {}", err_msg) 40 | } 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /winterfell/utils/core/src/iterators.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! Components needed for parallel iterators. 7 | //! 8 | //! When `concurrent` feature is enabled, this module re-exports `rayon::prelude`. Otherwise, 9 | //! this is an empty module. 
10 | 11 | #[cfg(feature = "concurrent")] 12 | pub use rayon::{current_num_threads as rayon_num_threads, prelude::*}; 13 | 14 | /// Returns either a regular or a parallel iterator depending on whether `concurrent` feature 15 | /// is enabled. 16 | /// 17 | /// When `concurrent` feature is enabled, creates a parallel iterator; otherwise, creates a 18 | /// regular iterator. Optionally, `min_length` can be used to specify the minimum length of 19 | /// iterator to be processed in each thread. 20 | /// 21 | /// Adapted from: 22 | #[macro_export] 23 | macro_rules! iter { 24 | ($e: expr) => {{ 25 | #[cfg(feature = "concurrent")] 26 | let result = $e.par_iter(); 27 | 28 | #[cfg(not(feature = "concurrent"))] 29 | let result = $e.iter(); 30 | 31 | result 32 | }}; 33 | ($e: expr, $min_len: expr) => {{ 34 | #[cfg(feature = "concurrent")] 35 | let result = $e.par_iter().with_min_len($min_len); 36 | 37 | #[cfg(not(feature = "concurrent"))] 38 | let result = $e.iter(); 39 | 40 | result 41 | }}; 42 | } 43 | 44 | /// Returns either a regular or a parallel mutable iterator depending on whether `concurrent` 45 | /// feature is enabled. 46 | /// 47 | /// When `concurrent` feature is enabled, creates a mutable parallel iterator; otherwise, 48 | /// creates a regular mutable iterator. Optionally, `min_length` can be used to specify the 49 | /// minimum length of iterator to be processed in each thread. 50 | /// 51 | /// Adapted from: 52 | #[macro_export] 53 | macro_rules! 
iter_mut { 54 | ($e: expr) => {{ 55 | #[cfg(feature = "concurrent")] 56 | let result = $e.par_iter_mut(); 57 | 58 | #[cfg(not(feature = "concurrent"))] 59 | let result = $e.iter_mut(); 60 | 61 | result 62 | }}; 63 | ($e: expr, $min_len: expr) => {{ 64 | #[cfg(feature = "concurrent")] 65 | let result = $e.par_iter_mut().with_min_len($min_len); 66 | 67 | #[cfg(not(feature = "concurrent"))] 68 | let result = $e.iter_mut(); 69 | 70 | result 71 | }}; 72 | } 73 | 74 | /// Applies a procedure to the provided slice either in a single thread or multiple threads 75 | /// based on whether `concurrent` feature is enabled. 76 | /// 77 | /// When `concurrent` feature is enabled, breaks the slice into batches and processes each 78 | /// batch in a separate thread; otherwise, the entire slice is processed as a single batch 79 | /// in one thread. Optionally, `min_batch_size` can be used to specify the minimum size of 80 | /// the resulting batches. 81 | #[macro_export] 82 | macro_rules! batch_iter_mut { 83 | ($e: expr, $c: expr) => { 84 | #[cfg(feature = "concurrent")] 85 | { 86 | let batch_size = $e.len() / rayon_num_threads().next_power_of_two(); 87 | if batch_size < 1 { 88 | $c($e, 0); 89 | } 90 | else { 91 | $e.par_chunks_mut(batch_size).enumerate().for_each(|(i, batch)| { 92 | $c(batch, i * batch_size); 93 | }); 94 | } 95 | } 96 | 97 | #[cfg(not(feature = "concurrent"))] 98 | $c($e, 0); 99 | }; 100 | ($e: expr, $min_batch_size: expr, $c: expr) => { 101 | #[cfg(feature = "concurrent")] 102 | { 103 | let batch_size = $e.len() / rayon_num_threads().next_power_of_two(); 104 | if batch_size < $min_batch_size { 105 | $c($e, 0); 106 | } 107 | else { 108 | $e.par_chunks_mut(batch_size).enumerate().for_each(|(i, batch)| { 109 | $c(batch, i * batch_size); 110 | }); 111 | } 112 | } 113 | 114 | #[cfg(not(feature = "concurrent"))] 115 | $c($e, 0); 116 | }; 117 | } 118 | -------------------------------------------------------------------------------- /winterfell/utils/core/src/string.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! Feature-based re-export of common string components. 7 | //! 8 | //! When `std` feature is enabled, this module exports string components from the Rust standard 9 | //! library. When `alloc` feature is enabled, the same components are provided without relying on the 10 | //! Rust standard library. 11 | 12 | #[cfg(not(feature = "std"))] 13 | pub use alloc::string::{String, ToString}; 14 | 15 | #[cfg(feature = "std")] 16 | pub use std::string::{String, ToString}; 17 | -------------------------------------------------------------------------------- /winterfell/utils/core/src/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use super::{collections::Vec, ByteReader, ByteWriter, Serializable, SliceReader}; 7 | 8 | // VECTOR UTILS TESTS 9 | // ================================================================================================ 10 | 11 | #[test] 12 | fn group_vector_elements() { 13 | let n = 16; 14 | let a = (0..n).map(|v| v as u64).collect::<Vec<_>>(); 15 | 16 | let b = super::group_vector_elements::<u64, 4>(a.clone()); 17 | for i in 0..b.len() { 18 | for j in 0..4 { 19 | assert_eq!(a[i * 4 + j], b[i][j]); 20 | } 21 | } 22 | 23 | let b = super::group_vector_elements::<u64, 2>(a.clone()); 24 | for i in 0..b.len() { 25 | for j in 0..2 { 26 | assert_eq!(a[i * 2 + j], b[i][j]); 27 | } 28 | } 29 | } 30 | 31 | // SLICE READER TESTS 32 | // ================================================================================================ 33 | 34 | #[test] 35 | fn read_u8() { 36 | let source = [1u8, 3, 5, 7]; 37 | let mut a = SliceReader::new(&source); 38 | 39 | assert_eq!(1, a.read_u8().unwrap()); 40 | assert_eq!(3, a.read_u8().unwrap()); 41 | assert_eq!(5, a.read_u8().unwrap()); 42 | assert_eq!(7, a.read_u8().unwrap()); 43 | assert!(a.read_u8().is_err()); 44 | } 45 | 46 | #[test] 47 | fn read_u16() { 48 | let mut source = 12345u16.to_le_bytes().to_vec(); 49 | source.append(&mut 23456u16.to_le_bytes().to_vec()); 50 | let mut a = SliceReader::new(&source); 51 | 52 | assert_eq!(12345, a.read_u16().unwrap()); 53 | assert_eq!(23456, a.read_u16().unwrap()); 54 | assert!(a.read_u16().is_err()); 55 | } 56 | 57 | #[test] 58 | fn read_u32() { 59 | let mut source = 123456789u32.to_le_bytes().to_vec(); 60 | source.append(&mut 2345678910u32.to_le_bytes().to_vec()); 61 | let mut a = SliceReader::new(&source); 62 | 63 | assert_eq!(123456789, a.read_u32().unwrap()); 64 | assert_eq!(2345678910, a.read_u32().unwrap()); 65 | assert!(a.read_u32().is_err()); 66 | } 67 | 68 | #[test] 69 | fn read_u64() { 70 | let mut source = 12345678910u64.to_le_bytes().to_vec(); 71 | source.append(&mut 
234567891011u64.to_le_bytes().to_vec()); 72 | let mut a = SliceReader::new(&source); 73 | 74 | assert_eq!(12345678910, a.read_u64().unwrap()); 75 | assert_eq!(234567891011, a.read_u64().unwrap()); 76 | assert!(a.read_u64().is_err()); 77 | } 78 | 79 | #[test] 80 | fn read_u8_vec() { 81 | let source = [1u8, 2, 3, 4, 5, 6, 7, 8]; 82 | let mut a = SliceReader::new(&source); 83 | 84 | assert_eq!(vec![1, 2], a.read_u8_vec(2).unwrap()); 85 | assert_eq!(vec![3, 4, 5], a.read_u8_vec(3).unwrap()); 86 | assert_eq!(vec![6, 7], a.read_u8_vec(2).unwrap()); 87 | assert_eq!(vec![8], a.read_u8_vec(1).unwrap()); 88 | assert!(a.read_u8_vec(2).is_err()); 89 | } 90 | 91 | // SERIALIZATION TESTS 92 | // ================================================================================================ 93 | 94 | impl Serializable for u128 { 95 | fn write_into<W: ByteWriter>(&self, target: &mut W) { 96 | target.write_u8_slice(&self.to_le_bytes()); 97 | } 98 | } 99 | 100 | #[test] 101 | fn write_serializable() { 102 | let mut target: Vec<u8> = Vec::new(); 103 | 104 | 123456u128.write_into(&mut target); 105 | assert_eq!(16, target.len()); 106 | 107 | target.write(234567u128); 108 | assert_eq!(32, target.len()); 109 | 110 | let mut reader = SliceReader::new(&target); 111 | assert_eq!(123456u128, reader.read_u128().unwrap()); 112 | assert_eq!(234567u128, reader.read_u128().unwrap()); 113 | } 114 | 115 | #[test] 116 | fn write_serializable_batch() { 117 | let mut target: Vec<u8> = Vec::new(); 118 | 119 | let batch1 = vec![1u128, 2, 3, 4]; 120 | batch1.write_into(&mut target); 121 | assert_eq!(64, target.len()); 122 | 123 | let batch2 = [5u128, 6, 7, 8]; 124 | target.write(&batch2[..]); 125 | assert_eq!(128, target.len()); 126 | 127 | let mut reader = SliceReader::new(&target); 128 | for i in 1u128..9 { 129 | assert_eq!(i, reader.read_u128().unwrap()); 130 | } 131 | } 132 | 133 | #[test] 134 | fn write_serializable_array_batch() { 135 | let mut target: Vec<u8> = Vec::new(); 136 | 137 | let batch1 = vec![[1u128, 2], [3, 4]]; 
138 | batch1.write_into(&mut target); 139 | assert_eq!(64, target.len()); 140 | 141 | let batch2 = [[5u128, 6], [7, 8]]; 142 | target.write(&batch2[..]); 143 | assert_eq!(128, target.len()); 144 | 145 | let mut reader = SliceReader::new(&target); 146 | for i in 1u128..9 { 147 | assert_eq!(i, reader.read_u128().unwrap()); 148 | } 149 | } 150 | -------------------------------------------------------------------------------- /winterfell/utils/rand/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-rand-utils" 3 | version = "0.4.0" 4 | description = "Random value generation utilities for Winterfell crates" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-rand-utils/0.4.0" 10 | categories = ["cryptography"] 11 | keywords = ["rand"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [dependencies] 19 | utils = { version = "0.4", path = "../core", package = "winter-utils" } 20 | 21 | [target.'cfg(not(target_family = "wasm"))'.dependencies] 22 | rand = { version = "0.8" } 23 | -------------------------------------------------------------------------------- /winterfell/utils/rand/README.md: -------------------------------------------------------------------------------- 1 | # Winter rand utils 2 | This crate contains functions for generating random values. These functions are intended to be used in tests, benchmarks, and examples by other Winterfell crates. 3 | 4 | When compiled to WebAssembly target, all of the functions are omitted. 5 | 6 | License 7 | ------- 8 | 9 | This project is [MIT licensed](../LICENSE). 
-------------------------------------------------------------------------------- /winterfell/verifier/.cargo/katex-header.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VictorColomb/stark-snark-recursive-proofs/185076895b7339daa8dd45b9160ed19a1de9ce3f/winterfell/verifier/.cargo/katex-header.html -------------------------------------------------------------------------------- /winterfell/verifier/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winter-verifier" 3 | version = "0.4.0" 4 | description = "Winterfell STARK verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winter-verifier/0.3.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "zkp", "stark", "verifier"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [features] 19 | default = ["std"] 20 | std = ["air/std", "crypto/std", "fri/std", "math/std", "utils/std"] 21 | 22 | [dependencies] 23 | air = { version = "0.4", path = "../air", package = "winter-air", default-features = false } 24 | crypto = { version = "0.4", path = "../crypto", package = "winter-crypto", default-features = false } 25 | fri = { version = "0.4", path = "../fri", package = "winter-fri", default-features = false } 26 | math = { version = "0.4", path = "../math", package = "winter-math", default-features = false } 27 | utils = { version = "0.4", path = "../utils/core", package = "winter-utils", default-features = false } 28 | 29 | # Allow math in docs 30 | [package.metadata.docs.rs] 31 | rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] 32 | -------------------------------------------------------------------------------- /winterfell/verifier/README.md: 
-------------------------------------------------------------------------------- 1 | # Winterfell STARK verifier 2 | This crate contains an implementation of a STARK verifier which can verify proofs generated by a prover from the [prover](../prover) crate. 3 | 4 | ## Usage 5 | To verify a proof you can use `verifier::verify()` function, which has the following signature: 6 | ```Rust 7 | pub fn verify<AIR: Air>( 8 | proof: StarkProof, 9 | pub_inputs: AIR::PublicInputs, 10 | ) -> Result<(), VerifierError>; 11 | ``` 12 | where: 13 | 14 | * `AIR` is a type implementing `Air` trait for your computation (see [air crate](../air) for more info). 15 | * `proof` is the proof generated by the prover attesting that the computation was executed correctly against some set of public inputs. 16 | * `pub_inputs` is the set of public inputs against which the computation was executed by the prover. 17 | 18 | For example, if we have a struct `FibAir` which implements the `Air` trait and describes a computation of a Fibonacci sequence (see [examples crate](../examples) for the concrete implementation), we could verify that the prover computed the 1,048,576th term of the sequence correctly, by executing the following: 19 | 20 | ```Rust 21 | let fib_result = BaseElement::new(226333832811148522147755045522163790995); 22 | match verifier::verify::<FibAir>(proof, fib_result) { 23 | Ok(_) => debug!("Proof verified!"), 24 | Err(err) => debug!("Failed to verify proof: {}", err), 25 | } 26 | ``` 27 | where, `226333832811148522147755045522163790995` is the 1,048,576th term of the Fibonacci sequence when the sequence is computed in a 128-bit field with modulus 2^128 - 45 * 2^40. 28 | 29 | ## Performance 30 | Proof verification is extremely fast and is nearly independent of the complexity of the computation being verified. In the vast majority of cases proofs can be verified in 3 - 5 ms on a modern mid-range laptop CPU (using a single core). 
31 | 32 | There is one exception, however: if a computation requires a lot of `sequence` assertions (see [air crate](../air) for more info), the verification time may grow beyond 5 ms. But for the impact to be noticeable, the number of asserted values would need to be in tens of thousands. And even for hundreds of thousands of `sequence` assertions, the verification time should not exceed 50 ms. 33 | 34 | ## Crate features 35 | This crate can be compiled with the following features: 36 | 37 | * `std` - enabled by default and relies on the Rust standard library. 38 | * `no_std` - does not rely on the Rust standard library and enables compilation to WebAssembly. 39 | 40 | To compile with `no_std`, disable default features via `--no-default-features` flag. 41 | 42 | License 43 | ------- 44 | 45 | This project is [MIT licensed](../LICENSE). -------------------------------------------------------------------------------- /winterfell/verifier/src/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 5 | 6 | //! Contains common error types for prover and verifier. 7 | 8 | use core::fmt; 9 | use utils::string::String; 10 | 11 | // VERIFIER ERROR 12 | // ================================================================================================ 13 | /// Represents an error returned by the verifier during an execution of the protocol. 14 | #[derive(Debug, PartialEq)] 15 | pub enum VerifierError { 16 | /// This error occurs when base field read by a verifier from a proof does not match the 17 | /// base field of AIR with which the verifier was instantiated. 18 | InconsistentBaseField, 19 | /// This error occurs when the base field in which the proof was generated does not support 20 | /// field extension of degree specified by the proof. 
21 | UnsupportedFieldExtension(usize), 22 | /// This error occurs when a verifier cannot deserialize the specified proof. 23 | ProofDeserializationError(String), 24 | /// This error occurs when a verifier fails to draw a random value from a random coin 25 | /// within a specified number of tries. 26 | RandomCoinError, 27 | /// This error occurs when constraints evaluated over out-of-domain trace rows do not match 28 | /// evaluations of the constraint composition polynomial at the out-of-domain point. 29 | InconsistentOodConstraintEvaluations, 30 | /// This error occurs when Merkle authentication paths of trace queries do not resolve to the 31 | /// execution trace commitment included in the proof. 32 | TraceQueryDoesNotMatchCommitment, 33 | /// This error occurs when Merkle authentication paths of constraint evaluation queries do not 34 | /// resolve to the constraint evaluation commitment included in the proof. 35 | ConstraintQueryDoesNotMatchCommitment, 36 | /// This error occurs when the proof-of-work nonce hashed with the current state of the public 37 | /// coin resolves to a value which does not meet the proof-of-work threshold specified by the 38 | /// proof options. 39 | QuerySeedProofOfWorkVerificationFailed, 40 | /// This error occurs when the DEEP composition polynomial evaluations derived from trace and 41 | /// constraint evaluation queries do not represent a polynomial of the degree expected by the 42 | /// verifier. 
43 | FriVerificationFailed(fri::VerifierError), 44 | } 45 | 46 | impl fmt::Display for VerifierError { 47 | #[rustfmt::skip] 48 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 49 | match self { 50 | Self::InconsistentBaseField => { 51 | write!(f, "base field of the proof does not match base field of the specified AIR") 52 | } 53 | Self::UnsupportedFieldExtension(degree) => { 54 | write!(f, "field extension of degree {} is not supported for the proof base field", degree) 55 | } 56 | Self::ProofDeserializationError(msg) => { 57 | write!(f, "proof deserialization failed: {}", msg) 58 | } 59 | Self::RandomCoinError => { 60 | write!(f, "failed to draw a random value from a random coin") 61 | } 62 | Self::InconsistentOodConstraintEvaluations => { 63 | write!(f, "constraint evaluations over the out-of-domain frame are inconsistent") 64 | } 65 | Self::TraceQueryDoesNotMatchCommitment => { 66 | write!(f, "trace query did not match the commitment") 67 | } 68 | Self::ConstraintQueryDoesNotMatchCommitment => { 69 | write!(f, "constraint query did not match the commitment") 70 | } 71 | Self::QuerySeedProofOfWorkVerificationFailed => { 72 | write!(f, "query seed proof-of-work verification failed") 73 | } 74 | Self::FriVerificationFailed(err) => { 75 | write!(f, "verification of low-degree proof failed: {}", err) 76 | } 77 | } 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /winterfell/verifier/src/evaluator.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) Facebook, Inc. and its affiliates. 2 | // 3 | // This source code is licensed under the MIT license found in the 4 | // LICENSE file in the root directory of this source tree. 
5 | 6 | use air::{Air, AuxTraceRandElements, ConstraintCompositionCoefficients, EvaluationFrame}; 7 | use math::{polynom, FieldElement}; 8 | use utils::collections::Vec; 9 | 10 | // CONSTRAINT EVALUATION 11 | // ================================================================================================ 12 | 13 | /// Evaluates constraints for the specified evaluation frame. 14 | pub fn evaluate_constraints<A: Air, E: FieldElement<BaseField = A::BaseField>>( 15 | air: &A, 16 | composition_coefficients: ConstraintCompositionCoefficients<E>, 17 | main_trace_frame: &EvaluationFrame<E>, 18 | aux_trace_frame: &Option<EvaluationFrame<E>>, 19 | aux_rand_elements: AuxTraceRandElements<E>, 20 | x: E, 21 | ) -> E { 22 | // 1 ----- evaluate transition constraints ----------------------------------------------------- 23 | 24 | // initialize a buffer to hold transition constraint evaluations 25 | let t_constraints = air.get_transition_constraints(&composition_coefficients.transition); 26 | 27 | // compute values of periodic columns at x 28 | let periodic_values = air 29 | .get_periodic_column_polys() 30 | .iter() 31 | .map(|poly| { 32 | let num_cycles = air.trace_length() / poly.len(); 33 | let x = x.exp((num_cycles as u32).into()); 34 | polynom::eval(poly, x) 35 | }) 36 | .collect::<Vec<_>>(); 37 | 38 | // evaluate transition constraints for the main trace segment 39 | let mut t_evaluations1 = E::zeroed_vector(t_constraints.num_main_constraints()); 40 | air.evaluate_transition(main_trace_frame, &periodic_values, &mut t_evaluations1); 41 | 42 | // evaluate transition constraints for auxiliary trace segments (if any) 43 | let mut t_evaluations2 = E::zeroed_vector(t_constraints.num_aux_constraints()); 44 | if let Some(aux_trace_frame) = aux_trace_frame { 45 | air.evaluate_aux_transition( 46 | main_trace_frame, 47 | aux_trace_frame, 48 | &periodic_values, 49 | &aux_rand_elements, 50 | &mut t_evaluations2, 51 | ); 52 | } 53 | 54 | // merge all constraint evaluations into a single value by computing their random linear 55 | // combination using coefficients drawn 
from the public coin. this also divides the result 56 | // by the divisor of transition constraints. 57 | let mut result = t_constraints.combine_evaluations::<E>(&t_evaluations1, &t_evaluations2, x); 58 | 59 | // 2 ----- evaluate boundary constraints ------------------------------------------------------ 60 | 61 | // get boundary constraints grouped by common divisor from the AIR 62 | let b_constraints = 63 | air.get_boundary_constraints(&aux_rand_elements, &composition_coefficients.boundary); 64 | 65 | // cache power of x here so that we only re-compute it when degree_adjustment changes 66 | let mut degree_adjustment = b_constraints.main_constraints()[0].degree_adjustment(); 67 | let mut xp = x.exp(degree_adjustment.into()); 68 | 69 | // iterate over boundary constraint groups for the main trace segment (each group has a 70 | // distinct divisor), evaluate constraints in each group and add their combination to the 71 | // result 72 | for group in b_constraints.main_constraints().iter() { 73 | // if adjustment degree hasn't changed, no need to recompute `xp` - so just reuse the 74 | // previous value; otherwise, compute new `xp` 75 | if group.degree_adjustment() != degree_adjustment { 76 | degree_adjustment = group.degree_adjustment(); 77 | xp = x.exp(degree_adjustment.into()); 78 | } 79 | // evaluate all constraints in the group, and add the evaluation to the result 80 | result += group.evaluate_at(main_trace_frame.current(), x, xp); 81 | } 82 | 83 | // iterate over boundary constraint groups for auxiliary trace segments (each group has a 84 | // distinct divisor), evaluate constraints in each group and add their combination to the 85 | // result 86 | if let Some(aux_trace_frame) = aux_trace_frame { 87 | for group in b_constraints.aux_constraints().iter() { 88 | // if adjustment degree hasn't changed, no need to recompute `xp` - so just reuse the 89 | // previous value; otherwise, compute new `xp` 90 | if group.degree_adjustment() != degree_adjustment { 91 | 
degree_adjustment = group.degree_adjustment(); 92 | xp = x.exp(degree_adjustment.into()); 93 | } 94 | // evaluate all constraints in the group, and add the evaluation to the result 95 | result += group.evaluate_at(aux_trace_frame.current(), x, xp); 96 | } 97 | } 98 | 99 | result 100 | } 101 | -------------------------------------------------------------------------------- /winterfell/winterfell/.cargo/katex-header.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/VictorColomb/stark-snark-recursive-proofs/185076895b7339daa8dd45b9160ed19a1de9ce3f/winterfell/winterfell/.cargo/katex-header.html -------------------------------------------------------------------------------- /winterfell/winterfell/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "winterfell" 3 | version = "0.4.0" 4 | description = "Winterfell STARK prover and verifier" 5 | authors = ["winterfell contributors"] 6 | readme = "../README.md" 7 | license = "MIT" 8 | repository = "https://github.com/novifinancial/winterfell" 9 | documentation = "https://docs.rs/winterfell/0.4.0" 10 | categories = ["cryptography", "no-std"] 11 | keywords = ["crypto", "zkp", "stark", "prover", "verifier"] 12 | edition = "2021" 13 | rust-version = "1.60" 14 | 15 | [lib] 16 | bench = false 17 | 18 | [features] 19 | concurrent = ["prover/concurrent", "std"] 20 | default = ["std"] 21 | std = ["prover/std", "verifier/std"] 22 | 23 | [dependencies] 24 | prover = { version = "0.4", path = "../prover", package = "winter-prover", default-features = false } 25 | verifier = { version = "0.4", path = "../verifier", package = "winter-verifier", default-features = false } 26 | 27 | # Allow math in docs 28 | [package.metadata.docs.rs] 29 | rustdoc-args = ["--html-in-header", ".cargo/katex-header.html"] 30 | -------------------------------------------------------------------------------- 
/winterfell/winterfell/README.md: -------------------------------------------------------------------------------- 1 | # Winterfell 2 | 3 | This crate contains Winterfell STARK prover and verifier. It simply re-exports components defined in the [prover](../prover) and [verifier](../verifier) crates. 4 | 5 | License 6 | ------- 7 | 8 | This project is [MIT licensed](../LICENSE). --------------------------------------------------------------------------------