├── LICENSE ├── README.md ├── blind_trust ├── Cargo.toml ├── README.md ├── proof ├── src │ ├── circuit.rs │ ├── lib.rs │ ├── sith_generate_proof.rs │ └── solution.rs └── srs ├── broken_heart ├── Cargo.toml ├── README.md └── src │ ├── circuit.rs │ ├── lib.rs │ ├── server.rs │ └── solution.rs ├── challenge_1 ├── Cargo.toml ├── README.md └── src │ ├── cypher.rs │ ├── data.rs │ ├── field.rs │ ├── main.rs │ └── solver.rs ├── challenge_2 ├── Cargo.toml ├── README.md ├── src │ └── main.rs └── srs.bin ├── challenge_3 ├── Cargo.lock ├── Cargo.toml ├── README.md ├── src │ └── main.rs └── srs.bin └── message ├── Cargo.toml ├── LICENSE ├── README.md └── src ├── cairo ├── air.rs ├── cairo_layout.rs ├── cairo_mem.rs ├── decode │ ├── instruction_flags.rs │ ├── instruction_offsets.rs │ └── mod.rs ├── errors.rs ├── execution_trace.rs ├── mod.rs ├── register_states.rs └── runner │ ├── file_writer.rs │ ├── mod.rs │ ├── program.json │ ├── program.memory │ ├── program.trace │ ├── run.rs │ └── vec_writer.rs ├── lib.rs ├── main.rs └── starks ├── config.rs ├── constraints ├── boundary.rs ├── evaluation_table.rs ├── evaluator.rs └── mod.rs ├── context.rs ├── debug.rs ├── domain.rs ├── example ├── dummy_air.rs ├── fibonacci_2_columns.rs ├── fibonacci_rap.rs ├── mod.rs ├── quadratic_air.rs └── simple_fibonacci.rs ├── frame.rs ├── fri ├── fri_commitment.rs ├── fri_decommit.rs ├── fri_functions.rs └── mod.rs ├── grinding.rs ├── mod.rs ├── proof ├── errors.rs ├── mod.rs ├── options.rs └── stark.rs ├── prover.rs ├── trace.rs ├── traits.rs ├── transcript.rs ├── utils.rs └── verifier.rs /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Lambdaworks Exercises & Challenges 2 | 3 | Contains several examples and challenges to use Lambdaworks. 
4 | 
5 | Challenges 1, 2 and 3 appeared in [Ingonyama's CTF event](https://ingonyama.ctfd.io/).
6 | 
7 | Challenges `message`, `blind_trust` and `broken_heart` appeared in the first LambdaIngo ZK CTF.
8 | 
--------------------------------------------------------------------------------
/blind_trust/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "blind_trust"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7 | 
8 | [dependencies]
9 | rand = "0.8.5"
10 | 
11 | 
12 | lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks", rev = "8fcd64f" }
13 | lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "8fcd64f" }
14 | lambdaworks-plonk = { git = "https://github.com/lambdaclass/lambdaworks_plonk_prover", rev = "6e39865"}
15 | 
16 | 
--------------------------------------------------------------------------------
/blind_trust/README.md:
--------------------------------------------------------------------------------
1 | # Obi-Wan's search for the Sith Foundry
2 | 
3 | In his quest to stop the Sith’s menace, Obi-Wan Kenobi finds a (Sith) holocron, giving a zero-knowledge proof of the existence of the Sith’s galactic foundry (using galactic Plonk). This place is rumored to contain several artifacts that could aid the Galactic Republic in its war efforts. The position, given by (x,h,y), satisfies the equation y=x*h+b. After some study, Obi-Wan finds the values of y and b (which belong to Sith lore). The only problem is that, even with this knowledge, it may take him quite a long time to find the mysterious planet, and the situation in the Republic is desperate. He also finds, together with the holocron, a second item containing the SRS used to generate the proof, the prover, and a description of the circuit used. Will he be able to find the position of the foundry before it is too late? The flag consists of x and h concatenated and written in hex (for example, if x=0x123 and h=0x789, then FLAG=123789).
4 | 
5 | ## Description
6 | In this challenge the participants have to exploit a vulnerability in a PLONK implementation that's missing the blindings of the wire polynomials.
7 | 
8 | The first round of PLONK reads as follows:
9 | 
10 | ```
11 | Compute polynomials a',b',c' as the interpolation polynomials of the columns of T at the domain H.
12 | Sample random b_1, b_2, b_3, b_4, b_5, b_6
13 | Let
14 | 
15 | a := (b_1X + b_2)Z_H + a'
16 | 
17 | b := (b_3X + b_4)Z_H + b'
18 | 
19 | c := (b_5X + b_6)Z_H + c'
20 | 
21 | Compute [a]_1, [b]_1, [c]_1 and add them to the transcript.
22 | ```
23 | 
24 | The multiples of $Z_H$ that are added to $a', b', c'$ are called the blindings. In subsequent rounds the polynomials $a, b, c$ are opened at a point chosen by the verifier. If the blindings are missing, information about the prover's private inputs can be leaked.
25 | 
26 | In this challenge the participant is given a single proof of the following simple circuit, along with the corresponding values of $b$ and $y$:
27 | 
28 | ```
29 | PRIVATE INPUT:
30 | x
31 | h
32 | 
33 | PUBLIC INPUT:
34 | b
35 | y
36 | 
37 | OUTPUT:
38 | ASSERT y == h * x + b
39 | ```
40 | 
41 | The flag is `x.representative() || h.representative()`. The objective of the challenge is to use the provided information to recover the private inputs.
42 | 
43 | ## Data provided to participants
44 | 
45 | Participants get the following values:
46 | 
47 | 1.
`y: "3610e39ce7acc430c1fa91efcec93722d77bc4e910ccb195fa4294b64ecb0d35"`,
48 | 1. `b: "1b0871ce73e72c599426228e37e7469be9f4fa0b7c9dae950bb77539ca9ebb0f"`.
49 | 
50 | They also get access to the following files:
51 | 
52 | 1. `src/sith_generate_proof.rs` (this file has flags and toxic waste replaced by `???`)
53 | 1. `src/circuit.rs`
54 | 1. `srs`
55 | 1. `proof`
56 | 
57 | The files `srs` and `proof` can be deserialized using Lambdaworks methods as follows.
58 | 
59 | ```rust
60 | use std::{fs, io::{BufReader, Read}};
61 | use lambdaworks_plonk::prover::Proof;
62 | use lambdaworks_crypto::commitments::kzg::StructuredReferenceString;
63 | use lambdaworks_math::traits::{Deserializable, Serializable};
64 | use crate::sith_generate_proof::{SithProof, SithSRS};
65 | 
66 | fn read_challenge_data_from_files() -> (SithSRS, SithProof) {
67 |     // Read proof from file
68 |     let f = fs::File::open("./proof").unwrap();
69 |     let mut reader = BufReader::new(f);
70 |     let mut buffer = Vec::new();
71 |     reader.read_to_end(&mut buffer).unwrap();
72 |     let proof = Proof::deserialize(&buffer).unwrap();
73 | 
74 |     // Read SRS from file
75 |     let f = fs::File::open("./srs").unwrap();
76 |     let mut reader = BufReader::new(f);
77 |     let mut buffer = Vec::new();
78 |     reader.read_to_end(&mut buffer).unwrap();
79 |     let srs = StructuredReferenceString::deserialize(&buffer).unwrap();
80 |     (srs, proof)
81 | }
82 | ```
83 | 
84 | ## Solution
85 | 
86 | The solution for the coordinates is:
87 | 
88 | 1. `x: "2194826651b32ca1055614fc6e2f2de86eab941d2c55bd467268e9"`,
89 | 1. `h: "432904cca36659420aac29f8dc5e5bd0dd57283a58ab7a8ce4d1ca"`.
90 | 
91 | The flag is the concatenation of the two: `FLAG: 2194826651b32ca1055614fc6e2f2de86eab941d2c55bd467268e9432904cca36659420aac29f8dc5e5bd0dd57283a58ab7a8ce4d1ca`
92 | 
93 | ## Solution description
94 | 
95 | We'll use the notation of the `lambdaworks_plonk_prover` docs.
96 | 
97 | By checking the code of the challenge, the participants can find the following in `circuit.rs`:
98 | 
99 | ```rust
100 | /// Witness generator for the circuit `ASSERT y == x * h + b`
101 | pub fn circuit_witness(
102 |     b: &FrElement,
103 |     y: &FrElement,
104 |     h: &FrElement,
105 |     x: &FrElement,
106 | ) -> Witness<FrField> {
107 |     let z = x * h;
108 |     let w = &z + b;
109 |     let empty = b.clone();
110 |     Witness {
111 |         a: vec![
112 |             b.clone(),
113 |             y.clone(),
114 |             x.clone(),
115 |             b.clone(),
116 |             w.clone(),
117 |             empty.clone(),
118 |             empty.clone(),
119 |             empty.clone(),
120 |         ],
121 |         ...
122 | ```
123 | 
124 | This code reveals that the prover constructs the $V$ matrix as follows:
125 | 
126 | | A | B | C |
127 | | --- | --- | --- |
128 | | b | - | - |
129 | | y | - | - |
130 | | x | h | z |
131 | | b | z | w |
132 | | w | y | - |
133 | | - | - | - |
134 | | - | - | - |
135 | | - | - | - |
136 | 
137 | Where `-` are empty values. The PLONK implementation of `lambdaworks-plonk` requires the empty values to be filled in with the first public input, so in this case the values `-` will be replaced by $b$. This can be seen directly from the code of the challenge.
138 | 
139 | Therefore, the polynomial $a'$, being the interpolation of column `A`, is
140 | 
141 | $$a' = b L_1 + y L_2 + x L_3 + b L_4 + w L_5 + b L_6 + b L_7 + b L_8,$$
142 | 
143 | where $L_i$ is the $i$-th polynomial of the Lagrange basis. Also, the value $w$ is equal to $y$. That can be seen from the code and from the fact that the last row of the $V$ matrix corresponds to the assertion that the actual output of the circuit equals the claimed output $y$.
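
Substituting $w = y$ and grouping terms shows that the only unknown coefficient left in $a'$ is the one multiplying $L_3$:

$$a' = b (L_1 + L_4 + L_6 + L_7 + L_8) + y (L_2 + L_5) + x L_3.$$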
144 | 
145 | During the proof, the verifier sends a challenge $\zeta$ and the prover opens, among other things, the polynomial $a$ at $\zeta$. Since the implementation of the challenge does not include blindings, $a(\zeta) = a'(\zeta)$ and we get
146 | 
147 | $$a(\zeta) = b L_1(\zeta) + y L_2(\zeta) + x L_3(\zeta) + b L_4(\zeta) + y L_5(\zeta) + b L_6(\zeta) + b L_7(\zeta) + b L_8(\zeta).$$
148 | 
149 | All the terms in this expression are known to the participants except for $x$, which can be solved for: $x = \left( a(\zeta) - b (L_1 + L_4 + L_6 + L_7 + L_8)(\zeta) - y (L_2 + L_5)(\zeta) \right) / L_3(\zeta)$. To do so, the participants need to know how to recover the challenges to get $\zeta$ and how to compute the Lagrange polynomials evaluated at it. The second private input $h$ can then be computed as $h = (y - b) / x$.
150 | 
151 | ## Test
152 | 
153 | A test with the above solution is given in `solution.rs`. To make it pass, lines 25 and 26 of `sith_generate_proof.rs` need to be replaced by the following:
154 | 
155 | ```rust
156 | pub const FLAG1: &str = "2194826651b32ca1055614fc6e2f2de86eab941d2c55bd467268e9";
157 | pub const FLAG2: &str = "432904cca36659420aac29f8dc5e5bd0dd57283a58ab7a8ce4d1ca";
158 | ```
159 | 
--------------------------------------------------------------------------------
/blind_trust/proof:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lambdaclass/lambdaworks_exercises/946d9950d5a0c532c1c154d3366f978d98fca233/blind_trust/proof
--------------------------------------------------------------------------------
/blind_trust/src/circuit.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_math::{
2 |     elliptic_curve::short_weierstrass::curves::bls12_381::default_types::{FrElement, FrField},
3 |     field::element::FieldElement,
4 |     polynomial::Polynomial,
5 | };
6 | use lambdaworks_plonk::setup::{CommonPreprocessedInput, Witness};
7 | 
8 | use crate::sith_generate_proof::{ORDER_8_ROOT_UNITY, ORDER_R_MINUS_1_ROOT_UNITY};
9 | 
10 | /// Generates a domain to interpolate: 1, omega, omega², ..., omega^size
11 | pub fn generate_domain(omega: &FrElement, size: usize) -> Vec<FrElement> {
12 |     (1..size).fold(vec![FieldElement::one()], |mut acc, _| {
13 |         acc.push(acc.last().unwrap() * omega);
14 |         acc
15 |     })
16 | }
17 | 
18 | /// The identity permutation, auxiliary function to generate the copy constraints.
19 | fn identity_permutation(w: &FrElement, n: usize) -> Vec<FrElement> {
20 |     let u = ORDER_R_MINUS_1_ROOT_UNITY;
21 |     let mut result: Vec<FrElement> = vec![];
22 |     for index_column in 0..=2 {
23 |         for index_row in 0..n {
24 |             result.push(w.pow(index_row) * u.pow(index_column as u64));
25 |         }
26 |     }
27 |     result
28 | }
29 | 
30 | /// Generates the permutation coefficients for the copy constraints,
31 | /// i.e. the polynomials S1, S2, S3.
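/// `permutation[i]` holds, for cell `i` of the flattened A|B|C matrix
/// (column-major, `n` rows per column), the index of the identity-permutation
/// value that is placed in that cell.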
32 | pub fn generate_permutation_coefficients(
33 |     omega: &FrElement,
34 |     n: usize,
35 |     permutation: &[usize],
36 | ) -> Vec<FrElement> {
37 |     let identity = identity_permutation(omega, n);
38 |     let permuted: Vec<FrElement> = (0..n * 3)
39 |         .map(|i| identity[permutation[i]].clone())
40 |         .collect();
41 |     permuted
42 | }
43 | 
44 | /// Witness generator for the circuit `ASSERT y == x * h + b`
45 | pub fn circuit_witness(
46 |     b: &FrElement,
47 |     y: &FrElement,
48 |     h: &FrElement,
49 |     x: &FrElement,
50 | ) -> Witness<FrField> {
51 |     let z = x * h;
52 |     let w = &z + b;
53 |     let empty = b.clone();
54 |     Witness {
55 |         a: vec![
56 |             b.clone(),
57 |             y.clone(),
58 |             x.clone(),
59 |             b.clone(),
60 |             w.clone(),
61 |             empty.clone(),
62 |             empty.clone(),
63 |             empty.clone(),
64 |         ],
65 |         b: vec![
66 |             empty.clone(),
67 |             empty.clone(),
68 |             h.clone(),
69 |             z.clone(),
70 |             y.clone(),
71 |             empty.clone(),
72 |             empty.clone(),
73 |             empty.clone(),
74 |         ],
75 |         c: vec![
76 |             empty.clone(),
77 |             empty.clone(),
78 |             z.clone(),
79 |             w.clone(),
80 |             empty.clone(),
81 |             empty.clone(),
82 |             empty.clone(),
83 |             empty.clone(),
84 |         ],
85 |     }
86 | }
87 | 
88 | /// Common preprocessed input for the circuit `ASSERT y == x * h + b`
89 | pub fn circuit_common_preprocessed_input() -> CommonPreprocessedInput<FrField> {
90 |     let n: usize = 8;
91 |     let omega = ORDER_8_ROOT_UNITY;
92 |     let domain = generate_domain(&omega, n);
93 |     let permutation = &[
94 |         23, 12, 2, 0, 19, 3, 5, 6, 7, 8, 10, 18, 1, 9, 13, 14, 15, 16, 11, 4, 17, 20, 21, 22,
95 |     ];
96 |     let permuted = generate_permutation_coefficients(&omega, n, permutation);
97 | 
98 |     let s1_lagrange: Vec<FrElement> = permuted[..8].to_vec();
99 |     let s2_lagrange: Vec<FrElement> = permuted[8..16].to_vec();
100 |     let s3_lagrange: Vec<FrElement> = permuted[16..].to_vec();
101 | 
102 |     CommonPreprocessedInput {
103 |         n,
104 |         omega,
105 |         k1: ORDER_R_MINUS_1_ROOT_UNITY,
106 |         domain: domain.clone(),
107 | 
108 |         ql: Polynomial::interpolate(
109 |             &domain,
110 |             &[
111 |                 -FieldElement::one(),
112 |                 -FieldElement::one(),
113 |                 FieldElement::zero(),
114 |                 FieldElement::one(),
115 |                 FieldElement::one(),
116 |                 FieldElement::zero(),
117 |                 FieldElement::zero(),
118 |                 FieldElement::zero(),
119 |             ],
120 |         )
121 |         .unwrap(),
122 |         qr: Polynomial::interpolate(
123 |             &domain,
124 |             &[
125 |                 FieldElement::zero(),
126 |                 FieldElement::zero(),
127 |                 FieldElement::zero(),
128 |                 FieldElement::one(),
129 |                 -FieldElement::one(),
130 |                 FieldElement::zero(),
131 |                 FieldElement::zero(),
132 |                 FieldElement::zero(),
133 |             ],
134 |         )
135 |         .unwrap(),
136 |         qo: Polynomial::interpolate(
137 |             &domain,
138 |             &[
139 |                 FieldElement::zero(),
140 |                 FieldElement::zero(),
141 |                 -FieldElement::one(),
142 |                 -FieldElement::one(),
143 |                 FieldElement::zero(),
144 |                 FieldElement::zero(),
145 |                 FieldElement::zero(),
146 |                 FieldElement::zero(),
147 |             ],
148 |         )
149 |         .unwrap(),
150 |         qm: Polynomial::interpolate(
151 |             &domain,
152 |             &[
153 |                 FieldElement::zero(),
154 |                 FieldElement::zero(),
155 |                 FieldElement::one(),
156 |                 FieldElement::zero(),
157 |                 FieldElement::zero(),
158 |                 FieldElement::zero(),
159 |                 FieldElement::zero(),
160 |                 FieldElement::zero(),
161 |             ],
162 |         )
163 |         .unwrap(),
164 |         qc: Polynomial::interpolate(
165 |             &domain,
166 |             &[
167 |                 FieldElement::zero(),
168 |                 FieldElement::zero(),
169 |                 FieldElement::zero(),
170 |                 FieldElement::zero(),
171 |                 FieldElement::zero(),
172 |                 FieldElement::zero(),
173 |                 FieldElement::zero(),
174 |                 FieldElement::zero(),
175 |             ],
176 |         )
177 |         .unwrap(),
178 | 
179 |         s1: Polynomial::interpolate(&domain, &s1_lagrange).unwrap(),
180 |         s2: Polynomial::interpolate(&domain, &s2_lagrange).unwrap(),
181 |         s3: Polynomial::interpolate(&domain, &s3_lagrange).unwrap(),
182 | 
183 |         s1_lagrange,
184 |         s2_lagrange,
185 |         s3_lagrange,
186 |     }
187 | }
--------------------------------------------------------------------------------
/blind_trust/src/lib.rs:
--------------------------------------------------------------------------------
1 | pub mod circuit;
2 | pub mod sith_generate_proof;
3 | pub mod solution;
--------------------------------------------------------------------------------
/blind_trust/src/sith_generate_proof.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_crypto::commitments::{
2 |     kzg::{KateZaveruchaGoldberg, StructuredReferenceString},
3 |     traits::IsCommitmentScheme,
4 | };
5 | use lambdaworks_math::{
6 |     cyclic_group::IsGroup,
7 |     elliptic_curve::{
8 |         short_weierstrass::curves::bls12_381::{
9 |             curve::BLS12381Curve,
10 |             default_types::{FrElement, FrField},
11 |             pairing::BLS12381AtePairing,
12 |             twist::BLS12381TwistCurve,
13 |         },
14 |         traits::{IsEllipticCurve, IsPairing},
15 |     },
16 |     traits::IsRandomFieldElementGenerator,
17 |     unsigned_integer::element::U256,
18 | };
19 | use lambdaworks_plonk::{
20 |     prover::{Proof, Prover},
21 |     setup::{setup, VerificationKey},
22 | };
23 | 
24 | use crate::circuit::{circuit_common_preprocessed_input, circuit_witness};
25 | use rand::Rng;
26 | 
27 | pub const FLAG1: &str = "??????????????????????????????????????????????????????";
28 | pub const FLAG2: &str = "??????????????????????????????????????????????????????";
29 | 
30 | pub const X_COORDINATE: FrElement = FrElement::from_hex_unchecked(FLAG1);
31 | pub const H_COORDINATE: FrElement = FrElement::from_hex_unchecked(FLAG2);
32 | 
33 | pub type SithSRS = StructuredReferenceString<
34 |     <BLS12381AtePairing as IsPairing>::G1Point,
35 |     <BLS12381AtePairing as IsPairing>::G2Point,
36 | >;
37 | 
38 | pub const ORDER_8_ROOT_UNITY: FrElement = FrElement::from_hex_unchecked(
39 |     "345766f603fa66e78c0625cd70d77ce2b38b21c28713b7007228fd3397743f7a",
40 | ); // order 8
41 | 
42 | pub const ORDER_R_MINUS_1_ROOT_UNITY: FrElement = FrElement::from_hex_unchecked("7");
43 | pub type SithCS = KateZaveruchaGoldberg<FrField, BLS12381AtePairing>;
44 | pub type SithVK = VerificationKey<<SithCS as IsCommitmentScheme<FrField>>::Commitment>;
45 | pub type SithProof = Proof<FrField, SithCS>;
46 | pub type Pairing = BLS12381AtePairing;
47 | pub type KZG = KateZaveruchaGoldberg<FrField, BLS12381AtePairing>;
48 | type G1Point = <BLS12381Curve as IsEllipticCurve>::PointRepresentation;
49 | type G2Point = <BLS12381TwistCurve as IsEllipticCurve>::PointRepresentation;
50 | 
51 | /// Generates a test SRS for the BLS12381 curve
52 | /// n is the number of constraints in the system.
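/// Note: the randomly sampled scalar `s` below is the trapdoor ("toxic waste") of
/// the setup; anyone who learns it can forge KZG openings, so a real setup must
/// discard it.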
53 | pub fn generate_srs(n: usize) -> StructuredReferenceString<G1Point, G2Point> {
54 |     let mut rng = rand::thread_rng();
55 |     let s = FrElement::new(U256 {
56 |         limbs: [
57 |             rng.gen::<u64>(),
58 |             rng.gen::<u64>(),
59 |             rng.gen::<u64>(),
60 |             rng.gen::<u64>(),
61 |         ],
62 |     });
63 |     let g1 = <BLS12381Curve as IsEllipticCurve>::generator();
64 |     let g2 = <BLS12381TwistCurve as IsEllipticCurve>::generator();
65 | 
66 |     let powers_main_group: Vec<G1Point> = (0..n + 3)
67 |         .map(|exp| g1.operate_with_self(s.pow(exp as u64).representative()))
68 |         .collect();
69 |     let powers_secondary_group = [g2.clone(), g2.operate_with_self(s.representative())];
70 | 
71 |     StructuredReferenceString::new(&powers_main_group, &powers_secondary_group)
72 | }
73 | 
74 | pub struct SithRandomFieldGenerator;
75 | impl IsRandomFieldElementGenerator<FrField> for SithRandomFieldGenerator {
76 |     fn generate(&self) -> FrElement {
77 |         FrElement::zero()
78 |     }
79 | }
80 | 
81 | pub fn generate_proof(b: &FrElement) -> (FrElement, Proof<FrField, SithCS>, SithSRS) {
82 |     let common_preprocessed_input = circuit_common_preprocessed_input();
83 |     let srs = generate_srs(common_preprocessed_input.n);
84 |     let kzg = KZG::new(srs.clone());
85 |     let verifying_key = setup(&common_preprocessed_input.clone(), &kzg);
86 | 
87 |     let x = X_COORDINATE;
88 |     let h = H_COORDINATE;
89 | 
90 |     // Output
91 |     let y = &x * &h + b;
92 | 
93 |     // This is the circuit for y == x * h + b
94 |     let witness = circuit_witness(&b, &y, &h, &x);
95 |     let public_input = vec![b.clone(), y.clone()];
96 | 
97 |     let random_generator = SithRandomFieldGenerator {};
98 |     let prover = Prover::new(kzg.clone(), random_generator);
99 |     let proof = prover.prove(
100 |         &witness,
101 |         &public_input,
102 |         &common_preprocessed_input,
103 |         &verifying_key,
104 |     );
105 |     (y, proof, srs)
106 | }
--------------------------------------------------------------------------------
/blind_trust/src/solution.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_crypto::{
2 |     commitments::traits::IsCommitmentScheme, fiat_shamir::transcript::Transcript,
3 | };
4 | use lambdaworks_math::{
5 |     field::{element::FieldElement, traits::IsField},
6 |     traits::{ByteConversion, Serializable},
7 | };
8 | use lambdaworks_plonk::{
9 |     prover::Proof,
10 |     setup::{new_strong_fiat_shamir_transcript, CommonPreprocessedInput, VerificationKey},
11 | };
12 | 
13 | fn compute_private_input<F, CS>(
14 |     proof: &Proof<F, CS>,
15 |     vk: &VerificationKey<CS::Commitment>,
16 |     public_input: &[FieldElement<F>],
17 |     common_preprocessed_input: &CommonPreprocessedInput<F>,
18 | ) -> (FieldElement<F>, FieldElement<F>)
19 | where
20 |     F: IsField,
21 |     CS: IsCommitmentScheme<F>,
22 |     CS::Commitment: Serializable,
23 |     FieldElement<F>: ByteConversion,
24 | {
25 |     // Replay interactions to recover challenges
26 |     let mut transcript = new_strong_fiat_shamir_transcript::<F, CS>(vk, public_input);
27 |     transcript.append(&proof.a_1.serialize());
28 |     transcript.append(&proof.b_1.serialize());
29 |     transcript.append(&proof.c_1.serialize());
30 |     let _beta = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
31 |     let _gamma = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
32 | 
33 |     transcript.append(&proof.z_1.serialize());
34 |     let _alpha = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
35 | 
36 |     transcript.append(&proof.t_lo_1.serialize());
37 |     transcript.append(&proof.t_mid_1.serialize());
38 |     transcript.append(&proof.t_hi_1.serialize());
39 |     let zeta = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
40 | 
41 |     // Compute `x` and `h`
42 |     let [b, y] = [&public_input[0], &public_input[1]];
43 |     let n =
common_preprocessed_input.n as u64; 44 | let omega = &common_preprocessed_input.omega; 45 | let domain = &common_preprocessed_input.domain; 46 | let l1_zeta = 47 | (zeta.pow(n) - FieldElement::one()) / (&zeta - FieldElement::one()) / FieldElement::from(n); 48 | 49 | let mut li_zeta = l1_zeta; 50 | let mut lagrange_basis_zeta = Vec::new(); 51 | lagrange_basis_zeta.push(li_zeta.clone()); 52 | for i in 1..domain.len() { 53 | li_zeta = omega * &li_zeta * ((&zeta - &domain[i - 1]) / (&zeta - &domain[i])); 54 | lagrange_basis_zeta.push(li_zeta.clone()); 55 | } 56 | 57 | let x = (&proof.a_zeta 58 | - b * &lagrange_basis_zeta[3] 59 | - y * &lagrange_basis_zeta[4] 60 | - b * &lagrange_basis_zeta[0] 61 | - y * &lagrange_basis_zeta[1] 62 | - b * &lagrange_basis_zeta[5] 63 | - b * &lagrange_basis_zeta[6] 64 | - b * &lagrange_basis_zeta[7]) 65 | / &lagrange_basis_zeta[2]; 66 | let h = (y - b) / &x; 67 | (x, h) 68 | } 69 | 70 | #[cfg(test)] 71 | mod tests { 72 | use std::{ 73 | fs, 74 | io::{BufReader, Read}, 75 | }; 76 | 77 | use lambdaworks_crypto::commitments::kzg::StructuredReferenceString; 78 | use lambdaworks_math::{ 79 | elliptic_curve::short_weierstrass::curves::bls12_381::default_types::FrElement, 80 | traits::{Deserializable, Serializable}, 81 | }; 82 | use lambdaworks_plonk::{prover::Proof, setup::setup, verifier::Verifier}; 83 | 84 | use crate::{ 85 | circuit::circuit_common_preprocessed_input, 86 | sith_generate_proof::{ 87 | generate_proof, SithProof, SithSRS, H_COORDINATE, KZG, X_COORDINATE, 88 | }, 89 | solution::compute_private_input, 90 | }; 91 | 92 | fn read_challenge_data_from_files() -> (SithSRS, SithProof) { 93 | // Read proof from file 94 | let f = fs::File::open("./proof").unwrap(); 95 | let mut reader = BufReader::new(f); 96 | let mut buffer = Vec::new(); 97 | reader.read_to_end(&mut buffer).unwrap(); 98 | let proof = Proof::deserialize(&buffer).unwrap(); 99 | 100 | // Read SRS from file 101 | let f = fs::File::open("./srs").unwrap(); 102 | let mut reader = BufReader::new(f); 103 | let mut buffer = Vec::new(); 104 | reader.read_to_end(&mut buffer).unwrap(); 105 | let srs = StructuredReferenceString::deserialize(&buffer).unwrap(); 106 | (srs, proof) 107 | } 108 | 109 | #[test] 110 | fn test_challenge_data() { 111 | let b = 112 | FrElement::from_hex("1b0871ce73e72c599426228e37e7469be9f4fa0b7c9dae950bb77539ca9ebb0f") 113 | .unwrap(); 114 | let y = 115 | FrElement::from_hex("3610e39ce7acc430c1fa91efcec93722d77bc4e910ccb195fa4294b64ecb0d35") 116 | .unwrap(); 117 | let public_input = vec![b, y]; 118 | 119 | let (srs, proof) = read_challenge_data_from_files(); 120 | let common_preprocessed_input = circuit_common_preprocessed_input(); 121 | let kzg = KZG::new(srs.clone()); 122 | let verifier = Verifier::new(kzg.clone()); 123 | let vk = setup(&common_preprocessed_input, &kzg); 124 | 125 | assert!(verifier.verify(&proof, &public_input, &common_preprocessed_input, &vk)) 126 | } 127 | 128 | fn export_challenge_data() { 129 | use std::fs; 130 | use std::io::Write; 131 | 132 | let b = 133 | FrElement::from_hex("1b0871ce73e72c599426228e37e7469be9f4fa0b7c9dae950bb77539ca9ebb0f") 134 | .unwrap(); 135 | let (y, proof, srs) = generate_proof(&b); 136 | 137 | let mut srs_file = fs::File::create("./srs").unwrap(); 138 | srs_file.write_all(&srs.serialize()).unwrap(); 139 | let mut srs_file = fs::File::create("./proof").unwrap(); 140 | srs_file.write_all(&proof.serialize()).unwrap(); 141 | println!("{}", y); 142 | } 143 | 144 | #[test] 145 | fn test_solution() { 146 | let b = 147 | 
FrElement::from_hex("1b0871ce73e72c599426228e37e7469be9f4fa0b7c9dae950bb77539ca9ebb0f")
148 |                 .unwrap();
149 |         let y =
150 |             FrElement::from_hex("3610e39ce7acc430c1fa91efcec93722d77bc4e910ccb195fa4294b64ecb0d35")
151 |                 .unwrap();
152 |         let public_input = vec![b, y];
153 | 
154 |         let (srs, proof) = read_challenge_data_from_files();
155 |         let common_preprocessed_input = circuit_common_preprocessed_input();
156 |         let kzg = KZG::new(srs.clone());
157 | 
158 |         let vk = setup(&common_preprocessed_input, &kzg);
159 |         // Extract private input from proof, public input and public keys
160 |         let (x, h) = compute_private_input(&proof, &vk, &public_input, &common_preprocessed_input);
161 | 
162 |         assert_eq!(&X_COORDINATE, &x);
163 |         assert_eq!(&H_COORDINATE, &h);
164 |     }
165 | }
--------------------------------------------------------------------------------
/blind_trust/srs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lambdaclass/lambdaworks_exercises/946d9950d5a0c532c1c154d3366f978d98fca233/blind_trust/srs
--------------------------------------------------------------------------------
/broken_heart/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "irreducibull"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7 | 
8 | [dependencies]
9 | lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks", rev = "d8f14cb"}
10 | lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "d8f14cb"}
11 | # lambdaworks-plonk = { git = "https://github.com/lambdaclass/lambdaworks_plonk_prover", rev="07e36bf"}
12 | lambdaworks-plonk = { path = "path/to/local/lambdaworks_plonk_prover"}
--------------------------------------------------------------------------------
/broken_heart/README.md:
--------------------------------------------------------------------------------
1 | # Loki’s broken heart
2 | 
3 | After successfully breaking into Loki’s vault and getting access to some of his finest treasures and weapons, you spot a small trapdoor under a carpet. The trapdoor is locked and contains a device with a PLONK prover. It says: “Prove that the point $(1,y)$ belongs to the elliptic curve $y^2 = x^3 + 4$”. You see that, in order to prove this, you need $y^2 - x^3 - 4$ to be equal to zero, which corresponds to the circuit for the prover provided by Loki. Can you open the trapdoor?
4 | 
5 | # Description
6 | 
7 | This challenge is about exploiting a vulnerability in weak Fiat-Shamir implementations.
8 | 
9 | The idea is to have a small server with an endpoint accepting proofs of executions of circuits. The PLONK backend will have a bug in the initialization of the transcript and won't add the public inputs to the transcript. So, as long as one public input is in control of the attacker, they can forge fake proofs.
10 | 
11 | At the moment the circuit is:
12 | 
13 | ```
14 | PUBLIC INPUT: x
15 | PUBLIC INPUT: y
16 | 
17 | ASSERT 0 == y^2 - x^3 - 4
18 | ```
19 | It is instantiated over the `BLS12-381` scalar field.
20 | If the user manages to send a valid proof for `x==1`, they obtain the flag. Since $5$ is a quadratic non-residue in the field the circuit is defined over, this can only be achieved by forging a fake proof.
21 | 
22 | The vulnerability stems from a bug in the implementation of strong Fiat-Shamir.
A correct implementation should add, among other things, all the public inputs to the transcript at initialization. If a public input is not added to the transcript and is in control of the attacker, they can forge a fake proof. Here, fixing `x=1` leaves `y` under control of the user.
23 | 
24 | The attack is described in Section V of [Weak Fiat-Shamir Attacks on Modern Proof Systems](https://eprint.iacr.org/2023/691.pdf).
25 | 
26 | Here is a description of the attack.
27 | 
28 | ![image](https://github.com/lambdaclass/challenges-ctf/assets/41742639/d2040ccd-17ad-4f0e-b910-a17ceda96ed4)
29 | 
30 | Instead of taking random polynomials (steps (1) to (7)), the current solution takes a valid proof for the pair `x=0`, `y=2` and uses it to forge a `y'` for `x=1` that's compatible with the original proof.
31 | 
32 | At the moment, the server endpoint is simulated with the following function.
33 | 
34 | ```rust
35 | pub fn server_endpoint_verify(
36 |     srs: ChallengeSRS,
37 |     common_preprocessed_input: CommonPreprocessedInput<FrField>,
38 |     vk: &ChallengeVK,
39 |     x: &FrElement,
40 |     y: &FrElement,
41 |     proof: &ChallengeProof,
42 | ) -> String {
43 |     let public_input = [x.clone(), y.clone()];
44 |     let kzg = KZG::new(srs);
45 |     let verifier = Verifier::new(kzg);
46 |     let result = verifier.verify(proof, &public_input, &common_preprocessed_input, vk);
47 |     if !result {
48 |         "Invalid Proof".to_string()
49 |     } else if x != &FieldElement::one() {
50 |         "Valid Proof. Congrats!".to_string()
51 |     } else {
52 |         FLAG.to_string()
53 |     }
54 | }
55 | ```
56 | 
57 | The attack can be found in `src/solution.rs` along with a test that showcases it.
58 | 
59 | ## Get it to work
60 | 
61 | Currently `lambdaworks_plonk_prover` does not expose the weak Fiat-Shamir vulnerability.
62 | So to make the challenge work we need to modify it.
63 | 
64 | 1. Clone `lambdaworks_plonk_prover` repo: `git clone git@github.com:lambdaclass/lambdaworks_plonk_prover.git`
65 | 1. `git checkout 07e36bf`
66 | 1. Make the following changes to it:
67 | 
68 | ```diff
69 | diff --git a/Cargo.toml b/Cargo.toml
70 | index 7f0e324..c36a00d 100644
71 | --- a/Cargo.toml
72 | +++ b/Cargo.toml
73 | @@ -7,8 +7,8 @@ edition = "2021"
74 | 
75 |  [dependencies]
76 |  serde = { version = "1.0", features = ["derive"]}
77 | -lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks", rev = "943963c" }
78 | -lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "943963c" }
79 | +lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks", rev = "d8f14cb" }
80 | +lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "d8f14cb" }
81 | 
82 |  thiserror = "1.0.38"
83 |  serde_json = "1.0"
84 | diff --git a/src/setup.rs b/src/setup.rs
85 | index 493278a..437bcc9 100644
86 | --- a/src/setup.rs
87 | +++ b/src/setup.rs
88 | @@ -69,7 +69,7 @@ pub fn setup<F: IsField, CS: IsCommitmentScheme<F>>(
89 | 
90 |  pub fn new_strong_fiat_shamir_transcript<F, CS>(
91 |      vk: &VerificationKey<CS::Commitment>,
92 | -    public_input: &[FieldElement<F>],
93 | +    _public_input: &[FieldElement<F>],
94 |  ) -> DefaultTranscript
95 |  where
96 | @@ -88,9 +88,6 @@ where
97 |      transcript.append(&vk.qo_1.serialize());
98 |      transcript.append(&vk.qc_1.serialize());
99 | 
100 | -    for value in public_input.iter() {
101 | -        transcript.append(&value.to_bytes_be());
102 | -    }
103 |      transcript
104 |  }
105 | ```
106 | 
107 | 
108 | 1. Clone this repo and modify its `Cargo.toml` to point the `lambdaworks-plonk` dependency to your local copy of `lambdaworks_plonk_prover`.
109 | 1. Run `cargo test`
--------------------------------------------------------------------------------
/broken_heart/src/circuit.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_math::{
2 |     elliptic_curve::short_weierstrass::curves::bls12_381::default_types::{FrElement, FrField},
3 |     field::element::FieldElement,
4 |     polynomial::Polynomial,
5 | };
6 | use lambdaworks_plonk::setup::{CommonPreprocessedInput, Witness};
7 | 
8 | use crate::server::{ORDER_8_ROOT_UNITY, ORDER_R_MINUS_1_ROOT_UNITY};
9 | 
10 | /// Generates a domain to interpolate: 1, omega, omega², ..., omega^size
11 | pub fn generate_domain(omega: &FrElement, size: usize) -> Vec<FrElement> {
12 |     (1..size).fold(vec![FieldElement::one()], |mut acc, _| {
13 |         acc.push(acc.last().unwrap() * omega);
14 |         acc
15 |     })
16 | }
17 | 
18 | /// The identity permutation, auxiliary function to generate the copy constraints.
19 | fn identity_permutation(w: &FrElement, n: usize) -> Vec<FrElement> {
20 |     let u = ORDER_R_MINUS_1_ROOT_UNITY;
21 |     let mut result: Vec<FrElement> = vec![];
22 |     for index_column in 0..=2 {
23 |         for index_row in 0..n {
24 |             result.push(w.pow(index_row) * u.pow(index_column as u64));
25 |         }
26 |     }
27 |     result
28 | }
29 | 
30 | /// Generates the permutation coefficients for the copy constraints,
31 | /// i.e. the polynomials S1, S2, S3.
32 | pub fn generate_permutation_coefficients(
33 |     omega: &FrElement,
34 |     n: usize,
35 |     permutation: &[usize],
36 | ) -> Vec<FrElement> {
37 |     let identity = identity_permutation(omega, n);
38 |     let permuted: Vec<FrElement> = (0..n * 3)
39 |         .map(|i| identity[permutation[i]].clone())
40 |         .collect();
41 |     permuted
42 | }
43 | 
44 | /// Witness generator for the circuit `ASSERT 0 == y ** 2 - x ** 3 - 4`
45 | pub fn circuit_witness(x: &FrElement, y: &FrElement) -> Witness<FrField> {
46 |     let b = FieldElement::from(4);
47 |     let x2 = x * x;
48 |     let x3 = &x2 * x;
49 |     let u = &x3 + &b;
50 |     let y2 = y * y;
51 |     let w = &y2 - &u;
52 |     let empty = x.clone();
53 |     Witness {
54 |         a: vec![
55 |             x.clone(),
56 |             y.clone(),
57 |             x.clone(),
58 |             x2.clone(),
59 |             x3.clone(),
60 |             y.clone(),
61 |             u.clone(),
62 |             w.clone(),
63 |         ],
64 |         b: vec![
65 |             empty.clone(),
66 |             empty.clone(),
67 |             x.clone(),
68 |             x.clone(),
69 |             empty.clone(),
70 |             y.clone(),
71 |             y2.clone(),
72 |             empty.clone(),
73 |         ],
74 |         c: vec![empty.clone(), empty.clone(), x2, x3, u, y2, w, empty],
75 |     }
76 | }
77 | 
78 | /// Common preprocessed input for the circuit `ASSERT 0 == y ** 2 - x ** 3 - 4`
79 | pub fn circuit_common_preprocessed_input() -> CommonPreprocessedInput<FrField> {
80 |     let n: usize = 8;
81 |     let omega = ORDER_8_ROOT_UNITY;
82 |     let domain = generate_domain(&omega, n);
83 | 
84 |     let permutation = &[
85 |         23, 13, 0, 18, 19, 1, 20, 22, 2, 8, 9, 10, 11, 5, 21, 12, 15, 16, 3, 4, 6, 14, 7, 17,
86 |     ];
87 |     let permuted = generate_permutation_coefficients(&omega, n, permutation);
88 | 
89 |     let s1_lagrange: Vec<FrElement> = permuted[..8].to_vec();
90 |     let s2_lagrange: Vec<FrElement> = permuted[8..16].to_vec();
91 |     let s3_lagrange: Vec<FrElement> = permuted[16..].to_vec();
92 | 
93 |     CommonPreprocessedInput {
94 |         n,
95 |         omega,
96 |         k1: ORDER_R_MINUS_1_ROOT_UNITY,
97 |         domain: domain.clone(),
98 | 
99 |         ql: Polynomial::interpolate(
100 |             &domain,
101 |             &[
102 |                 -FieldElement::one(),
103 |                 -FieldElement::one(),
104 |                 FieldElement::zero(),
105 |                 FieldElement::zero(),
106 |                 FieldElement::one(),
107 |                 FieldElement::zero(),
108 |                 -FieldElement::one(),
109 |                 FieldElement::one(),
110 |             ],
111 |         )
112 |         .unwrap(),
113 |         qr: Polynomial::interpolate(
114 |             &domain,
115 |             &[
116 |                 FieldElement::zero(),
117 |                 FieldElement::zero(),
118 |                 FieldElement::zero(),
119 |                 FieldElement::zero(),
120 |                 FieldElement::zero(),
121 |                 FieldElement::zero(),
122 |                 FieldElement::one(),
123 |                 FieldElement::zero(),
124 |             ],
125 |         )
126 |         .unwrap(),
127 |         qo: Polynomial::interpolate(
128 |             &domain,
129 |             &[
130 |                 FieldElement::zero(),
131 |                 FieldElement::zero(),
132 |                 -FieldElement::one(),
133 |                 -FieldElement::one(),
134 |                 -FieldElement::one(),
135 |                 -FieldElement::one(),
136 |                 -FieldElement::one(),
137 |                 FieldElement::zero(),
138 |             ],
139 |         )
140 |         .unwrap(),
141 |         qm: Polynomial::interpolate(
142 |             &domain,
143 |             &[
144 |                 FieldElement::zero(),
145 |                 FieldElement::zero(),
146 |                 FieldElement::one(),
147 |                 FieldElement::one(),
148 |                 FieldElement::zero(),
149 |                 FieldElement::one(),
150 |                 FieldElement::zero(),
151 |                 FieldElement::zero(),
152 |             ],
153 |         )
154 |         .unwrap(),
155 |         qc: Polynomial::interpolate(
156 |             &domain,
157 |             &[
158 |                 FieldElement::zero(),
159 |                 FieldElement::zero(),
160 |                 FieldElement::zero(),
161 |                 FieldElement::zero(),
162 |                 FieldElement::from(4),
163 |                 FieldElement::zero(),
164 |                 FieldElement::zero(),
165 |                 FieldElement::zero(),
166 |             ],
167 |         )
168 |         .unwrap(),
169 | 
170 |         s1: Polynomial::interpolate(&domain, &s1_lagrange).unwrap(),
171 |         s2: Polynomial::interpolate(&domain, &s2_lagrange).unwrap(),
172 |         s3: Polynomial::interpolate(&domain, &s3_lagrange).unwrap(),
173 | 
174 |         s1_lagrange,
175 |         s2_lagrange,
176 |         s3_lagrange,
177 |     }
178 | }
--------------------------------------------------------------------------------
/broken_heart/src/lib.rs:
--------------------------------------------------------------------------------
1 | pub mod circuit;
2 | pub mod server;
3 | pub mod solution;
--------------------------------------------------------------------------------
/broken_heart/src/server.rs:
--------------------------------------------------------------------------------
1 | // -------- Irreduci-bull challenge ---------
2 | 
3 | use lambdaworks_crypto::commitments::{
4 |     kzg::{KateZaveruchaGoldberg, StructuredReferenceString},
5 |     traits::IsCommitmentScheme,
6 | };
7 | use lambdaworks_math::{
8 |     cyclic_group::IsGroup,
9 |     elliptic_curve::{
10 |         short_weierstrass::curves::bls12_381::{
11 |             curve::BLS12381Curve,
12 |             default_types::{FrElement, FrField},
13 |             pairing::BLS12381AtePairing,
14 |             twist::BLS12381TwistCurve,
15 |         },
16 |         traits::{IsEllipticCurve, IsPairing},
17 |     },
18 |     field::element::FieldElement,
19 |     traits::IsRandomFieldElementGenerator,
20 | };
21 | use lambdaworks_plonk::{
22 |     prover::Proof,
23 |     setup::{CommonPreprocessedInput, VerificationKey},
24 |     verifier::Verifier,
25 | };
26 | 
27 | pub const FLAG: &str = "ZK{dummy_flag}";
28 | 
29 | type ChallengeSRS = StructuredReferenceString<
30 |     <BLS12381AtePairing as IsPairing>::G1Point,
31 |     <BLS12381AtePairing as IsPairing>::G2Point,
32 | >;
33 | 
34 | pub const ORDER_8_ROOT_UNITY: FrElement = FrElement::from_hex_unchecked(
35 |     "345766f603fa66e78c0625cd70d77ce2b38b21c28713b7007228fd3397743f7a",
36 | ); // order 8
37 | 
38 | pub const ORDER_R_MINUS_1_ROOT_UNITY: FrElement = FrElement::from_hex_unchecked("7");
39 | pub type ChallengeCS = KateZaveruchaGoldberg<FrField, BLS12381AtePairing>;
40 | pub type ChallengeVK = VerificationKey<<ChallengeCS as IsCommitmentScheme<FrField>>::Commitment>;
41 | pub type ChallengeProof = Proof<FrField, ChallengeCS>;
42 | pub type Pairing = BLS12381AtePairing;
43 | pub type KZG = KateZaveruchaGoldberg<FrField, BLS12381AtePairing>;
44 | pub type CPI = CommonPreprocessedInput<FrField>;
45 | type G1Point = <BLS12381Curve as IsEllipticCurve>::PointRepresentation;
46 | type G2Point = <BLS12381TwistCurve as IsEllipticCurve>::PointRepresentation;
47 | 
48 | pub fn quadratic_non_residue() -> FrElement {
49 |     ORDER_R_MINUS_1_ROOT_UNITY
50 | }
51 | 
52 | /// Generates a test SRS for the BLS12381 curve
53 | /// n is the number of constraints in the system.
54 | pub fn generate_srs(n: usize) -> StructuredReferenceString<G1Point, G2Point> {
55 |     let s = FrElement::from(2);
56 |     let g1 = <BLS12381Curve as IsEllipticCurve>::generator();
57 |     let g2 = <BLS12381TwistCurve as IsEllipticCurve>::generator();
58 | 
59 |     let powers_main_group: Vec<G1Point> = (0..n + 3)
60 |         .map(|exp| g1.operate_with_self(s.pow(exp as u64).representative()))
61 |         .collect();
62 |     let powers_secondary_group = [g2.clone(), g2.operate_with_self(s.representative())];
63 | 
64 |     StructuredReferenceString::new(&powers_main_group, &powers_secondary_group)
65 | }
66 | 
67 | /// A mock of a random number generator, to have deterministic tests.
68 | /// When set to zero, there is no zero knowledge applied, because it is used
69 | /// to get random numbers to blind polynomials.
70 | pub struct TestRandomFieldGenerator;
71 | impl IsRandomFieldElementGenerator<FrField> for TestRandomFieldGenerator {
72 |     fn generate(&self) -> FrElement {
73 |         FrElement::zero()
74 |     }
75 | }
76 | 
77 | pub fn server_endpoint_verify(
78 |     srs: ChallengeSRS,
79 |     common_preprocessed_input: CommonPreprocessedInput<FrField>,
80 |     vk: &ChallengeVK,
81 |     x: &FrElement,
82 |     y: &FrElement,
83 |     proof: &ChallengeProof,
84 | ) -> String {
85 |     let public_input = [x.clone(), y.clone()];
86 |     let kzg = KZG::new(srs);
87 |     let verifier = Verifier::new(kzg);
88 |     let result = verifier.verify(proof, &public_input, &common_preprocessed_input, vk);
89 |     if !result {
90 |         "Invalid Proof".to_string()
91 |     } else if x != &FieldElement::one() {
92 |         "Valid Proof. Congrats!".to_string()
93 |     } else {
94 |         FLAG.to_string()
95 |     }
96 | }
--------------------------------------------------------------------------------
/broken_heart/src/solution.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_crypto::{
2 |     commitments::traits::IsCommitmentScheme, fiat_shamir::transcript::Transcript,
3 | };
4 | use lambdaworks_math::{
5 |     field::{element::FieldElement, traits::IsField},
6 |     traits::{ByteConversion, Serializable},
7 | };
8 | use lambdaworks_plonk::{
9 |     prover::Proof,
10 |     setup::{new_strong_fiat_shamir_transcript, CommonPreprocessedInput, VerificationKey},
11 | };
12 | 
13 | #[allow(unused)]
14 | fn forge_y_for_valid_proof<F: IsField, CS: IsCommitmentScheme<F>>(
15 |     proof: &Proof<F, CS>,
16 |     vk: &VerificationKey<CS::Commitment>,
17 |     common_preprocessed_input: CommonPreprocessedInput<F>,
18 | ) -> FieldElement<F>
19 | where
20 |     CS::Commitment: Serializable,
21 |     FieldElement<F>: ByteConversion,
22 | {
23 |     // Replay interactions like the verifier
24 |     let mut transcript = new_strong_fiat_shamir_transcript::<F, CS>(vk, &[]);
25 | 
26 |     transcript.append(&proof.a_1.serialize());
27 |     transcript.append(&proof.b_1.serialize());
28 |     transcript.append(&proof.c_1.serialize());
29 |     let beta = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
30 |     let gamma = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
31 | 
32 |     transcript.append(&proof.z_1.serialize());
33 |     let alpha = FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
34 | 
35 |     transcript.append(&proof.t_lo_1.serialize());
36 |     transcript.append(&proof.t_mid_1.serialize());
37 |     transcript.append(&proof.t_hi_1.serialize());
38 |     let zeta = &FieldElement::from_bytes_be(&transcript.challenge()).unwrap();
39 | 
40 |     // Forge public input
41 |     let zh_zeta = zeta.pow(common_preprocessed_input.n) - FieldElement::one();
42 | 
43 |     let omega = &common_preprocessed_input.omega;
44 |     let n = common_preprocessed_input.n as u64;
45 |     let one = &FieldElement::one();
46 | 
47 |     let l1_zeta = ((zeta.pow(n) - one) / (zeta - one)) / FieldElement::from(n);
48 | 
49 |     let l2_zeta = omega * &l1_zeta * (zeta - one) / (zeta - omega);
50 | 
51 |     let mut p_constant_zeta = &alpha
52 |         * &proof.z_zeta_omega
53 |         * (&proof.c_zeta + &gamma)
54 |         * (&proof.a_zeta + &beta * &proof.s1_zeta + &gamma)
55 |         * (&proof.b_zeta + &beta * &proof.s2_zeta + &gamma);
56 |     p_constant_zeta = p_constant_zeta - &l1_zeta * &alpha * &alpha;
57 | 
58 |     let p_zeta = p_constant_zeta + &proof.p_non_constant_zeta;
59 |     -(p_zeta + l1_zeta * one - (&zh_zeta * &proof.t_zeta)) / l2_zeta
60 | }
61 | 
62 | #[cfg(test)]
63 | mod tests {
64 |     use lambdaworks_math::field::element::FieldElement;
65 |     use lambdaworks_plonk::{prover::Prover, setup::setup};
66 | 
67 |     use crate::{
68 |         circuit::{circuit_common_preprocessed_input, circuit_witness},
69 |         server::{generate_srs, server_endpoint_verify, TestRandomFieldGenerator, FLAG, KZG},
70 |         solution::forge_y_for_valid_proof,
71 |     };
72 | 
73 |     #[test]
74 |     fn test_challenge() {
75 |         // This is the circuit for `ASSERT 0 == y ** 2 - x ** 3 - 4`
76 |         let cpi = circuit_common_preprocessed_input();
77 |         let srs = generate_srs(cpi.n);
78 |         let kzg = KZG::new(srs.clone());
79 |         let verifying_key = setup(&cpi.clone(), &kzg);
80 | 
81 |         let x = FieldElement::from(0);
82 |         let y = FieldElement::from(2);
83 | 
84 |         let public_input = vec![x.clone(), y.clone()];
85 |         let witness = circuit_witness(&x, &y);
86 | 
87 |         let random_generator = TestRandomFieldGenerator {};
88 |         let prover = Prover::new(kzg.clone(), random_generator);
89 |         let proof = prover.prove(&witness, &public_input, &cpi, &verifying_key);
90 | 
91 |         let response_valid =
92 |             server_endpoint_verify(srs.clone(), cpi.clone(), &verifying_key, &x, &y, &proof);
93 |         assert_eq!("Valid Proof. Congrats!".to_string(), response_valid);
94 | 
95 |         let response_invalid = server_endpoint_verify(
96 |             srs.clone(),
97 |             cpi.clone(),
98 |             &verifying_key,
99 |             &FieldElement::one(),
100 |             &y,
101 |             &proof,
102 |         );
103 |         assert_eq!("Invalid Proof".to_string(), response_invalid);
104 | 
105 |         // Use the real proof to modify the public input
106 |         // and make it pass for `x = 1`
107 |         let forged_y = forge_y_for_valid_proof(&proof, &verifying_key, cpi.clone());
108 | 
109 |         let response_solution = server_endpoint_verify(
110 |             srs.clone(),
111 |             cpi.clone(),
112 |             &verifying_key,
113 |             &FieldElement::one(),
114 |             &forged_y,
115 |             &proof,
116 |         );
117 |         assert_eq!(FLAG.to_string(), response_solution);
118 |     }
119 | }
--------------------------------------------------------------------------------
/challenge_1/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "block_cypher"
3 | version = "0.1.0"
4 | edition = "2021"
5 | 
6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
7 | 
8 | [dependencies]
9 | lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks.git" }
10 | rand = "0.8.5"
11 | 
--------------------------------------------------------------------------------
/challenge_1/README.md:
--------------------------------------------------------------------------------
1 | # The Lost Relic
2 | 
3 | During their quest to find the greatest treasure in the world, the One Piece, Luffy and his friends are wandering inside a subterranean maze. After many hours, they arrive at the door hiding an old relic, which can be instrumental to achieving their goal.
The big problem is that it is made of sea stone and Luffy is unable to use his strength to break it. There are some inscriptions on the walls, which Nico Robin is able to translate. 4 | It says: 5 | "If you can find the secret hidden among these texts, the door will open." 6 | There are many input plaintexts and their corresponding ciphertexts, all of them encrypted using a custom MiMC algorithm under the same key. There are also many skeletons around, of all the people who have so far failed this test. Luckily, Usopp brought his computing device and will try to break the secret. What can he do to recover the secret? -------------------------------------------------------------------------------- /challenge_1/src/cypher.rs: -------------------------------------------------------------------------------- 1 | use crate::field::ChallengeElement; 2 | 3 | const ROUNDS: usize = 2_usize.pow(24); 4 | 5 | pub fn evaluate(x: &ChallengeElement, key: &ChallengeElement) -> ChallengeElement { 6 | (0..ROUNDS).fold(x.clone(), |acc, _| evaluate_round(&acc, key)) 7 | } 8 | 9 | pub fn evaluate_round(x: &ChallengeElement, key: &ChallengeElement) -> ChallengeElement { 10 | (x + key).pow(2_u64) 11 | } 12 | -------------------------------------------------------------------------------- /challenge_1/src/data.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_math::unsigned_integer::element::UnsignedInteger; 2 | 3 | use crate::field::ChallengeElement; 4 | 5 | pub fn pairs() -> [(ChallengeElement, ChallengeElement); 10] { 6 | [ 7 | ( 8 | ChallengeElement::new(UnsignedInteger::from_limbs([ 9 | 183240637262039384, 10 | 10134058328874369520, 11 | 15648036570429036245, 12 | 12373630818091389922, 13 | ])), 14 | ChallengeElement::new(UnsignedInteger::from_limbs([ 15 | 58125280520191997, 16 | 7563295584845025545, 17 | 10505934299123791696, 18 | 7243434167283202274, 19 | ])), 20 | ), 21 | ( 22 | ChallengeElement::new(UnsignedInteger::from_limbs([ 23 | 53793503318333808, 24 | 3207637959751464466, 25 | 14494950274942836597, 26 | 4136788124041118761, 27 | ])), 28 | ChallengeElement::new(UnsignedInteger::from_limbs([ 29 | 311364612610444531, 30 | 15966843845133580520, 31 | 11733134576879619546, 32 | 15728250733723614304, 33 | ])), 34 | ), 35 | ( 36 | ChallengeElement::new(UnsignedInteger::from_limbs([ 37 | 568211605542321808, 38 | 3665906368153986805, 39 | 17170247225323812746, 40 | 15915536692872977361, 41 | ])), 42 | ChallengeElement::new(UnsignedInteger::from_limbs([ 43 | 217852115983473995, 44 | 4166846794769956288, 45 | 6317771942840139764, 46 | 14625875306968385082, 47 | ])), 48 | ), 49 | ( 50 | ChallengeElement::new(UnsignedInteger::from_limbs([ 51 | 207549010898490221, 52 | 4243616016599017025, 53 | 9848596493685366018, 54 | 7918147356679190828, 55 | ])), 56 | ChallengeElement::new(UnsignedInteger::from_limbs([ 57 | 477451920176997361, 58 | 15490207318061191317, 59 | 18290929028207201553, 60 | 12313619366283486653, 61 | ])), 62 | ), 63 | ( 64 | ChallengeElement::new(UnsignedInteger::from_limbs([ 65 | 373838775372616312, 66 | 2673582186589028492, 67 | 7759321698787748238, 68 | 13844063650817543069, 69 | ])), 70 | ChallengeElement::new(UnsignedInteger::from_limbs([ 71 | 501628185314588506, 72 | 360101150955423272, 73 | 9640428090924426801, 74 | 4582961170966616797, 75 | ])), 76 | ), 77 | ( 78 | ChallengeElement::new(UnsignedInteger::from_limbs([ 79 | 87489608385277507, 80 | 7831811476568950391, 81 | 2088690257692214216, 82 | 5066699409404733431, 83 | ])), 84 | 
ChallengeElement::new(UnsignedInteger::from_limbs([ 85 | 355875529635643805, 86 | 13282387072996050664, 87 | 8345773773609640016, 88 | 16439847079178157230, 89 | ])), 90 | ), 91 | ( 92 | ChallengeElement::new(UnsignedInteger::from_limbs([ 93 | 81907762606414085, 94 | 13685489810001955596, 95 | 1500287613219709574, 96 | 2446883281376595718, 97 | ])), 98 | ChallengeElement::new(UnsignedInteger::from_limbs([ 99 | 285826094074462510, 100 | 2412521888121343816, 101 | 17074562155301273597, 102 | 743918196287760338, 103 | ])), 104 | ), 105 | ( 106 | ChallengeElement::new(UnsignedInteger::from_limbs([ 107 | 539334648243077342, 108 | 1241819602239411681, 109 | 5927184179348544209, 110 | 11640860157447976251, 111 | ])), 112 | ChallengeElement::new(UnsignedInteger::from_limbs([ 113 | 502287359473451983, 114 | 2105484562428168146, 115 | 2070489610481465832, 116 | 9074849931098206603, 117 | ])), 118 | ), 119 | ( 120 | ChallengeElement::new(UnsignedInteger::from_limbs([ 121 | 536884484895507488, 122 | 5347777065114328110, 123 | 18395674441153693721, 124 | 10552627961256589724, 125 | ])), 126 | ChallengeElement::new(UnsignedInteger::from_limbs([ 127 | 55173036628470166, 128 | 605478499650837527, 129 | 3313559020999214570, 130 | 17867174237670907305, 131 | ])), 132 | ), 133 | ( 134 | ChallengeElement::new(UnsignedInteger::from_limbs([ 135 | 420067545220093896, 136 | 2975148993333630768, 137 | 15772310561964876975, 138 | 5736425213799489626, 139 | ])), 140 | ChallengeElement::new(UnsignedInteger::from_limbs([ 141 | 245745081194512045, 142 | 17128497290282674116, 143 | 12860542778099276299, 144 | 11433466637600805681, 145 | ])), 146 | ), 147 | ] 148 | } 149 | -------------------------------------------------------------------------------- /challenge_1/src/field.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_math::field::{ 2 | element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, 3 | }; 4 | 5 | pub type ChallengeElement = FieldElement<Stark252PrimeField>; 6 | -------------------------------------------------------------------------------- /challenge_1/src/main.rs: -------------------------------------------------------------------------------- 1 | use data::pairs; 2 | use solver::solve; 3 | 4 | use crate::cypher::evaluate; 5 | 6 | mod cypher; 7 | mod data; 8 | mod field; 9 | mod solver; 10 | 11 | fn main() { 12 | let key = solve(); 13 | 14 | let (p, c) = pairs()[0].clone(); 15 | assert_eq!(evaluate(&p, &key), c); 16 | 17 | println!("Found Key! 
{}", &key); 18 | } 19 | -------------------------------------------------------------------------------- /challenge_1/src/solver.rs: -------------------------------------------------------------------------------- 1 | use crate::field::ChallengeElement; 2 | 3 | pub fn solve() -> ChallengeElement { 4 | println!("Solving..."); 5 | todo!(); 6 | } 7 | -------------------------------------------------------------------------------- /challenge_2/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "template_solution_srs_1" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "366ac95" } 10 | lambdaworks-math= { git = "https://github.com/lambdaclass/lambdaworks", rev = "366ac95" } 11 | -------------------------------------------------------------------------------- /challenge_2/README.md: -------------------------------------------------------------------------------- 1 | # Breaking into the vault of Loki 2 | 3 | After years of careful investigation, you have reached the gate to Loki's vault in the icy mountains of Norway, where it is said that many great treasures and powerful weapons are hidden. The gate seems unbreakable, but you spot some ancient machinery with inscriptions in old runes. After some help from ChatGPT, you are able to translate the symbols and the whole message into modern English, and it reads: 4 | 5 | If you can prove that the polynomial 6 | 7 | $$ 8 | \begin{aligned} 9 | p(x) &= 69 +78x + 32x^2 + 65x^3 + 82x^4 + 71x^5 + 69x^6 + 78x^7 + 84x^8 + 73x^9 \newline &+78x^{10} + 65x^{11} + 32x^{12} + 78x^{13} + 65x^{14}+ 67x^{15} + 73x^{16} + 32x^{17} \newline 10 | &+ 84x^{18} + 73x^{19} + 69x^{20} + 82x^{21} + 82x^{22} + 65 x^{23} 11 | \end{aligned} 12 | $$ 13 | 14 | is equal to $3$ at $x = 1$ modulo the BLS12-381 $r$ parameter, then the gate will open. 15 | 16 | Below is a long list of bytes representing the SRS that can be used to perform KZG commitments. The machinery, after careful examination, performs the KZG verification using pairings. There is only one open place where you can place a wooden tablet with your answer, comprising 48 bytes. You guess this should be the proof of the KZG scheme, providing the point in compressed form, following the ZCash standard. The other elements contain the commitment to $p(x)$, the desired value $3$, and the point $x=1$. You ask ChatGPT for enlightenment, but it suddenly collapses and only shows the message: fatal error. Is this just an impossible task? Perhaps there is some trick to get by Loki's challenge... 
-------------------------------------------------------------------------------- /challenge_2/src/main.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::commitments::{ 2 | kzg::{KateZaveruchaGoldberg, StructuredReferenceString}, 3 | traits::IsCommitmentScheme, 4 | }; 5 | use lambdaworks_math::{ 6 | elliptic_curve::{ 7 | short_weierstrass::{ 8 | curves::bls12_381::{ 9 | curve::BLS12381Curve, 10 | default_types::{FrConfig, FrElement}, 11 | field_extension::BLS12381PrimeField, 12 | pairing::BLS12381AtePairing, 13 | twist::BLS12381TwistCurve, 14 | }, 15 | point::ShortWeierstrassProjectivePoint, 16 | }, 17 | traits::FromAffine, 18 | }, 19 | field::{ 20 | element::FieldElement, fields::montgomery_backed_prime_fields::MontgomeryBackendPrimeField, 21 | }, 22 | polynomial::Polynomial, 23 | unsigned_integer::element::UnsignedInteger, 24 | }; 25 | 26 | type G1Point = ShortWeierstrassProjectivePoint<BLS12381Curve>; 27 | type G2Point = ShortWeierstrassProjectivePoint<BLS12381TwistCurve>; 28 | 29 | type KZG = KateZaveruchaGoldberg<MontgomeryBackendPrimeField<FrConfig, 4>, BLS12381AtePairing>; 30 | pub type Fq = FieldElement<BLS12381PrimeField>; 31 | 32 | fn challenge_polynomial() -> Polynomial<FrElement> { 33 | Polynomial::<FrElement>::new(&[ 34 | FieldElement::from(69), 35 | FieldElement::from(78), 36 | FieldElement::from(32), 37 | FieldElement::from(65), 38 | FieldElement::from(82), 39 | FieldElement::from(71), 40 | FieldElement::from(69), 41 | FieldElement::from(78), 42 | FieldElement::from(84), 43 | FieldElement::from(73), 44 | FieldElement::from(78), 45 | FieldElement::from(65), 46 | FieldElement::from(32), 47 | FieldElement::from(78), 48 | FieldElement::from(65), 49 | FieldElement::from(67), 50 | FieldElement::from(73), 51 | FieldElement::from(32), 52 | FieldElement::from(84), 53 | FieldElement::from(73), 54 | FieldElement::from(69), 55 | FieldElement::from(82), 56 | FieldElement::from(82), 57 | FieldElement::from(65), 58 | ]) 59 | } 60 | 61 | fn main() { 62 | let base_dir = env!("CARGO_MANIFEST_DIR"); 63 | let srs_path = base_dir.to_owned() + "/srs.bin"; 64 | let srs = StructuredReferenceString::<G1Point, G2Point>::from_file(&srs_path).unwrap(); 65 | 66 | let kzg = KZG::new(srs.clone()); 67 | 68 | let p = challenge_polynomial(); 69 | 70 | let p_commitment: G1Point = kzg.commit(&p); 71 | 72 | // If you need to write a bigger number, you can use an UnsignedInteger with explicit limbs, as below. 73 | // If you are writing the solution in Rust you shouldn't need this. 74 | let big_number = UnsignedInteger::<6>::from_limbs([0, 0, 0, 0, 0, 2]); 75 | let y = Fq::new(big_number); 76 | 77 | // TODO: Make your own fake proof 78 | let fake_proof = 79 | ShortWeierstrassProjectivePoint::<BLS12381Curve>::from_affine(Fq::from(0), y).unwrap(); 80 | 81 | println!("Fake proof for submission:"); 82 | println!("{:?}", &fake_proof.to_affine().x().to_string()); 83 | println!("{:?}", &fake_proof.to_affine().y().to_string()); 84 | 85 | assert!(kzg.verify( 86 | &FrElement::from(1), 87 | &FrElement::from(3), 88 | &p_commitment, 89 | &fake_proof 90 | )); 91 | } 92 | -------------------------------------------------------------------------------- /challenge_2/srs.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lambdaclass/lambdaworks_exercises/946d9950d5a0c532c1c154d3366f978d98fca233/challenge_2/srs.bin -------------------------------------------------------------------------------- /challenge_3/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "power" 3 | version = "1.0.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | json = "0.12" 8 | serde = { 
version = "1.0", features = ["derive"] } 9 | actix-web = "4.3" 10 | env_logger = "0.10" 11 | log = "0.4" 12 | rand = "0.8" 13 | serde_json = "1" 14 | tokio = { version = "1.24.2", features = ["sync"] } 15 | lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "366ac95" } 16 | lambdaworks-math= { git = "https://github.com/lambdaclass/lambdaworks", rev = "366ac95" } 17 | serde_cbor = "0.10" 18 | -------------------------------------------------------------------------------- /challenge_3/README.md: -------------------------------------------------------------------------------- 1 | # It is over 9000! 2 | 3 | The Saiyans have landed on planet earth. Our great defenders Krillin, Piccolo, Tien and Gohan 4 | have to hold on till Goku arrives on the scene. 5 | 6 | Vegeta and Nappa have scouters that indicate our heroes power levels 7 | and sadly we are not doing too well. 8 | 9 | Somehow, Gohan has raised his power level to `p_4(X) = 9000`, but it is not good enough. Piccolo `p_3(X)` can help but he is still regenerating, and Krillin `p_2(X)` and Tien `p_1(X)` are in bad shape. The total power of the team is computed as 10 | ``` 11 | P = p_1(X) * 0 + p_2(X) * 0 + p_3(X) * 0 + p_4(X) 12 | ``` 13 | At the current moment, the X is equal to `42`. 14 | 15 | Suddenly Gohan, and Piccolo recieve a message from Bulma that the scouters verify the sensed power level of individual enemies using KZG and for multiple enemies with batched KZG method. Vegeta knows for sure that the power level of Gohan is `p_4(X) = 9000`, so he will know if we change that. If only the team had a way to trick their opponents to believe that their total power level is `P > 9000` - then the enemies will surely flee. -------------------------------------------------------------------------------- /challenge_3/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::net::TcpStream; 2 | use std::io::{Read, Write}; 3 | 4 | use lambdaworks_crypto::commitments::{ 5 | kzg::{KateZaveruchaGoldberg, StructuredReferenceString}, 6 | traits::IsCommitmentScheme, 7 | }; 8 | use lambdaworks_math::{ 9 | elliptic_curve::{ 10 | short_weierstrass::{ 11 | curves::bls12_381::{ 12 | curve::BLS12381Curve, 13 | default_types::{FrElement, FrField}, 14 | field_extension::BLS12381PrimeField, 15 | pairing::BLS12381AtePairing, 16 | twist::BLS12381TwistCurve, 17 | }, 18 | point::ShortWeierstrassProjectivePoint, 19 | }, 20 | }, 21 | field::element::FieldElement, 22 | polynomial::Polynomial, 23 | }; 24 | use serde::{Deserialize, Serialize}; 25 | 26 | const X: u64 = 42; 27 | const NUM_POLYS: usize = 4; 28 | #[allow(clippy::upper_case_acronyms)] 29 | type KZG = KateZaveruchaGoldberg; 30 | 31 | pub type Fq = FieldElement; 32 | 33 | #[derive(Debug, Serialize, Deserialize)] 34 | pub struct PowerProof { 35 | pub proof_x_hex: String, 36 | pub proof_y_hex: String, 37 | pub u: String, 38 | pub y: [String; NUM_POLYS], 39 | pub commitments_x: [String; NUM_POLYS], 40 | pub commitments_y: [String; NUM_POLYS], 41 | } 42 | 43 | type G1Point = ShortWeierstrassProjectivePoint; 44 | type G2Point = ShortWeierstrassProjectivePoint; 45 | 46 | fn load_srs() -> StructuredReferenceString:: { 47 | let base_dir = env!("CARGO_MANIFEST_DIR"); 48 | let srs_path = base_dir.to_owned() + "/srs.bin"; 49 | StructuredReferenceString::::from_file(&srs_path).unwrap() 50 | } 51 | 52 | fn upload_solution(proof: &PowerProof) { 53 | let mut stream = TcpStream::connect("52.7.211.188:8000").unwrap(); 54 | let proof_vec = 
-------------------------------------------------------------------------------- /challenge_3/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::net::TcpStream; 2 | use std::io::{Read, Write}; 3 | 4 | use lambdaworks_crypto::commitments::{ 5 | kzg::{KateZaveruchaGoldberg, StructuredReferenceString}, 6 | traits::IsCommitmentScheme, 7 | }; 8 | use lambdaworks_math::{ 9 | elliptic_curve::{ 10 | short_weierstrass::{ 11 | curves::bls12_381::{ 12 | curve::BLS12381Curve, 13 | default_types::{FrElement, FrField}, 14 | field_extension::BLS12381PrimeField, 15 | pairing::BLS12381AtePairing, 16 | twist::BLS12381TwistCurve, 17 | }, 18 | point::ShortWeierstrassProjectivePoint, 19 | }, 20 | }, 21 | field::element::FieldElement, 22 | polynomial::Polynomial, 23 | }; 24 | use serde::{Deserialize, Serialize}; 25 | 26 | const X: u64 = 42; 27 | const NUM_POLYS: usize = 4; 28 | #[allow(clippy::upper_case_acronyms)] 29 | type KZG = KateZaveruchaGoldberg<FrField, BLS12381AtePairing>; 30 | 31 | pub type Fq = FieldElement<BLS12381PrimeField>; 32 | 33 | #[derive(Debug, Serialize, Deserialize)] 34 | pub struct PowerProof { 35 | pub proof_x_hex: String, 36 | pub proof_y_hex: String, 37 | pub u: String, 38 | pub y: [String; NUM_POLYS], 39 | pub commitments_x: [String; NUM_POLYS], 40 | pub commitments_y: [String; NUM_POLYS], 41 | } 42 | 43 | type G1Point = ShortWeierstrassProjectivePoint<BLS12381Curve>; 44 | type G2Point = ShortWeierstrassProjectivePoint<BLS12381TwistCurve>; 45 | 46 | fn load_srs() -> StructuredReferenceString::<G1Point, G2Point> { 47 | let base_dir = env!("CARGO_MANIFEST_DIR"); 48 | let srs_path = base_dir.to_owned() + "/srs.bin"; 49 | StructuredReferenceString::<G1Point, G2Point>::from_file(&srs_path).unwrap() 50 | } 51 | 52 | fn upload_solution(proof: &PowerProof) { 53 | let mut stream = TcpStream::connect("52.7.211.188:8000").unwrap(); 54 | let proof_vec = serde_cbor::to_vec(&proof).expect("Failed serialization"); 55 | 56 | stream.write_all(&(proof_vec.len() as u64).to_be_bytes()).unwrap(); 57 | stream.write_all(&proof_vec).unwrap(); 58 | 59 | let mut response = String::new(); 60 | stream.read_to_string(&mut response).unwrap(); 61 | println!("Received response: {}", response); 62 | } 63 | 64 | fn main() { 65 | let srs = load_srs(); 66 | let kzg = KZG::new(srs); 67 | let x = FieldElement::from(X); 68 | 69 | let p1_coeffs = [FieldElement::one(), FieldElement::one()]; 70 | let p2_coeffs = [FieldElement::one(), FieldElement::one()]; 71 | let p3_coeffs = [FieldElement::one(), FieldElement::one()]; 72 | // This is Gohan's power level; it can't be tampered with 73 | let p4_coeffs = [FieldElement::from(9000)]; 74 | 75 | // Sample a random batching challenge u 76 | let u = FieldElement::from(rand::random::<u64>()); 77 | 78 | let commit_and_open_at = |coeffs: &[FieldElement<_>]| -> ( 79 | Polynomial<_>, 80 | G1Point, 81 | FieldElement<_> 82 | ) { 83 | let poly = Polynomial::<FrElement>::new(coeffs); 84 | let commitment = kzg.commit(&poly); 85 | let eval = poly.evaluate(&x); 86 | 87 | (poly, commitment, eval) 88 | }; 89 | 90 | let (p1, 91 | p1_comm, 92 | y1) = commit_and_open_at(&p1_coeffs); 93 | 94 | let (p2, 95 | p2_comm, 96 | y2) = commit_and_open_at(&p2_coeffs); 97 | 98 | let (p3, 99 | p3_comm, 100 | y3) = commit_and_open_at(&p3_coeffs); 101 | 102 | let (p4, 103 | p4_comm, 104 | y4) = commit_and_open_at(&p4_coeffs); 105 | 106 | 107 | let ys = [y1, y2, y3, y4]; 108 | let ps = [p1, p2, p3, p4]; 109 | let ps_c = [p1_comm, p2_comm, p3_comm, p4_comm]; 110 | 111 | let proof = kzg.open_batch(&x, &ys, &ps, &u); 112 | assert!(kzg.verify_batch(&x, &ys, &ps_c, &proof, &u)); 113 | 114 | let power_proof = PowerProof { 115 | proof_x_hex: proof.to_affine().x().to_string(), 116 | proof_y_hex: proof.to_affine().y().to_string(), 117 | u: u.to_string(), 118 | y: ys.map(|y| y.to_string() ), 119 | commitments_x: ps_c.clone().map(|c| c.to_affine().x().to_string()), 120 | commitments_y: ps_c.map(|c| c.to_affine().y().to_string()), 121 | }; 122 | 123 | upload_solution(&power_proof); 124 | } -------------------------------------------------------------------------------- /challenge_3/srs.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lambdaclass/lambdaworks_exercises/946d9950d5a0c532c1c154d3366f978d98fca233/challenge_3/srs.bin -------------------------------------------------------------------------------- /message/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "lambdaworks-stark" 3 | version = "0.1.0" 4 | edition = "2021" 5 | rust-version = "1.66" 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [dependencies] 10 | rand = "0.8.5" 11 | lambdaworks-math = { git = "https://github.com/lambdaclass/lambdaworks", rev = "a17b951" } 12 | lambdaworks-crypto = { git = "https://github.com/lambdaclass/lambdaworks", rev = "a17b951" } 13 | thiserror = "1.0.38" 14 | log = "0.4.17" 15 | bincode = { version = "2.0.0-rc.2", tag = "v2.0.0-rc.2", git = "https://github.com/bincode-org/bincode.git" } 16 | sha3 = "0.10.6" 17 | serde_json = "1.0" 18 | num-integer = "0.1.45" 19 | itertools = "0.11.0" 20 | rayon = { version = "1.7.0", optional = true } 21 | 22 | [dev-dependencies] 23 | proptest = "1.2.0" 24 | hex = "0.4.3" 25 | criterion = "0.4" 26 | env_logger = "*" 27 | test-log = { version = "0.2.11", features = ["log"] } 28 | 
assert_matches = "1.5.0" 29 | rstest = "0.17.0" 30 | 31 | [features] 32 | test_fiat_shamir = [] 33 | instruments = [] # This enables timing prints in prover and verifier 34 | metal = ["lambdaworks-math/metal"] 35 | parallel = ["dep:rayon"] 36 | 37 | [profile.release] 38 | lto = true 39 | opt-level = 3 40 | codegen-units = 1 41 | 42 | [profile.test] 43 | lto = "thin" 44 | opt-level = 3 45 | debug = 2 46 | -------------------------------------------------------------------------------- /message/LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /message/src/cairo/cairo_layout.rs: -------------------------------------------------------------------------------- 1 | #[derive(Debug, Clone, PartialEq, Eq)] 2 | pub enum CairoLayout { 3 | Plain, 4 | Small, 5 | Dex, 6 | Recursive, 7 | Starknet, 8 | StarknetWithKeccak, 9 | RecursiveLargeOutput, 10 | AllCairo, 11 | AllSolidity, 12 | Dynamic, 13 | } 14 | 15 | impl CairoLayout { 16 | pub fn as_str(&self) -> &'static str { 17 | match self { 18 | CairoLayout::Plain => "plain", 19 | CairoLayout::Small => "small", 20 | CairoLayout::Dex => "dex", 21 | CairoLayout::Recursive => "recursive", 22 | CairoLayout::Starknet => "starknet", 23 | CairoLayout::StarknetWithKeccak => "starknet_with_keccak", 24 | CairoLayout::RecursiveLargeOutput => "recursive_large_output", 25 | CairoLayout::AllCairo => "all_cairo", 26 | CairoLayout::AllSolidity => "all_solidity", 27 | CairoLayout::Dynamic => "dynamic", 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /message/src/cairo/cairo_mem.rs: -------------------------------------------------------------------------------- 1 | use crate::FE; 2 | 3 | use super::errors::CairoImportError; 4 | // use crate::FE; 5 | use lambdaworks_math::traits::ByteConversion; 6 | use std::{collections::HashMap, fs}; 7 | 8 | // `FE` is used as the type of values stored in 9 | // the Cairo memory. We should decide if this is 10 | // correct or we should consider another type. 11 | #[derive(Clone, Debug, PartialEq)] 12 | pub struct CairoMemory { 13 | pub data: HashMap<u64, FE>, 14 | } 15 | 16 | impl CairoMemory { 17 | pub fn new(data: HashMap<u64, FE>) -> Self { 18 | Self { data } 19 | } 20 | 21 | /// Given a memory address, gets the value stored in it if 22 | /// the address exists. 
23 | pub fn get(&self, addr: &u64) -> Option<&FE> { 24 | self.data.get(addr) 25 | } 26 | 27 | pub fn len(&self) -> usize { 28 | self.data.len() 29 | } 30 | 31 | pub fn is_empty(&self) -> bool { 32 | self.data.is_empty() 33 | } 34 | 35 | pub fn from_bytes_le(bytes: &[u8]) -> Result<Self, CairoImportError> { 36 | // Each row is an 8-byte address 37 | // and a 32-byte value (a field element) 38 | const ROW_SIZE: usize = 8 + 32; 39 | 40 | if bytes.len() % ROW_SIZE != 0 { 41 | return Err(CairoImportError::IncorrectNumberOfBytes); 42 | } 43 | let num_rows = bytes.len() / ROW_SIZE; 44 | 45 | let mut data = HashMap::with_capacity(num_rows); 46 | 47 | for i in 0..num_rows { 48 | let address = 49 | u64::from_le_bytes(bytes[i * ROW_SIZE..i * ROW_SIZE + 8].try_into().unwrap()); 50 | let value = FE::from_bytes_le( 51 | bytes[i * ROW_SIZE + 8..i * ROW_SIZE + 40] 52 | .try_into() 53 | .unwrap(), 54 | ) 55 | .unwrap(); 56 | 57 | data.insert(address, value); 58 | } 59 | 60 | Ok(Self::new(data)) 61 | } 62 | 63 | pub fn from_file(path: &str) -> Result<Self, CairoImportError> { 64 | let data = fs::read(path)?; 65 | Self::from_bytes_le(&data) 66 | } 67 | } 68 | 69 | #[cfg(test)] 70 | mod tests { 71 | use super::*; 72 | 73 | #[test] 74 | fn mem_indexes_are_contiguous_in_bytes_of_mul_program() { 75 | /* 76 | Hex from the trace of the following cairo program 77 | 78 | func main() { 79 | let x = 2; 80 | let y = 3; 81 | assert x * y = 6; 82 | return(); 83 | } 84 | 85 | Generated with: 86 | 87 | cairo-compile multiply.cairo --output multiply.out 88 | 89 | cairo-run --layout all --trace_file trace.out --memory_file mem.out --program multiply.out 90 | 91 | xxd -p mem.out 92 | */ 93 | 94 | let bytes = hex::decode("01000000000000000080ff7f01800648000000000000000000000000000000000000000000000000020000000000000006000000000000000000000000000000000000000000000000000000000000000300000000000000ff7fff7f01800640000000000000000000000000000000000000000000000000040000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000fe7fff7fff7f8b20000000000000000000000000000000000000000000000000060000000000000009000000000000000000000000000000000000000000000000000000000000000700000000000000090000000000000000000000000000000000000000000000000000000000000008000000000000000600000000000000000000000000000000000000000000000000000000000000").unwrap(); 95 | 96 | let memory = CairoMemory::from_bytes_le(&bytes).unwrap(); 97 | 98 | let mut sorted_addrs = memory.data.into_keys().collect::<Vec<u64>>(); 99 | sorted_addrs.sort(); 100 | 101 | for (i, addr) in sorted_addrs.into_iter().enumerate() { 102 | assert_eq!(addr, (i + 1) as u64); 103 | } 104 | } 105 | 106 | #[test] 107 | fn test_wrong_amount_of_bytes_gives_err() { 108 | let bytes = hex::decode("01000000000000000080ff7f01800648000000000000000000000000000000000000000000000000020000000000000006000000000000000000000000000000000000000000000000000000000000000300000000000000ff7fff7f01800640000000000000000000000000000000000000000000000000040000000000000006000000000000000000000000000000000000000000000000000000000000000500000000000000fe7fff7fff7f8b2000000000000000000000000000000000000000000000000006000000000000000900000000000000000000000000000000000000000000000000000000000000070000000000000009000000000000000000000000000000000000000000000000000000000000000800000000000000060000000000000000000000000000000000000000000000000000000000000088").unwrap(); 109 | 110 | match CairoMemory::from_bytes_le(&bytes) { 111 | Err(CairoImportError::IncorrectNumberOfBytes) => (), 112 | Err(_) => panic!(), 113 | Ok(_) => panic!(), 114 | } 115 | } 116 |
117 | #[test] 118 | fn mem_indexes_are_contiguous_when_loading_from_file_mul_program() { 119 | let base_dir = env!("CARGO_MANIFEST_DIR"); 120 | dbg!(base_dir); 121 | let dir = base_dir.to_owned() + "/tests/data/mul_mem.out"; 122 | 123 | let memory = CairoMemory::from_file(&dir).unwrap(); 124 | 125 | let mut sorted_addrs = memory.data.into_keys().collect::<Vec<u64>>(); 126 | sorted_addrs.sort(); 127 | 128 | for (i, addr) in sorted_addrs.into_iter().enumerate() { 129 | assert_eq!(addr, (i + 1) as u64); 130 | } 131 | } 132 | } 133 | -------------------------------------------------------------------------------- /message/src/cairo/decode/instruction_offsets.rs: -------------------------------------------------------------------------------- 1 | use crate::FE; 2 | use lambdaworks_math::field::{element::FieldElement, traits::IsField}; 3 | 4 | use super::instruction_flags::aux_get_last_nim_of_field_element; 5 | 6 | const OFF_DST_OFF: u32 = 0; 7 | const OFF_OP0_OFF: u32 = 16; 8 | const OFF_OP1_OFF: u32 = 32; 9 | const OFFX_MASK: u64 = 0xFFFF; 10 | 11 | #[derive(Debug, PartialEq, Eq)] 12 | pub struct InstructionOffsets { 13 | pub off_dst: i32, 14 | pub off_op0: i32, 15 | pub off_op1: i32, 16 | } 17 | 18 | impl InstructionOffsets { 19 | pub fn new(mem_value: &FE) -> Self { 20 | Self { 21 | off_dst: Self::decode_offset(mem_value, OFF_DST_OFF), 22 | off_op0: Self::decode_offset(mem_value, OFF_OP0_OFF), 23 | off_op1: Self::decode_offset(mem_value, OFF_OP1_OFF), 24 | } 25 | } 26 | 27 | pub fn decode_offset(mem_value: &FE, instruction_offset: u32) -> i32 { 28 | let offset = aux_get_last_nim_of_field_element(mem_value) >> instruction_offset & OFFX_MASK; 29 | let vectorized_offset = offset.to_le_bytes(); 30 | let aux = [ 31 | vectorized_offset[0], 32 | vectorized_offset[1].overflowing_sub(128).0, 33 | ]; 34 | i32::from(i16::from_le_bytes(aux)) 35 | } 36 | 37 | pub fn to_trace_representation<F: IsField>(&self) -> [FieldElement<F>; 3] { 38 | [ 39 | to_unbiased_representation(self.off_dst), 40 | to_unbiased_representation(self.off_op0), 41 | to_unbiased_representation(self.off_op1), 42 | ] 43 | } 44 | } 45 | 46 | /// Returns an unbiased representation of the number. This is applied to 47 | /// instruction offsets as explained in section 9.4 of the Cairo whitepaper 48 | /// to be in the range [0, 2^16). 
https://eprint.iacr.org/2021/1063.pdf 49 | fn to_unbiased_representation<F: IsField>(n: i32) -> FieldElement<F> { 50 | let b15 = 2u64.pow(15u32); 51 | if n < 0 { 52 | FieldElement::<F>::from(b15 - n.unsigned_abs() as u64) 53 | } else { 54 | FieldElement::<F>::from(n as u64 + b15) 55 | } 56 | } 57 | 58 | #[cfg(test)] 59 | mod tests { 60 | use super::*; 61 | 62 | #[test] 63 | fn assert_opcode_flag_is_correct_1() { 64 | // Instruction A 65 | let value = FE::from(0x480680017fff8000); 66 | let instruction_offsets = InstructionOffsets::new(&value); 67 | 68 | assert_eq!(instruction_offsets.off_dst, 0); 69 | assert_eq!(instruction_offsets.off_op0, -1); 70 | assert_eq!(instruction_offsets.off_op1, 1); 71 | } 72 | 73 | #[test] 74 | fn assert_opcode_flag_is_correct_2() { 75 | // Instruction A 76 | let value = FE::from(0x208b7fff7fff7ffe); 77 | let instruction_offsets = InstructionOffsets::new(&value); 78 | 79 | assert_eq!(instruction_offsets.off_dst, -2); 80 | assert_eq!(instruction_offsets.off_op0, -1); 81 | assert_eq!(instruction_offsets.off_op1, -1); 82 | } 83 | 84 | #[test] 85 | fn assert_opcode_flag_is_correct_3() { 86 | // Instruction A 87 | let value = FE::from(0x48327ffc7ffa8000); 88 | let instruction_offsets = InstructionOffsets::new(&value); 89 | 90 | assert_eq!(instruction_offsets.off_dst, 0); 91 | assert_eq!(instruction_offsets.off_op0, -6); 92 | assert_eq!(instruction_offsets.off_op1, -4); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /message/src/cairo/decode/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod instruction_flags; 2 | pub mod instruction_offsets; 3 | -------------------------------------------------------------------------------- /message/src/cairo/errors.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | #[derive(Error, Debug)] 4 | pub enum CairoImportError { 5 | #[error("Bytes should be a multiple of 24 for trace or 40 for memory")] 6 | IncorrectNumberOfBytes, 7 | #[error("IO Error")] 8 | FileError(#[from] std::io::Error), 9 | } 10 | 11 | #[derive(Error, Debug, PartialEq)] 12 | pub enum InstructionDecodingError { 13 | #[error("Invalid opcode value")] 14 | InvalidOpcode, 15 | #[error("Invalid pc_update value")] 16 | InvalidPcUpdate, 17 | #[error("Invalid ap_update value")] 18 | InvalidApUpdate, 19 | #[error("Invalid res_logic value")] 20 | InvalidResLogic, 21 | #[error("Invalid op1_src value")] 22 | InvalidOp1Src, 23 | #[error("Invalid op0_reg value")] 24 | InvalidOp0Reg, 25 | #[error("Invalid dst_reg value")] 26 | InvalidDstReg, 27 | #[error("Instruction not found in memory")] 28 | InstructionNotFound, 29 | } 30 | -------------------------------------------------------------------------------- /message/src/cairo/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod air; 2 | pub mod cairo_layout; 3 | pub mod cairo_mem; 4 | pub mod decode; 5 | pub mod errors; 6 | pub mod execution_trace; 7 | pub mod register_states; 8 | pub mod runner; 9 | -------------------------------------------------------------------------------- /message/src/cairo/register_states.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | cairo_mem::CairoMemory, 3 | decode::{instruction_flags::CairoInstructionFlags, instruction_offsets::InstructionOffsets}, 4 | errors::{CairoImportError, InstructionDecodingError}, 5 | }; 6 | use std::fs; 7 | 8 | 
#[derive(PartialEq, Clone, Debug)] 9 | pub struct RegistersState { 10 | pub pc: u64, 11 | pub fp: u64, 12 | pub ap: u64, 13 | } 14 | 15 | impl RegistersState { 16 | fn instruction_flags_and_offsets( 17 | &self, 18 | memory: &CairoMemory, 19 | ) -> Result<(CairoInstructionFlags, InstructionOffsets), InstructionDecodingError> { 20 | let instruction = memory 21 | .get(&self.pc) 22 | .ok_or(InstructionDecodingError::InstructionNotFound)?; 23 | 24 | let flags = CairoInstructionFlags::try_from(instruction)?; 25 | let offsets = InstructionOffsets::new(instruction); 26 | 27 | Ok((flags, offsets)) 28 | } 29 | } 30 | 31 | #[derive(PartialEq, Clone, Debug)] 32 | pub struct RegisterStates { 33 | pub rows: Vec<RegistersState>, 34 | } 35 | 36 | impl RegisterStates { 37 | pub fn steps(&self) -> usize { 38 | self.rows.len() 39 | } 40 | 41 | pub fn flags_and_offsets( 42 | &self, 43 | memory: &CairoMemory, 44 | ) -> Result<Vec<(CairoInstructionFlags, InstructionOffsets)>, InstructionDecodingError> { 45 | self.rows 46 | .iter() 47 | .map(|state| state.instruction_flags_and_offsets(memory)) 48 | .collect() 49 | } 50 | 51 | pub fn from_bytes_le(bytes: &[u8]) -> Result<Self, CairoImportError> { 52 | // Each row of the trace is a RegistersState 53 | // with ap, fp, pc, each 8 bytes long (u64) 54 | const ROW_SIZE: usize = 8 * 3; 55 | 56 | if bytes.len() % ROW_SIZE != 0 { 57 | return Err(CairoImportError::IncorrectNumberOfBytes); 58 | } 59 | let num_rows = bytes.len() / ROW_SIZE; 60 | 61 | let rows = (0..num_rows) 62 | .map(|i| RegistersState { 63 | ap: u64::from_le_bytes(bytes[i * ROW_SIZE..i * ROW_SIZE + 8].try_into().unwrap()), 64 | fp: u64::from_le_bytes( 65 | bytes[i * ROW_SIZE + 8..i * ROW_SIZE + 16] 66 | .try_into() 67 | .unwrap(), 68 | ), 69 | pc: u64::from_le_bytes( 70 | bytes[i * ROW_SIZE + 16..i * ROW_SIZE + 24] 71 | .try_into() 72 | .unwrap(), 73 | ), 74 | }) 75 | .collect::<Vec<RegistersState>>(); 76 | 77 | Ok(Self { rows }) 78 | } 79 | 80 | pub fn from_file(path: &str) -> Result<Self, CairoImportError> { 81 | let data = fs::read(path)?; 82 | Self::from_bytes_le(&data) 83 | } 84 | } 85 | 86 | #[cfg(test)] 87 | mod tests { 88 | use crate::{cairo::decode::instruction_flags::*, FE}; 89 | 90 | use super::*; 91 | use std::collections::HashMap; 92 | 93 | #[test] 94 | fn mul_program_gives_expected_trace() { 95 | /* 96 | Hex from the trace of the following cairo program 97 | 98 | func main() { 99 | let x = 2; 100 | let y = 3; 101 | assert x * y = 6; 102 | return(); 103 | } 104 | 105 | Generated with: 106 | 107 | cairo-compile multiply.cairo --output multiply.out 108 | 109 | cairo-run --layout all --trace_file trace.out --memory_file mem.out --program multiply.out 110 | 111 | xxd -p trace.out 112 | */ 113 | 114 | let bytes = hex::decode("080000000000000008000000000000000100000000000000090000000000000008000000000000000300000000000000090000000000000008000000000000000500000000000000").unwrap(); 115 | 116 | let register_states = RegisterStates::from_bytes_le(&bytes); 117 | 118 | let expected_state0 = RegistersState { 119 | ap: 8, 120 | fp: 8, 121 | pc: 1, 122 | }; 123 | 124 | let expected_state1 = RegistersState { 125 | ap: 9, 126 | fp: 8, 127 | pc: 3, 128 | }; 129 | 130 | let expected_state2 = RegistersState { 131 | ap: 9, 132 | fp: 8, 133 | pc: 5, 134 | }; 135 | 136 | let expected_reg_states = RegisterStates { 137 | rows: [expected_state0, expected_state1, expected_state2].to_vec(), 138 | }; 139 | 140 | assert_eq!(register_states.unwrap(), expected_reg_states) 141 | } 142 | 143 | #[test] 144 | fn wrong_amount_of_bytes_gives_err() { 145 | let bytes = hex::decode("080000000000").unwrap(); 146 | 147 | match RegisterStates::from_bytes_le(&bytes) { 148 
| Err(CairoImportError::IncorrectNumberOfBytes) => (), 149 | Err(_) => panic!(), 150 | Ok(_) => panic!(), 151 | } 152 | } 153 | 154 | #[test] 155 | fn loads_mul_trace_from_file_correctly() { 156 | let base_dir = env!("CARGO_MANIFEST_DIR"); 157 | dbg!(base_dir); 158 | let dir = base_dir.to_owned() + "/tests/data/mul_trace.out"; 159 | 160 | let register_states = RegisterStates::from_file(&dir).unwrap(); 161 | 162 | let expected_state0 = RegistersState { 163 | ap: 8, 164 | fp: 8, 165 | pc: 1, 166 | }; 167 | 168 | let expected_state1 = RegistersState { 169 | ap: 9, 170 | fp: 8, 171 | pc: 3, 172 | }; 173 | 174 | let expected_state2 = RegistersState { 175 | ap: 9, 176 | fp: 8, 177 | pc: 5, 178 | }; 179 | 180 | let expected_reg_states = RegisterStates { 181 | rows: [expected_state0, expected_state1, expected_state2].to_vec(), 182 | }; 183 | 184 | assert_eq!(register_states, expected_reg_states); 185 | } 186 | 187 | #[test] 188 | fn decode_instruction_flags_and_offsets() { 189 | let data = HashMap::from([ 190 | (1u64, FE::from(0x480680017fff8000)), 191 | (2u64, FE::from(0x1104800180018000)), 192 | ]); 193 | 194 | let memory = CairoMemory::new(data); 195 | let state1 = RegistersState { 196 | ap: 8, 197 | fp: 8, 198 | pc: 1, 199 | }; 200 | let state2 = RegistersState { 201 | ap: 9, 202 | fp: 8, 203 | pc: 2, 204 | }; 205 | 206 | let trace = RegisterStates { 207 | rows: [state1, state2].to_vec(), 208 | }; 209 | 210 | let expected_flags1 = CairoInstructionFlags { 211 | opcode: CairoOpcode::AssertEq, 212 | pc_update: PcUpdate::Regular, 213 | ap_update: ApUpdate::Add1, 214 | op0_reg: Op0Reg::FP, 215 | op1_src: Op1Src::Imm, 216 | res_logic: ResLogic::Op1, 217 | dst_reg: DstReg::AP, 218 | }; 219 | 220 | let expected_offsets1 = InstructionOffsets { 221 | off_dst: 0, 222 | off_op0: -1, 223 | off_op1: 1, 224 | }; 225 | 226 | let expected_flags2 = CairoInstructionFlags { 227 | opcode: CairoOpcode::Call, 228 | pc_update: PcUpdate::JumpRel, 229 | ap_update: ApUpdate::Regular, 230 | op0_reg: Op0Reg::AP, 231 | op1_src: Op1Src::Imm, 232 | res_logic: ResLogic::Op1, 233 | dst_reg: DstReg::AP, 234 | }; 235 | 236 | let expected_offsets2 = InstructionOffsets { 237 | off_dst: 0, 238 | off_op0: 1, 239 | off_op1: 1, 240 | }; 241 | 242 | let flags_and_offsets = trace.flags_and_offsets(&memory).unwrap(); 243 | 244 | assert_eq!( 245 | flags_and_offsets, 246 | vec![ 247 | (expected_flags1, expected_offsets1), 248 | (expected_flags2, expected_offsets2) 249 | ] 250 | ); 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /message/src/cairo/runner/file_writer.rs: -------------------------------------------------------------------------------- 1 | use bincode::enc::write::Writer; 2 | use std::io::{self, Write}; 3 | 4 | pub struct FileWriter { 5 | buf_writer: io::BufWriter<std::fs::File>, 6 | bytes_written: usize, 7 | } 8 | 9 | impl Writer for FileWriter { 10 | fn write(&mut self, bytes: &[u8]) -> Result<(), bincode::error::EncodeError> { 11 | self.buf_writer 12 | .write_all(bytes) 13 | .map_err(|e| bincode::error::EncodeError::Io { 14 | inner: e, 15 | index: self.bytes_written, 16 | })?; 17 | 18 | self.bytes_written += bytes.len(); 19 | 20 | Ok(()) 21 | } 22 | } 23 | 24 | impl FileWriter { 25 | pub fn new(buf_writer: io::BufWriter<std::fs::File>) -> Self { 26 | Self { 27 | buf_writer, 28 | bytes_written: 0, 29 | } 30 | } 31 | 32 | pub fn flush(&mut self) -> io::Result<()> { 33 | self.buf_writer.flush() 34 | } 35 | } 36 | -------------------------------------------------------------------------------- 
/message/src/cairo/runner/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod file_writer; 2 | pub mod run; 3 | pub mod vec_writer; 4 | -------------------------------------------------------------------------------- /message/src/cairo/runner/program.json: -------------------------------------------------------------------------------- 1 | { 2 | "attributes": [], 3 | "builtins": [], 4 | "compiler_version": "0.10.3", 5 | "data": [ 6 | "0x480680017fff8000", 7 | "0x3", 8 | "0x400680017fff7fff", 9 | "0x3", 10 | "0x208b7fff7fff7ffe" 11 | ], 12 | "debug_info": { 13 | "file_contents": {}, 14 | "instruction_locations": { 15 | "0": { 16 | "accessible_scopes": [ 17 | "__main__", 18 | "__main__.main" 19 | ], 20 | "flow_tracking_data": { 21 | "ap_tracking": { 22 | "group": 0, 23 | "offset": 0 24 | }, 25 | "reference_ids": { 26 | "__main__.main.x": 0, 27 | "__main__.main.y": 1 28 | } 29 | }, 30 | "hints": [], 31 | "inst": { 32 | "end_col": 18, 33 | "end_line": 4, 34 | "input_file": { 35 | "filename": "simple_program.cairo" 36 | }, 37 | "start_col": 17, 38 | "start_line": 4 39 | } 40 | }, 41 | "2": { 42 | "accessible_scopes": [ 43 | "__main__", 44 | "__main__.main" 45 | ], 46 | "flow_tracking_data": { 47 | "ap_tracking": { 48 | "group": 0, 49 | "offset": 1 50 | }, 51 | "reference_ids": { 52 | "__main__.main.__temp0": 2, 53 | "__main__.main.x": 0, 54 | "__main__.main.y": 1 55 | } 56 | }, 57 | "hints": [], 58 | "inst": { 59 | "end_col": 19, 60 | "end_line": 4, 61 | "input_file": { 62 | "filename": "simple_program.cairo" 63 | }, 64 | "start_col": 2, 65 | "start_line": 4 66 | } 67 | }, 68 | "4": { 69 | "accessible_scopes": [ 70 | "__main__", 71 | "__main__.main" 72 | ], 73 | "flow_tracking_data": { 74 | "ap_tracking": { 75 | "group": 0, 76 | "offset": 1 77 | }, 78 | "reference_ids": { 79 | "__main__.main.__temp0": 2, 80 | "__main__.main.x": 0, 81 | "__main__.main.y": 1 82 | } 83 | }, 84 | "hints": [], 85 | "inst": { 86 | "end_col": 12, 87 | "end_line": 5, 88 | "input_file": { 89 | "filename": "simple_program.cairo" 90 | }, 91 | "start_col": 2, 92 | "start_line": 5 93 | } 94 | } 95 | } 96 | }, 97 | "hints": {}, 98 | "identifiers": { 99 | "__main__.main": { 100 | "decorators": [], 101 | "pc": 0, 102 | "type": "function" 103 | }, 104 | "__main__.main.Args": { 105 | "full_name": "__main__.main.Args", 106 | "members": {}, 107 | "size": 0, 108 | "type": "struct" 109 | }, 110 | "__main__.main.ImplicitArgs": { 111 | "full_name": "__main__.main.ImplicitArgs", 112 | "members": {}, 113 | "size": 0, 114 | "type": "struct" 115 | }, 116 | "__main__.main.Return": { 117 | "cairo_type": "()", 118 | "type": "type_definition" 119 | }, 120 | "__main__.main.SIZEOF_LOCALS": { 121 | "type": "const", 122 | "value": 0 123 | }, 124 | "__main__.main.__temp0": { 125 | "cairo_type": "felt", 126 | "full_name": "__main__.main.__temp0", 127 | "references": [ 128 | { 129 | "ap_tracking_data": { 130 | "group": 0, 131 | "offset": 1 132 | }, 133 | "pc": 2, 134 | "value": "[cast(ap + (-1), felt*)]" 135 | } 136 | ], 137 | "type": "reference" 138 | }, 139 | "__main__.main.x": { 140 | "cairo_type": "felt", 141 | "full_name": "__main__.main.x", 142 | "references": [ 143 | { 144 | "ap_tracking_data": { 145 | "group": 0, 146 | "offset": 0 147 | }, 148 | "pc": 0, 149 | "value": "cast(1, felt)" 150 | } 151 | ], 152 | "type": "reference" 153 | }, 154 | "__main__.main.y": { 155 | "cairo_type": "felt", 156 | "full_name": "__main__.main.y", 157 | "references": [ 158 | { 159 | "ap_tracking_data": { 160 | 
"group": 0, 161 | "offset": 0 162 | }, 163 | "pc": 0, 164 | "value": "cast(2, felt)" 165 | } 166 | ], 167 | "type": "reference" 168 | } 169 | }, 170 | "main_scope": "__main__", 171 | "prime": "0x800000000000011000000000000000000000000000000000000000000000001", 172 | "reference_manager": { 173 | "references": [ 174 | { 175 | "ap_tracking_data": { 176 | "group": 0, 177 | "offset": 0 178 | }, 179 | "pc": 0, 180 | "value": "cast(1, felt)" 181 | }, 182 | { 183 | "ap_tracking_data": { 184 | "group": 0, 185 | "offset": 0 186 | }, 187 | "pc": 0, 188 | "value": "cast(2, felt)" 189 | }, 190 | { 191 | "ap_tracking_data": { 192 | "group": 0, 193 | "offset": 1 194 | }, 195 | "pc": 2, 196 | "value": "[cast(ap + (-1), felt*)]" 197 | } 198 | ] 199 | } 200 | } 201 | -------------------------------------------------------------------------------- /message/src/cairo/runner/program.memory: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lambdaclass/lambdaworks_exercises/946d9950d5a0c532c1c154d3366f978d98fca233/message/src/cairo/runner/program.memory -------------------------------------------------------------------------------- /message/src/cairo/runner/program.trace: -------------------------------------------------------------------------------- 1 |    -------------------------------------------------------------------------------- /message/src/cairo/runner/vec_writer.rs: -------------------------------------------------------------------------------- 1 | use cairo_vm::felt::Felt252; 2 | use std::io::{self, Write}; 3 | 4 | pub struct VecWriter<'a> { 5 | buf_writer: &'a mut Vec, 6 | } 7 | 8 | impl bincode::enc::write::Writer for VecWriter<'_> { 9 | fn write(&mut self, bytes: &[u8]) -> Result<(), bincode::error::EncodeError> { 10 | self.buf_writer 11 | .write_all(bytes) 12 | .expect("Shouldn't fail in memory vector"); 13 | 14 | Ok(()) 15 | } 16 | } 17 | 18 | impl<'a> VecWriter<'a> { 19 | pub fn new(vec: &'a mut Vec) -> Self { 20 | Self { buf_writer: vec } 21 | } 22 | 23 | pub fn flush(&mut self) -> io::Result<()> { 24 | self.buf_writer.flush() 25 | } 26 | 27 | pub fn write_encoded_trace( 28 | &mut self, 29 | relocated_trace: &[cairo_vm::vm::trace::trace_entry::TraceEntry], 30 | ) { 31 | for entry in relocated_trace.iter() { 32 | self.buf_writer 33 | .extend_from_slice(&((entry.ap as u64).to_le_bytes())); 34 | self.buf_writer 35 | .extend_from_slice(&((entry.fp as u64).to_le_bytes())); 36 | self.buf_writer 37 | .extend_from_slice(&((entry.pc as u64).to_le_bytes())); 38 | } 39 | } 40 | 41 | /// Writes a binary representation of the relocated memory. 
42 | /// 43 | /// The memory pairs (address, value) are encoded and concatenated: 44 | /// * address -> 8-byte encoded 45 | /// * value -> 32-byte encoded 46 | pub fn write_encoded_memory(&mut self, relocated_memory: &[Option<Felt252>]) { 47 | for (i, memory_cell) in relocated_memory.iter().enumerate() { 48 | match memory_cell { 49 | None => continue, 50 | Some(unwrapped_memory_cell) => { 51 | self.buf_writer.extend_from_slice(&(i as u64).to_le_bytes()); 52 | self.buf_writer 53 | .extend_from_slice(&unwrapped_memory_cell.to_le_bytes()); 54 | } 55 | } 56 | } 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /message/src/lib.rs: -------------------------------------------------------------------------------- 1 | // Clippy's op_ref lint suggests not using references in FieldElement operations. 2 | // Following it would copy all the limbs, so the lint is disabled. 3 | #![allow(clippy::op_ref)] 4 | 5 | use lambdaworks_math::field::{ 6 | element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, 7 | }; 8 | 9 | // pub mod cairo; 10 | pub mod starks; 11 | 12 | pub type PrimeField = Stark252PrimeField; 13 | pub type FE = FieldElement<PrimeField>; 14 | -------------------------------------------------------------------------------- /message/src/starks/config.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::merkle_tree::{ 2 | backends::types::{BatchKeccak256Tree, Keccak256Tree}, 3 | merkle::MerkleTree, 4 | }; 5 | 6 | // Merkle Trees configuration 7 | 8 | // The security of both hashes should match 9 | 10 | pub type FriMerkleTreeBackend<F> = Keccak256Tree<F>; 11 | pub type FriMerkleTree<F> = MerkleTree<FriMerkleTreeBackend<F>>; 12 | 13 | // If using hashes with a 256-bit output, the commitment size should be 32 bytes 14 | // If using hashes with a 512-bit output, the commitment size should be 64 bytes 15 | // TODO: The Commitment type should be obtained from the Merkle trees 16 | pub const COMMITMENT_SIZE: usize = 32; 17 | pub type Commitment = [u8; COMMITMENT_SIZE]; 18 | 19 | pub type BatchedMerkleTreeBackend<F> = BatchKeccak256Tree<F>; 20 | pub type BatchedMerkleTree<F> = MerkleTree<BatchedMerkleTreeBackend<F>>; 21 | -------------------------------------------------------------------------------- /message/src/starks/constraints/boundary.rs: -------------------------------------------------------------------------------- 1 | use itertools::Itertools; 2 | use lambdaworks_math::{ 3 | field::{element::FieldElement, traits::IsField}, 4 | polynomial::Polynomial, 5 | }; 6 | 7 | #[derive(Debug)] 8 | /// Represents a boundary constraint that must hold in an execution 9 | /// trace: 10 | /// * col: The column of the trace where the constraint must hold 11 | /// * step: The step (or row) of the trace where the constraint must hold 12 | /// * value: The value the constraint must have in that column and step 13 | pub struct BoundaryConstraint<F: IsField> { 14 | pub col: usize, 15 | pub step: usize, 16 | pub value: FieldElement<F>, 17 | } 18 | 19 | impl<F: IsField> BoundaryConstraint<F> { 20 | pub fn new(col: usize, step: usize, value: FieldElement<F>) -> Self { 21 | Self { col, step, value } 22 | } 23 | 24 | /// Used for creating boundary constraints for a trace with only one column 25 | pub fn new_simple(step: usize, value: FieldElement<F>) -> Self { 26 | Self { 27 | col: 0, 28 | step, 29 | value, 30 | } 31 | } 32 | } 33 | 34 | /// Data structure that stores all the boundary constraints that must 35 | /// hold for the execution trace 36 | #[derive(Default, Debug)] 37 | pub struct BoundaryConstraints<F: IsField> { 38 | pub constraints: Vec<BoundaryConstraint<F>>, 
41 | impl<F: IsField> BoundaryConstraints<F> {
42 |     #[allow(dead_code)]
43 |     pub fn new() -> Self {
44 |         Self {
45 |             constraints: Vec::<BoundaryConstraint<F>>::new(),
46 |         }
47 |     }
48 |
49 |     /// To instantiate from a vector of BoundaryConstraint elements
50 |     pub fn from_constraints(constraints: Vec<BoundaryConstraint<F>>) -> Self {
51 |         Self { constraints }
52 |     }
53 |
54 |     /// Returns all the steps where boundary conditions exist for the given column
55 |     pub fn steps(&self, col: usize) -> Vec<usize> {
56 |         self.constraints
57 |             .iter()
58 |             .filter(|v| v.col == col)
59 |             .map(|c| c.step)
60 |             .collect()
61 |     }
62 |
63 |     pub fn steps_for_boundary(&self) -> Vec<usize> {
64 |         self.constraints
65 |             .iter()
66 |             .unique_by(|elem| elem.step)
67 |             .map(|v| v.step)
68 |             .collect()
69 |     }
70 |
71 |     pub fn cols_for_boundary(&self) -> Vec<usize> {
72 |         self.constraints
73 |             .iter()
74 |             .unique_by(|elem| elem.col)
75 |             .map(|v| v.col)
76 |             .collect()
77 |     }
78 |
79 |     /// Given the primitive root of some domain, returns the domain values corresponding
80 |     /// to the steps where the boundary conditions hold. This is useful when interpolating
81 |     /// the boundary conditions, since we must know the x values
82 |     pub fn generate_roots_of_unity(
83 |         &self,
84 |         primitive_root: &FieldElement<F>,
85 |         cols_trace: &[usize],
86 |     ) -> Vec<Vec<FieldElement<F>>> {
87 |         cols_trace
88 |             .iter()
89 |             .map(|i| {
90 |                 self.steps(*i)
91 |                     .into_iter()
92 |                     .map(|s| primitive_root.pow(s))
93 |                     .collect::<Vec<FieldElement<F>>>()
94 |             })
95 |             .collect::<Vec<Vec<FieldElement<F>>>>()
96 |     }
97 |
98 |     /// For every trace column, give all the values the trace must be equal to in
99 |     /// the steps where the boundary constraints hold
100 |     pub fn values(&self, cols_trace: &[usize]) -> Vec<Vec<FieldElement<F>>> {
101 |         cols_trace
102 |             .iter()
103 |             .map(|i| {
104 |                 self.constraints
105 |                     .iter()
106 |                     .filter(|c| c.col == *i)
107 |                     .map(|c| c.value.clone())
108 |                     .collect()
109 |             })
110 |             .collect()
111 |     }
112 |
113 |     /// Computes the zerofier of the boundary quotient. The result is the
114 |     /// multiplication of each binomial that evaluates to zero in the domain
115 |     /// values where the boundary constraints must hold.
116 |     ///
117 |     /// Example: If there are boundary conditions in the third and fifth steps,
118 |     /// then the zerofier will be (x - w^3) * (x - w^5)
119 |     pub fn compute_zerofier(
120 |         &self,
121 |         primitive_root: &FieldElement<F>,
122 |         col: usize,
123 |     ) -> Polynomial<FieldElement<F>> {
124 |         self.steps(col).into_iter().fold(
125 |             Polynomial::new_monomial(FieldElement::<F>::one(), 0),
126 |             |zerofier, step| {
127 |                 let binomial =
128 |                     Polynomial::new(&[-primitive_root.pow(step), FieldElement::<F>::one()]);
129 |                 // TODO: Implement the MulAssign trait for Polynomials?
130 |                 zerofier * binomial
131 |             },
132 |         )
133 |     }
134 | }
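// Concretely, for constraints at steps 0, 1 and 7 over a domain generated by w,
// compute_zerofier(&w, 0) returns (x - 1)(x - w)(x - w^7), as the test below checks.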
135 |
136 | #[cfg(test)]
137 | mod test {
138 |     use lambdaworks_math::field::{
139 |         fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, traits::IsFFTField,
140 |     };
141 |     type PrimeField = Stark252PrimeField;
142 |
143 |     use super::*;
144 |
145 |     #[test]
146 |     fn zerofier_is_the_correct_one() {
147 |         let one = FieldElement::<PrimeField>::one();
148 |
149 |         // Fibonacci constraints:
150 |         // * a0 = 1
151 |         // * a1 = 1
152 |         // * a7 = 32
153 |         let a0 = BoundaryConstraint::new_simple(0, one);
154 |         let a1 = BoundaryConstraint::new_simple(1, one);
155 |         let result = BoundaryConstraint::new_simple(7, FieldElement::<PrimeField>::from(32));
156 |
157 |         let constraints = BoundaryConstraints::from_constraints(vec![a0, a1, result]);
158 |
159 |         let primitive_root = PrimeField::get_primitive_root_of_unity(3).unwrap();
160 |
161 |         // P_0(x) = (x - 1)
162 |         let a0_zerofier = Polynomial::new(&[-one, one]);
163 |         // P_1(x) = (x - w^1)
164 |         let a1_zerofier = Polynomial::new(&[-primitive_root.pow(1u32), one]);
165 |         // P_res(x) = (x - w^7)
166 |         let res_zerofier = Polynomial::new(&[-primitive_root.pow(7u32), one]);
167 |
168 |         let expected_zerofier = a0_zerofier * a1_zerofier * res_zerofier;
169 |
170 |         let zerofier = constraints.compute_zerofier(&primitive_root, 0);
171 |
172 |         assert_eq!(expected_zerofier, zerofier);
173 |     }
174 | }
175 |
--------------------------------------------------------------------------------
/message/src/starks/constraints/evaluation_table.rs:
--------------------------------------------------------------------------------
1 | use lambdaworks_math::fft::polynomial::FFTPoly;
2 | use lambdaworks_math::{
3 |     field::{
4 |         element::FieldElement,
5 |         traits::{IsFFTField, IsField},
6 |     },
7 |     polynomial::Polynomial,
8 | };
9 |
10 | #[derive(Clone, Debug)]
11 | pub struct ConstraintEvaluationTable<F: IsField> {
12 |     // Accumulation of the evaluation of the constraints
13 |     pub evaluations_acc: Vec<FieldElement<F>>,
14 |     pub trace_length: usize,
15 | }
16 |
17 | impl<F: IsField> ConstraintEvaluationTable<F> {
18 |     pub fn new(_n_cols: usize, domain: &[FieldElement<F>]) -> Self {
19 |         let evaluations_acc = Vec::with_capacity(domain.len());
20 |
21 |         ConstraintEvaluationTable {
22 |             evaluations_acc,
23 |             trace_length: domain.len(),
24 |         }
25 |     }
26 |
27 |     pub fn compute_composition_poly(&self, offset: &FieldElement<F>) -> Polynomial<FieldElement<F>>
28 |     where
29 |         F: IsFFTField,
30 |         Polynomial<FieldElement<F>>: FFTPoly<F>,
31 |     {
32 |         Polynomial::interpolate_offset_fft(&self.evaluations_acc, offset).unwrap()
33 |     }
34 | }
--------------------------------------------------------------------------------
/message/src/starks/constraints/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod boundary;
2 | pub mod evaluation_table;
3 | pub mod evaluator;
--------------------------------------------------------------------------------
/message/src/starks/context.rs:
--------------------------------------------------------------------------------
1 | use super::proof::options::ProofOptions;
2 |
3 | #[derive(Clone, Debug)]
4 | pub struct AirContext {
5 |     pub proof_options: ProofOptions,
6 |     pub trace_columns: usize,
7 |     pub transition_degrees: Vec<usize>,
8 |
9 |     /// This is a vector with the indices of all the rows that constitute
10 |     /// an evaluation frame. Note that, because of how we write all constraints
11 |     /// in one method (`compute_transition`), this vector needs to include the
12 |     /// offsets that are needed to compute EVERY transition constraint, even if some
13 |     /// constraints don't use all of the indexes in said offsets.
14 |     pub transition_offsets: Vec<usize>,
15 |     pub transition_exemptions: Vec<usize>,
16 |     pub num_transition_constraints: usize,
17 |     pub num_transition_exemptions: usize,
18 | }
19 |
20 | impl AirContext {
21 |     pub fn num_transition_constraints(&self) -> usize {
22 |         self.num_transition_constraints
23 |     }
24 |
25 |     pub fn transition_degrees(&self) -> &[usize] {
26 |         &self.transition_degrees
27 |     }
28 |
29 |     pub fn transition_degrees_len(&self) -> usize {
30 |         self.transition_degrees.len()
31 |     }
32 | }
--------------------------------------------------------------------------------
/message/src/starks/debug.rs:
--------------------------------------------------------------------------------
1 | use crate::starks::frame::Frame;
2 | use crate::starks::trace::TraceTable;
3 |
4 | use super::domain::Domain;
5 | use super::traits::AIR;
6 | use lambdaworks_math::fft::polynomial::FFTPoly;
7 | use lambdaworks_math::{
8 |     field::{element::FieldElement, traits::IsFFTField},
9 |     polynomial::Polynomial,
10 | };
11 | use log::{error, info};
12 |
13 | /// Validates that the trace is valid with respect to the supplied AIR constraints
14 | pub fn validate_trace<F: IsFFTField, A: AIR<Field = F>>(
15 |     air: &A,
16 |     trace_polys: &[Polynomial<FieldElement<F>>],
17 |     domain: &Domain<F>,
18 |     rap_challenges: &A::RAPChallenges,
19 | ) -> bool {
20 |     info!("Starting constraints validation over trace...");
21 |     let mut ret = true;
22 |
23 |     let trace_columns: Vec<_> = trace_polys
24 |         .iter()
25 |         .map(|poly| {
26 |             poly.evaluate_fft(1, Some(domain.interpolation_domain_size))
27 |                 .unwrap()
28 |         })
29 |         .collect();
30 |     let trace = TraceTable::new_from_cols(&trace_columns);
31 |
32 |     // --------- VALIDATE BOUNDARY CONSTRAINTS ------------
33 |     air.boundary_constraints(rap_challenges)
34 |         .constraints
35 |         .iter()
36 |         .for_each(|constraint| {
37 |             let col = constraint.col;
38 |             let step = constraint.step;
39 |             let boundary_value = constraint.value.clone();
40 |             let trace_value = trace.get(step, col);
41 |
42 |             if boundary_value != trace_value {
43 |                 ret = false;
44 |                 error!("Boundary constraint inconsistency - Expected value {} in step {} and column {}, found: {}", boundary_value.representative(), step, col, trace_value.representative());
45 |             }
46 |         });
47 |
48 |     // --------- VALIDATE TRANSITION CONSTRAINTS -----------
49 |     let n_transition_constraints = air.context().num_transition_constraints();
50 |     let transition_exemptions = &air.context().transition_exemptions;
51 |
52 |     let exemption_steps: Vec<usize> = vec![trace.n_rows(); n_transition_constraints]
53 |         .iter()
54 |         .zip(transition_exemptions)
55 |         .map(|(trace_steps, exemptions)| trace_steps - exemptions)
56 |         .collect();
57 |
58 |     // Iterate over trace and compute transitions
59 |     for step in 0..trace.n_rows() {
60 |         let frame = Frame::read_from_trace(&trace, step, 1, &air.context().transition_offsets);
61 |
62 |         let evaluations = air.compute_transition(&frame, rap_challenges);
63 |         // Iterate over each transition evaluation. When the evaluated step is not from
64 |         // the exemption steps corresponding to the transition, it should have zero as a
65 |         // result
66 |         evaluations.iter().enumerate().for_each(|(i, eval)| {
67 |             // Check that all the transition constraint evaluations of the trace are zero.
68 | // We don't take into account the transition exemptions. 69 | if step < exemption_steps[i] && eval != &FieldElement::::zero() { 70 | ret = false; 71 | error!( 72 | "Inconsistent evaluation of transition {} in step {} - expected 0, got {}", 73 | i, 74 | step, 75 | eval.representative() 76 | ); 77 | } 78 | }) 79 | } 80 | info!("Constraints validation check ended"); 81 | ret 82 | } 83 | 84 | pub fn check_boundary_polys_divisibility( 85 | boundary_polys: Vec>>, 86 | boundary_zerofiers: Vec>>, 87 | ) { 88 | for (i, (poly, z)) in boundary_polys 89 | .iter() 90 | .zip(boundary_zerofiers.iter()) 91 | .enumerate() 92 | { 93 | let (_, b) = poly.clone().long_division_with_remainder(z); 94 | if b != Polynomial::zero() { 95 | error!("Boundary poly {} is not divisible by its zerofier", i); 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /message/src/starks/domain.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_math::{ 2 | fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset, 3 | field::{element::FieldElement, traits::IsFFTField}, 4 | }; 5 | 6 | use super::traits::AIR; 7 | 8 | pub struct Domain { 9 | pub(crate) root_order: u32, 10 | pub(crate) lde_roots_of_unity_coset: Vec>, 11 | pub(crate) lde_root_order: u32, 12 | pub(crate) trace_primitive_root: FieldElement, 13 | pub(crate) trace_roots_of_unity: Vec>, 14 | pub(crate) coset_offset: FieldElement, 15 | pub(crate) blowup_factor: usize, 16 | pub(crate) interpolation_domain_size: usize, 17 | } 18 | 19 | impl Domain { 20 | pub fn new(air: &A) -> Self 21 | where 22 | A: AIR, 23 | { 24 | // Initial definitions 25 | let blowup_factor = air.options().blowup_factor as usize; 26 | let coset_offset = FieldElement::::from(air.options().coset_offset); 27 | let interpolation_domain_size = air.trace_length(); 28 | let root_order = air.trace_length().trailing_zeros(); 29 | // * Generate Coset 30 | let trace_primitive_root = F::get_primitive_root_of_unity(root_order as u64).unwrap(); 31 | let trace_roots_of_unity = get_powers_of_primitive_root_coset( 32 | root_order as u64, 33 | interpolation_domain_size, 34 | &FieldElement::::one(), 35 | ) 36 | .unwrap(); 37 | 38 | let lde_root_order = (air.trace_length() * blowup_factor).trailing_zeros(); 39 | let lde_roots_of_unity_coset = get_powers_of_primitive_root_coset( 40 | lde_root_order as u64, 41 | air.trace_length() * blowup_factor, 42 | &coset_offset, 43 | ) 44 | .unwrap(); 45 | 46 | Self { 47 | root_order, 48 | lde_roots_of_unity_coset, 49 | lde_root_order, 50 | trace_primitive_root, 51 | trace_roots_of_unity, 52 | blowup_factor, 53 | coset_offset, 54 | interpolation_domain_size, 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /message/src/starks/example/dummy_air.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 2 | use lambdaworks_math::field::{ 3 | element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, 4 | traits::IsFFTField, 5 | }; 6 | 7 | use crate::starks::{ 8 | constraints::boundary::{BoundaryConstraint, BoundaryConstraints}, 9 | context::AirContext, 10 | frame::Frame, 11 | proof::options::ProofOptions, 12 | trace::TraceTable, 13 | traits::AIR, 14 | }; 15 | 16 | #[derive(Clone)] 17 | pub struct DummyAIR { 18 | context: AirContext, 19 | trace_length: usize, 20 | } 21 | 22 | impl AIR for DummyAIR { 23 
| type Field = Stark252PrimeField; 24 | type RAPChallenges = (); 25 | type PublicInputs = (); 26 | 27 | fn new( 28 | trace_length: usize, 29 | _pub_inputs: &Self::PublicInputs, 30 | proof_options: &ProofOptions, 31 | ) -> Self { 32 | let context = AirContext { 33 | proof_options: proof_options.clone(), 34 | trace_columns: 2, 35 | transition_degrees: vec![2, 1], 36 | transition_exemptions: vec![0, 2], 37 | transition_offsets: vec![0, 1, 2], 38 | num_transition_constraints: 2, 39 | num_transition_exemptions: 1, 40 | }; 41 | 42 | Self { 43 | context, 44 | trace_length, 45 | } 46 | } 47 | 48 | fn build_auxiliary_trace( 49 | &self, 50 | _main_trace: &TraceTable, 51 | _rap_challenges: &Self::RAPChallenges, 52 | ) -> TraceTable { 53 | TraceTable::empty() 54 | } 55 | 56 | fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} 57 | fn compute_transition( 58 | &self, 59 | frame: &Frame, 60 | _rap_challenges: &Self::RAPChallenges, 61 | ) -> Vec> { 62 | let first_row = frame.get_row(0); 63 | let second_row = frame.get_row(1); 64 | let third_row = frame.get_row(2); 65 | 66 | let f_constraint = &first_row[0] * (&first_row[0] - FieldElement::one()); 67 | 68 | let fib_constraint = &third_row[1] - &second_row[1] - &first_row[1]; 69 | 70 | vec![f_constraint, fib_constraint] 71 | } 72 | 73 | fn boundary_constraints( 74 | &self, 75 | _rap_challenges: &Self::RAPChallenges, 76 | ) -> BoundaryConstraints { 77 | let a0 = BoundaryConstraint::new(1, 0, FieldElement::::one()); 78 | let a1 = BoundaryConstraint::new(1, 1, FieldElement::::one()); 79 | 80 | BoundaryConstraints::from_constraints(vec![a0, a1]) 81 | } 82 | 83 | fn number_auxiliary_rap_columns(&self) -> usize { 84 | 0 85 | } 86 | 87 | fn context(&self) -> &AirContext { 88 | &self.context 89 | } 90 | 91 | fn composition_poly_degree_bound(&self) -> usize { 92 | self.trace_length 93 | } 94 | 95 | fn trace_length(&self) -> usize { 96 | self.trace_length 97 | } 98 | 99 | fn pub_inputs(&self) -> &Self::PublicInputs { 100 | &() 101 | } 102 | } 103 | 104 | pub fn dummy_trace(trace_length: usize) -> TraceTable { 105 | let mut ret: Vec> = vec![]; 106 | 107 | let a0 = FieldElement::one(); 108 | let a1 = FieldElement::one(); 109 | 110 | ret.push(a0); 111 | ret.push(a1); 112 | 113 | for i in 2..(trace_length) { 114 | ret.push(ret[i - 1].clone() + ret[i - 2].clone()); 115 | } 116 | 117 | TraceTable::new_from_cols(&[vec![FieldElement::::one(); trace_length], ret]) 118 | } 119 | -------------------------------------------------------------------------------- /message/src/starks/example/fibonacci_2_columns.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 2 | use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; 3 | 4 | use crate::starks::{ 5 | constraints::boundary::{BoundaryConstraint, BoundaryConstraints}, 6 | context::AirContext, 7 | frame::Frame, 8 | proof::options::ProofOptions, 9 | trace::TraceTable, 10 | traits::AIR, 11 | }; 12 | 13 | use super::simple_fibonacci::FibonacciPublicInputs; 14 | 15 | #[derive(Clone, Debug)] 16 | pub struct Fibonacci2ColsAIR 17 | where 18 | F: IsFFTField, 19 | { 20 | context: AirContext, 21 | trace_length: usize, 22 | pub_inputs: FibonacciPublicInputs, 23 | } 24 | 25 | impl AIR for Fibonacci2ColsAIR 26 | where 27 | F: IsFFTField, 28 | { 29 | type Field = F; 30 | type RAPChallenges = (); 31 | type PublicInputs = FibonacciPublicInputs; 32 | 33 | fn new( 34 | trace_length: usize, 35 | pub_inputs: 
&Self::PublicInputs, 36 | proof_options: &ProofOptions, 37 | ) -> Self { 38 | let context = AirContext { 39 | proof_options: proof_options.clone(), 40 | transition_degrees: vec![1, 1], 41 | transition_exemptions: vec![1, 1], 42 | transition_offsets: vec![0, 1], 43 | num_transition_constraints: 2, 44 | trace_columns: 2, 45 | num_transition_exemptions: 1, 46 | }; 47 | 48 | Self { 49 | trace_length, 50 | context, 51 | pub_inputs: pub_inputs.clone(), 52 | } 53 | } 54 | 55 | fn build_auxiliary_trace( 56 | &self, 57 | _main_trace: &TraceTable, 58 | _rap_challenges: &Self::RAPChallenges, 59 | ) -> TraceTable { 60 | TraceTable::empty() 61 | } 62 | 63 | fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} 64 | 65 | fn compute_transition( 66 | &self, 67 | frame: &Frame, 68 | _rap_challenges: &Self::RAPChallenges, 69 | ) -> Vec> { 70 | let first_row = frame.get_row(0); 71 | let second_row = frame.get_row(1); 72 | 73 | // constraints of Fibonacci sequence (2 terms per step): 74 | // s_{0, i+1} = s_{0, i} + s_{1, i} 75 | // s_{1, i+1} = s_{1, i} + s_{0, i+1} 76 | let first_transition = &second_row[0] - &first_row[0] - &first_row[1]; 77 | let second_transition = &second_row[1] - &first_row[1] - &second_row[0]; 78 | 79 | vec![first_transition, second_transition] 80 | } 81 | 82 | fn number_auxiliary_rap_columns(&self) -> usize { 83 | 0 84 | } 85 | 86 | fn boundary_constraints( 87 | &self, 88 | _rap_challenges: &Self::RAPChallenges, 89 | ) -> BoundaryConstraints { 90 | let a0 = BoundaryConstraint::new(0, 0, self.pub_inputs.a0.clone()); 91 | let a1 = BoundaryConstraint::new(1, 0, self.pub_inputs.a1.clone()); 92 | 93 | BoundaryConstraints::from_constraints(vec![a0, a1]) 94 | } 95 | 96 | fn context(&self) -> &AirContext { 97 | &self.context 98 | } 99 | 100 | fn composition_poly_degree_bound(&self) -> usize { 101 | self.trace_length() 102 | } 103 | 104 | fn trace_length(&self) -> usize { 105 | self.trace_length 106 | } 107 | 108 | fn pub_inputs(&self) -> &Self::PublicInputs { 109 | &self.pub_inputs 110 | } 111 | } 112 | 113 | pub fn fibonacci_trace_2_columns( 114 | initial_values: [FieldElement; 2], 115 | trace_length: usize, 116 | ) -> TraceTable { 117 | let mut ret1: Vec> = vec![]; 118 | let mut ret2: Vec> = vec![]; 119 | 120 | ret1.push(initial_values[0].clone()); 121 | ret2.push(initial_values[1].clone()); 122 | 123 | for i in 1..(trace_length) { 124 | let new_val = ret1[i - 1].clone() + ret2[i - 1].clone(); 125 | ret1.push(new_val.clone()); 126 | ret2.push(new_val + ret2[i - 1].clone()); 127 | } 128 | 129 | TraceTable::new_from_cols(&[ret1, ret2]) 130 | } 131 | -------------------------------------------------------------------------------- /message/src/starks/example/fibonacci_rap.rs: -------------------------------------------------------------------------------- 1 | use std::ops::Div; 2 | 3 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 4 | use lambdaworks_math::{ 5 | field::{element::FieldElement, traits::IsFFTField}, 6 | helpers::resize_to_next_power_of_two, 7 | traits::ByteConversion, 8 | }; 9 | 10 | use crate::starks::{ 11 | constraints::boundary::{BoundaryConstraint, BoundaryConstraints}, 12 | context::AirContext, 13 | frame::Frame, 14 | proof::options::ProofOptions, 15 | trace::TraceTable, 16 | traits::AIR, 17 | transcript::transcript_to_field, 18 | }; 19 | 20 | #[derive(Clone)] 21 | pub struct FibonacciRAP 22 | where 23 | F: IsFFTField, 24 | { 25 | context: AirContext, 26 | trace_length: usize, 27 | pub_inputs: FibonacciRAPPublicInputs, 28 | } 29 | 30 | 
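// The extra RAP column is a running product enforcing that the second column is
// a permutation of the first: with challenge gamma, `build_auxiliary_trace` below
// computes
//     z_0 = 1,    z_{i+1} = z_i * (a_i + gamma) / (b_i + gamma),
// where a is the original column and b the permuted one. If b really is a
// permutation of a, every factor eventually cancels and the last entry is 1
// again (see the `aux_col` test at the end of this file).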
#[derive(Clone, Debug)] 31 | pub struct FibonacciRAPPublicInputs 32 | where 33 | F: IsFFTField, 34 | { 35 | pub steps: usize, 36 | pub a0: FieldElement, 37 | pub a1: FieldElement, 38 | } 39 | 40 | impl AIR for FibonacciRAP 41 | where 42 | F: IsFFTField, 43 | FieldElement: ByteConversion, 44 | { 45 | type Field = F; 46 | type RAPChallenges = FieldElement; 47 | type PublicInputs = FibonacciRAPPublicInputs; 48 | 49 | fn new( 50 | trace_length: usize, 51 | pub_inputs: &Self::PublicInputs, 52 | proof_options: &ProofOptions, 53 | ) -> Self { 54 | let exemptions = 3 + trace_length - pub_inputs.steps - 1; 55 | 56 | let context = AirContext { 57 | proof_options: proof_options.clone(), 58 | trace_columns: 3, 59 | transition_degrees: vec![1, 2], 60 | transition_offsets: vec![0, 1, 2], 61 | transition_exemptions: vec![exemptions, 1], 62 | num_transition_constraints: 2, 63 | num_transition_exemptions: 2, 64 | }; 65 | 66 | Self { 67 | context, 68 | trace_length, 69 | pub_inputs: pub_inputs.clone(), 70 | } 71 | } 72 | 73 | fn build_auxiliary_trace( 74 | &self, 75 | main_trace: &TraceTable, 76 | gamma: &Self::RAPChallenges, 77 | ) -> TraceTable { 78 | let main_segment_cols = main_trace.cols(); 79 | let not_perm = &main_segment_cols[0]; 80 | let perm = &main_segment_cols[1]; 81 | 82 | let trace_len = main_trace.n_rows(); 83 | 84 | let mut aux_col = Vec::new(); 85 | for i in 0..trace_len { 86 | if i == 0 { 87 | aux_col.push(FieldElement::::one()); 88 | } else { 89 | let z_i = &aux_col[i - 1]; 90 | let n_p_term = not_perm[i - 1].clone() + gamma; 91 | let p_term = &perm[i - 1] + gamma; 92 | 93 | aux_col.push(z_i * n_p_term.div(p_term)); 94 | } 95 | } 96 | TraceTable::new_from_cols(&[aux_col]) 97 | } 98 | 99 | fn build_rap_challenges(&self, transcript: &mut T) -> Self::RAPChallenges { 100 | transcript_to_field(transcript) 101 | } 102 | 103 | fn number_auxiliary_rap_columns(&self) -> usize { 104 | 1 105 | } 106 | 107 | fn compute_transition( 108 | &self, 109 | frame: &Frame, 110 | gamma: &Self::RAPChallenges, 111 | ) -> Vec> { 112 | // Main constraints 113 | let first_row = frame.get_row(0); 114 | let second_row = frame.get_row(1); 115 | let third_row = frame.get_row(2); 116 | 117 | let mut constraints = 118 | vec![third_row[0].clone() - second_row[0].clone() - first_row[0].clone()]; 119 | 120 | // Auxiliary constraints 121 | let z_i = &frame.get_row(0)[2]; 122 | let z_i_plus_one = &frame.get_row(1)[2]; 123 | 124 | let a_i = &frame.get_row(0)[0]; 125 | let b_i = &frame.get_row(0)[1]; 126 | 127 | let eval = z_i_plus_one * (b_i + gamma) - z_i * (a_i + gamma); 128 | 129 | constraints.push(eval); 130 | constraints 131 | } 132 | 133 | fn boundary_constraints( 134 | &self, 135 | _rap_challenges: &Self::RAPChallenges, 136 | ) -> BoundaryConstraints { 137 | // Main boundary constraints 138 | let a0 = BoundaryConstraint::new_simple(0, FieldElement::::one()); 139 | let a1 = BoundaryConstraint::new_simple(1, FieldElement::::one()); 140 | 141 | // Auxiliary boundary constraints 142 | let a0_aux = BoundaryConstraint::new(2, 0, FieldElement::::one()); 143 | 144 | BoundaryConstraints::from_constraints(vec![a0, a1, a0_aux]) 145 | } 146 | 147 | fn context(&self) -> &AirContext { 148 | &self.context 149 | } 150 | 151 | fn composition_poly_degree_bound(&self) -> usize { 152 | self.trace_length() 153 | } 154 | 155 | fn trace_length(&self) -> usize { 156 | self.trace_length 157 | } 158 | 159 | fn pub_inputs(&self) -> &Self::PublicInputs { 160 | &self.pub_inputs 161 | } 162 | } 163 | 164 | pub fn fibonacci_rap_trace( 165 | 
initial_values: [FieldElement; 2], 166 | trace_length: usize, 167 | ) -> TraceTable { 168 | let mut fib_seq: Vec> = vec![]; 169 | 170 | fib_seq.push(initial_values[0].clone()); 171 | fib_seq.push(initial_values[1].clone()); 172 | 173 | for i in 2..(trace_length) { 174 | fib_seq.push(fib_seq[i - 1].clone() + fib_seq[i - 2].clone()); 175 | } 176 | 177 | let last_value = fib_seq[trace_length - 1].clone(); 178 | let mut fib_permuted = fib_seq.clone(); 179 | fib_permuted[0] = last_value; 180 | fib_permuted[trace_length - 1] = initial_values[0].clone(); 181 | 182 | fib_seq.push(FieldElement::::zero()); 183 | fib_permuted.push(FieldElement::::zero()); 184 | let mut trace_cols = vec![fib_seq, fib_permuted]; 185 | resize_to_next_power_of_two(&mut trace_cols); 186 | 187 | TraceTable::new_from_cols(&trace_cols) 188 | } 189 | 190 | #[cfg(test)] 191 | mod test { 192 | use super::*; 193 | use lambdaworks_math::field::fields::u64_prime_field::FE17; 194 | 195 | #[test] 196 | fn test_build_fibonacci_rap_trace() { 197 | // The fibonacci RAP trace should have two columns: 198 | // * The usual fibonacci sequence column 199 | // * The permuted fibonacci sequence column. The first and last elements are permuted. 200 | // Also, a 0 is appended at the end of both columns. The reason for this can be read in 201 | // https://hackmd.io/@aztec-network/plonk-arithmetiization-air#RAPs---PAIRs-with-interjected-verifier-randomness 202 | 203 | let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 8); 204 | let mut expected_trace = vec![ 205 | vec![ 206 | FE17::one(), 207 | FE17::one(), 208 | FE17::from(2), 209 | FE17::from(3), 210 | FE17::from(5), 211 | FE17::from(8), 212 | FE17::from(13), 213 | FE17::from(21), 214 | FE17::zero(), 215 | ], 216 | vec![ 217 | FE17::from(21), 218 | FE17::one(), 219 | FE17::from(2), 220 | FE17::from(3), 221 | FE17::from(5), 222 | FE17::from(8), 223 | FE17::from(13), 224 | FE17::one(), 225 | FE17::zero(), 226 | ], 227 | ]; 228 | resize_to_next_power_of_two(&mut expected_trace); 229 | 230 | assert_eq!(trace.cols(), expected_trace); 231 | } 232 | 233 | #[test] 234 | fn aux_col() { 235 | let trace = fibonacci_rap_trace([FE17::from(1), FE17::from(1)], 64); 236 | let trace_cols = trace.cols(); 237 | 238 | let not_perm = trace_cols[0].clone(); 239 | let perm = trace_cols[1].clone(); 240 | let gamma = FE17::from(10); 241 | 242 | assert_eq!(perm.len(), not_perm.len()); 243 | let trace_len = not_perm.len(); 244 | 245 | let mut aux_col = Vec::new(); 246 | for i in 0..trace_len { 247 | if i == 0 { 248 | aux_col.push(FE17::one()); 249 | } else { 250 | let z_i = aux_col[i - 1]; 251 | let n_p_term = not_perm[i - 1] + gamma; 252 | let p_term = perm[i - 1] + gamma; 253 | 254 | aux_col.push(z_i * n_p_term.div(p_term)); 255 | } 256 | } 257 | 258 | assert_eq!(aux_col.last().unwrap(), &FE17::one()); 259 | } 260 | } 261 | -------------------------------------------------------------------------------- /message/src/starks/example/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dummy_air; 2 | pub mod fibonacci_2_columns; 3 | pub mod fibonacci_rap; 4 | pub mod quadratic_air; 5 | pub mod simple_fibonacci; 6 | -------------------------------------------------------------------------------- /message/src/starks/example/quadratic_air.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 2 | use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; 3 
| 4 | use crate::starks::{ 5 | constraints::boundary::{BoundaryConstraint, BoundaryConstraints}, 6 | context::AirContext, 7 | frame::Frame, 8 | proof::options::ProofOptions, 9 | trace::TraceTable, 10 | traits::AIR, 11 | }; 12 | 13 | #[derive(Clone)] 14 | pub struct QuadraticAIR 15 | where 16 | F: IsFFTField, 17 | { 18 | context: AirContext, 19 | trace_length: usize, 20 | pub_inputs: QuadraticPublicInputs, 21 | } 22 | 23 | #[derive(Clone, Debug)] 24 | pub struct QuadraticPublicInputs 25 | where 26 | F: IsFFTField, 27 | { 28 | pub a0: FieldElement, 29 | } 30 | 31 | impl AIR for QuadraticAIR 32 | where 33 | F: IsFFTField, 34 | { 35 | type Field = F; 36 | type RAPChallenges = (); 37 | type PublicInputs = QuadraticPublicInputs; 38 | 39 | fn new( 40 | trace_length: usize, 41 | pub_inputs: &Self::PublicInputs, 42 | proof_options: &ProofOptions, 43 | ) -> Self { 44 | let context = AirContext { 45 | proof_options: proof_options.clone(), 46 | trace_columns: 1, 47 | transition_degrees: vec![2], 48 | transition_exemptions: vec![1], 49 | transition_offsets: vec![0, 1], 50 | num_transition_constraints: 1, 51 | num_transition_exemptions: 1, 52 | }; 53 | 54 | Self { 55 | trace_length, 56 | context, 57 | pub_inputs: pub_inputs.clone(), 58 | } 59 | } 60 | 61 | fn build_auxiliary_trace( 62 | &self, 63 | _main_trace: &TraceTable, 64 | _rap_challenges: &Self::RAPChallenges, 65 | ) -> TraceTable { 66 | TraceTable::empty() 67 | } 68 | 69 | fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} 70 | 71 | fn compute_transition( 72 | &self, 73 | frame: &Frame, 74 | _rap_challenges: &Self::RAPChallenges, 75 | ) -> Vec> { 76 | let first_row = frame.get_row(0); 77 | let second_row = frame.get_row(1); 78 | 79 | vec![&second_row[0] - &first_row[0] * &first_row[0]] 80 | } 81 | 82 | fn number_auxiliary_rap_columns(&self) -> usize { 83 | 0 84 | } 85 | 86 | fn boundary_constraints( 87 | &self, 88 | _rap_challenges: &Self::RAPChallenges, 89 | ) -> BoundaryConstraints { 90 | let a0 = BoundaryConstraint::new_simple(0, self.pub_inputs.a0.clone()); 91 | 92 | BoundaryConstraints::from_constraints(vec![a0]) 93 | } 94 | 95 | fn context(&self) -> &AirContext { 96 | &self.context 97 | } 98 | 99 | fn composition_poly_degree_bound(&self) -> usize { 100 | 2 * self.trace_length() 101 | } 102 | 103 | fn trace_length(&self) -> usize { 104 | self.trace_length 105 | } 106 | 107 | fn pub_inputs(&self) -> &Self::PublicInputs { 108 | &self.pub_inputs 109 | } 110 | } 111 | 112 | pub fn quadratic_trace( 113 | initial_value: FieldElement, 114 | trace_length: usize, 115 | ) -> TraceTable { 116 | let mut ret: Vec> = vec![]; 117 | 118 | ret.push(initial_value); 119 | 120 | for i in 1..(trace_length) { 121 | ret.push(ret[i - 1].clone() * ret[i - 1].clone()); 122 | } 123 | 124 | TraceTable::new_from_cols(&[ret]) 125 | } 126 | -------------------------------------------------------------------------------- /message/src/starks/example/simple_fibonacci.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 2 | use lambdaworks_math::field::{element::FieldElement, traits::IsFFTField}; 3 | 4 | use crate::starks::{ 5 | constraints::boundary::{BoundaryConstraint, BoundaryConstraints}, 6 | context::AirContext, 7 | frame::Frame, 8 | proof::options::ProofOptions, 9 | trace::TraceTable, 10 | traits::AIR, 11 | }; 12 | 13 | #[derive(Clone)] 14 | pub struct FibonacciAIR 15 | where 16 | F: IsFFTField, 17 | { 18 | context: AirContext, 19 | trace_length: 
usize, 20 | pub_inputs: FibonacciPublicInputs, 21 | } 22 | 23 | #[derive(Clone, Debug)] 24 | pub struct FibonacciPublicInputs 25 | where 26 | F: IsFFTField, 27 | { 28 | pub a0: FieldElement, 29 | pub a1: FieldElement, 30 | } 31 | 32 | impl AIR for FibonacciAIR 33 | where 34 | F: IsFFTField, 35 | { 36 | type Field = F; 37 | type RAPChallenges = (); 38 | type PublicInputs = FibonacciPublicInputs; 39 | 40 | fn new( 41 | trace_length: usize, 42 | pub_inputs: &Self::PublicInputs, 43 | proof_options: &ProofOptions, 44 | ) -> Self { 45 | let context = AirContext { 46 | proof_options: proof_options.clone(), 47 | trace_columns: 1, 48 | transition_degrees: vec![1], 49 | transition_exemptions: vec![2], 50 | transition_offsets: vec![0, 1, 2], 51 | num_transition_constraints: 1, 52 | num_transition_exemptions: 1, 53 | }; 54 | 55 | Self { 56 | pub_inputs: pub_inputs.clone(), 57 | context, 58 | trace_length, 59 | } 60 | } 61 | 62 | fn composition_poly_degree_bound(&self) -> usize { 63 | self.trace_length() 64 | } 65 | 66 | fn build_auxiliary_trace( 67 | &self, 68 | _main_trace: &TraceTable, 69 | _rap_challenges: &Self::RAPChallenges, 70 | ) -> TraceTable { 71 | TraceTable::empty() 72 | } 73 | 74 | fn build_rap_challenges(&self, _transcript: &mut T) -> Self::RAPChallenges {} 75 | 76 | fn compute_transition( 77 | &self, 78 | frame: &Frame, 79 | _rap_challenges: &Self::RAPChallenges, 80 | ) -> Vec> { 81 | let first_row = frame.get_row(0); 82 | let second_row = frame.get_row(1); 83 | let third_row = frame.get_row(2); 84 | 85 | vec![third_row[0].clone() - second_row[0].clone() - first_row[0].clone()] 86 | } 87 | 88 | fn boundary_constraints( 89 | &self, 90 | _rap_challenges: &Self::RAPChallenges, 91 | ) -> BoundaryConstraints { 92 | let a0 = BoundaryConstraint::new_simple(0, self.pub_inputs.a0.clone()); 93 | let a1 = BoundaryConstraint::new_simple(1, self.pub_inputs.a1.clone()); 94 | BoundaryConstraints::from_constraints(vec![a0, a1]) 95 | } 96 | 97 | fn number_auxiliary_rap_columns(&self) -> usize { 98 | 0 99 | } 100 | 101 | fn context(&self) -> &AirContext { 102 | &self.context 103 | } 104 | 105 | fn trace_length(&self) -> usize { 106 | self.trace_length 107 | } 108 | 109 | fn pub_inputs(&self) -> &Self::PublicInputs { 110 | &self.pub_inputs 111 | } 112 | } 113 | 114 | pub fn fibonacci_trace( 115 | initial_values: [FieldElement; 2], 116 | trace_length: usize, 117 | ) -> TraceTable { 118 | let mut ret: Vec> = vec![]; 119 | 120 | ret.push(initial_values[0].clone()); 121 | ret.push(initial_values[1].clone()); 122 | 123 | for i in 2..(trace_length) { 124 | ret.push(ret[i - 1].clone() + ret[i - 2].clone()); 125 | } 126 | 127 | TraceTable::new_from_cols(&[ret]) 128 | } 129 | -------------------------------------------------------------------------------- /message/src/starks/frame.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_math::{ 2 | errors::DeserializationError, 3 | field::{element::FieldElement, traits::IsFFTField}, 4 | polynomial::Polynomial, 5 | traits::{ByteConversion, Deserializable, Serializable}, 6 | }; 7 | 8 | use super::trace::TraceTable; 9 | 10 | #[derive(Clone, Debug, PartialEq)] 11 | pub struct Frame { 12 | // Vector of rows 13 | data: Vec>, 14 | row_width: usize, 15 | } 16 | 17 | impl Frame { 18 | pub fn new(data: Vec>, row_width: usize) -> Self { 19 | Self { data, row_width } 20 | } 21 | 22 | pub fn num_rows(&self) -> usize { 23 | self.data.len() / self.row_width 24 | } 25 | 26 | pub fn num_columns(&self) -> usize { 27 | self.row_width 
28 |     }
29 |
30 |     pub fn get_row(&self, row_idx: usize) -> &[FieldElement<F>] {
31 |         let row_offset = row_idx * self.row_width;
32 |         &self.data[row_offset..row_offset + self.row_width]
33 |     }
34 |
35 |     pub fn get_row_mut(&mut self, row_idx: usize) -> &mut [FieldElement<F>] {
36 |         let row_offset = row_idx * self.row_width;
37 |         &mut self.data[row_offset..row_offset + self.row_width]
38 |     }
39 |
40 |     pub fn read_from_trace(
41 |         trace: &TraceTable<F>,
42 |         step: usize,
43 |         blowup: u8,
44 |         offsets: &[usize],
45 |     ) -> Self {
46 |         // Get the trace length to apply modulo with it when getting elements of
47 |         // the frame from the trace.
48 |         let trace_steps = trace.n_rows();
49 |         let data = offsets
50 |             .iter()
51 |             .flat_map(|frame_row_idx| {
52 |                 trace
53 |                     .get_row((step + (frame_row_idx * blowup as usize)) % trace_steps)
54 |                     .to_vec()
55 |             })
56 |             .collect();
57 |
58 |         Self::new(data, trace.n_cols)
59 |     }
60 |
61 |     /// Given a slice of trace polynomials, an evaluation point `x`, the frame offsets
62 |     /// corresponding to the computation of the transitions, and a primitive root,
63 |     /// outputs the trace evaluations of each trace polynomial over the values used to
64 |     /// compute a transition.
65 |     /// Example: For a simple Fibonacci computation, if t(x) is the trace polynomial of
66 |     /// the computation, this will output the evaluations t(x), t(g * x), t(g^2 * x).
67 |     pub fn get_trace_evaluations(
68 |         trace_polys: &[Polynomial<FieldElement<F>>],
69 |         x: &FieldElement<F>,
70 |         frame_offsets: &[usize],
71 |         primitive_root: &FieldElement<F>,
72 |     ) -> Vec<Vec<FieldElement<F>>> {
73 |         frame_offsets
74 |             .iter()
75 |             .map(|offset| x * primitive_root.pow(*offset))
76 |             .map(|eval_point| {
77 |                 trace_polys
78 |                     .iter()
79 |                     .map(|poly| poly.evaluate(&eval_point))
80 |                     .collect::<Vec<FieldElement<F>>>()
81 |             })
82 |             .collect()
83 |     }
84 | }
85 |
86 | impl<F> Serializable for Frame<F>
87 | where
88 |     F: IsFFTField,
89 |     FieldElement<F>: ByteConversion,
90 | {
91 |     fn serialize(&self) -> Vec<u8> {
92 |         let mut bytes = vec![];
93 |         bytes.extend(self.data.len().to_be_bytes());
94 |         let felt_len = if self.data.is_empty() {
95 |             0
96 |         } else {
97 |             self.data[0].to_bytes_be().len()
98 |         };
99 |         bytes.extend(felt_len.to_be_bytes());
100 |         for felt in &self.data {
101 |             bytes.extend(felt.to_bytes_be());
102 |         }
103 |         bytes.extend(self.row_width.to_be_bytes());
104 |         bytes
105 |     }
106 | }
107 |
108 | impl<F> Deserializable for Frame<F>
109 | where
110 |     F: IsFFTField,
111 |     FieldElement<F>: ByteConversion,
112 | {
113 |     fn deserialize(bytes: &[u8]) -> Result<Self, DeserializationError>
114 |     where
115 |         Self: Sized,
116 |     {
117 |         let mut bytes = bytes;
118 |         let data_len = usize::from_be_bytes(
119 |             bytes
120 |                 .get(..8)
121 |                 .ok_or(DeserializationError::InvalidAmountOfBytes)?
122 |                 .try_into()
123 |                 .map_err(|_| DeserializationError::InvalidAmountOfBytes)?,
124 |         );
125 |         bytes = &bytes[8..];
126 |
127 |         let felt_len = usize::from_be_bytes(
128 |             bytes
129 |                 .get(..8)
130 |                 .ok_or(DeserializationError::InvalidAmountOfBytes)?
131 |                 .try_into()
132 |                 .map_err(|_| DeserializationError::InvalidAmountOfBytes)?,
133 |         );
134 |         bytes = &bytes[8..];
135 |
136 |         let mut data = vec![];
137 |         for _ in 0..data_len {
138 |             let felt = FieldElement::<F>::from_bytes_be(
139 |                 bytes
140 |                     .get(..felt_len)
141 |                     .ok_or(DeserializationError::InvalidAmountOfBytes)?,
142 |             )?;
143 |             data.push(felt);
144 |             bytes = &bytes[felt_len..];
145 |         }
146 |
147 |         let row_width = usize::from_be_bytes(
148 |             bytes
149 |                 .get(..8)
150 |                 .ok_or(DeserializationError::InvalidAmountOfBytes)?
151 | .try_into() 152 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 153 | ); 154 | 155 | Ok(Self::new(data, row_width)) 156 | } 157 | } 158 | 159 | #[cfg(test)] 160 | mod tests { 161 | use lambdaworks_math::field::{ 162 | element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, 163 | }; 164 | use proptest::{collection, prelude::*, prop_compose, proptest}; 165 | 166 | use crate::starks::frame::Frame; 167 | use lambdaworks_math::traits::{Deserializable, Serializable}; 168 | 169 | type FE = FieldElement; 170 | 171 | prop_compose! { 172 | fn some_felt()(base in any::(), exponent in any::()) -> FE { 173 | FE::from(base).pow(exponent) 174 | } 175 | } 176 | 177 | prop_compose! { 178 | fn field_vec()(vec in collection::vec(some_felt(), 16)) -> Vec { 179 | vec 180 | } 181 | } 182 | 183 | proptest! { 184 | #![proptest_config(ProptestConfig {cases: 5, .. ProptestConfig::default()})] 185 | #[test] 186 | fn test_serialize_and_deserialize(data in field_vec(), row_width in any::()) { 187 | let frame = Frame::new(data, row_width); 188 | let serialized = frame.serialize(); 189 | let deserialized: Frame = Frame::deserialize(&serialized).unwrap(); 190 | 191 | prop_assert_eq!(frame.data, deserialized.data); 192 | prop_assert_eq!(frame.row_width, deserialized.row_width); 193 | } 194 | } 195 | } 196 | -------------------------------------------------------------------------------- /message/src/starks/fri/fri_commitment.rs: -------------------------------------------------------------------------------- 1 | use lambdaworks_math::{ 2 | fft::polynomial::FFTPoly, 3 | field::{ 4 | element::FieldElement, 5 | traits::{IsFFTField, IsField}, 6 | }, 7 | polynomial::Polynomial, 8 | traits::ByteConversion, 9 | }; 10 | 11 | use crate::starks::config::FriMerkleTree; 12 | 13 | #[derive(Clone)] 14 | pub struct FriLayer 15 | where 16 | F: IsField, 17 | FieldElement: ByteConversion, 18 | { 19 | pub evaluation: Vec>, 20 | pub merkle_tree: FriMerkleTree, 21 | pub coset_offset: FieldElement, 22 | pub domain_size: usize, 23 | } 24 | 25 | impl FriLayer 26 | where 27 | F: IsField + IsFFTField, 28 | FieldElement: ByteConversion, 29 | { 30 | pub fn new( 31 | poly: &Polynomial>, 32 | coset_offset: &FieldElement, 33 | domain_size: usize, 34 | ) -> Self { 35 | let evaluation = poly 36 | .evaluate_offset_fft(1, Some(domain_size), coset_offset) 37 | .unwrap(); // TODO: return error 38 | 39 | let merkle_tree = FriMerkleTree::build(&evaluation); 40 | 41 | Self { 42 | evaluation, 43 | merkle_tree, 44 | coset_offset: coset_offset.clone(), 45 | domain_size, 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /message/src/starks/fri/fri_decommit.rs: -------------------------------------------------------------------------------- 1 | pub use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 2 | use lambdaworks_crypto::merkle_tree::proof::Proof; 3 | use lambdaworks_math::errors::DeserializationError; 4 | use lambdaworks_math::field::element::FieldElement; 5 | use lambdaworks_math::field::traits::IsField; 6 | use lambdaworks_math::traits::{ByteConversion, Deserializable, Serializable}; 7 | 8 | use crate::starks::config::Commitment; 9 | use crate::starks::utils::{deserialize_proof, serialize_proof}; 10 | 11 | #[derive(Debug, Clone)] 12 | pub struct FriDecommitment { 13 | pub layers_auth_paths_sym: Vec>, 14 | pub layers_evaluations_sym: Vec>, 15 | pub layers_auth_paths: Vec>, 16 | pub layers_evaluations: Vec>, 17 | } 18 | 19 | impl Serializable 
for FriDecommitment 20 | where 21 | F: IsField, 22 | FieldElement: ByteConversion, 23 | { 24 | fn serialize(&self) -> Vec { 25 | let mut bytes = vec![]; 26 | bytes.extend(self.layers_auth_paths_sym.len().to_be_bytes()); 27 | for proof in &self.layers_auth_paths_sym { 28 | bytes.extend(serialize_proof(proof)); 29 | } 30 | let felt_len = self.layers_evaluations[0].to_bytes_be().len(); 31 | bytes.extend(felt_len.to_be_bytes()); 32 | bytes.extend(self.layers_evaluations_sym.len().to_be_bytes()); 33 | for evaluation in &self.layers_evaluations_sym { 34 | bytes.extend(evaluation.to_bytes_be()); 35 | } 36 | bytes.extend(self.layers_evaluations.len().to_be_bytes()); 37 | for evaluation in &self.layers_evaluations { 38 | bytes.extend(evaluation.to_bytes_be()); 39 | } 40 | bytes.extend(self.layers_auth_paths.len().to_be_bytes()); 41 | for proof in &self.layers_auth_paths { 42 | bytes.extend(serialize_proof(proof)); 43 | } 44 | bytes 45 | } 46 | } 47 | 48 | impl Deserializable for FriDecommitment 49 | where 50 | F: IsField, 51 | FieldElement: ByteConversion, 52 | { 53 | fn deserialize(bytes: &[u8]) -> Result 54 | where 55 | Self: Sized, 56 | { 57 | let mut bytes = bytes; 58 | let mut layers_auth_paths_sym = vec![]; 59 | let layers_auth_paths_sym_len = usize::from_be_bytes( 60 | bytes 61 | .get(..8) 62 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 63 | .try_into() 64 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 65 | ); 66 | bytes = &bytes[8..]; 67 | 68 | for _ in 0..layers_auth_paths_sym_len { 69 | let proof; 70 | (proof, bytes) = deserialize_proof(bytes)?; 71 | layers_auth_paths_sym.push(proof); 72 | } 73 | 74 | let felt_len = usize::from_be_bytes( 75 | bytes 76 | .get(..8) 77 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 78 | .try_into() 79 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 80 | ); 81 | bytes = &bytes[8..]; 82 | 83 | let layers_evaluations_sym_len = usize::from_be_bytes( 84 | bytes 85 | .get(..8) 86 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 87 | .try_into() 88 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 89 | ); 90 | bytes = &bytes[8..]; 91 | 92 | let mut layers_evaluations_sym = vec![]; 93 | for _ in 0..layers_evaluations_sym_len { 94 | let evaluation = FieldElement::::from_bytes_be( 95 | bytes 96 | .get(..felt_len) 97 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 98 | .try_into() 99 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 100 | )?; 101 | bytes = &bytes[felt_len..]; 102 | layers_evaluations_sym.push(evaluation); 103 | } 104 | 105 | let layer_evaluations_len = usize::from_be_bytes( 106 | bytes 107 | .get(..8) 108 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 109 | .try_into() 110 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 111 | ); 112 | bytes = &bytes[8..]; 113 | 114 | let mut layers_evaluations = vec![]; 115 | for _ in 0..layer_evaluations_len { 116 | let evaluation = FieldElement::::from_bytes_be( 117 | bytes 118 | .get(..felt_len) 119 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 120 | .try_into() 121 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 122 | )?; 123 | bytes = &bytes[felt_len..]; 124 | layers_evaluations.push(evaluation); 125 | } 126 | 127 | let mut layers_auth_paths = vec![]; 128 | let layers_auth_paths_len = usize::from_be_bytes( 129 | bytes 130 | .get(..8) 131 | .ok_or(DeserializationError::InvalidAmountOfBytes)? 
132 | .try_into() 133 | .map_err(|_| DeserializationError::InvalidAmountOfBytes)?, 134 | ); 135 | bytes = &bytes[8..]; 136 | 137 | for _ in 0..layers_auth_paths_len { 138 | let proof; 139 | (proof, bytes) = deserialize_proof(bytes)?; 140 | layers_auth_paths.push(proof); 141 | } 142 | 143 | Ok(Self { 144 | layers_auth_paths_sym, 145 | layers_evaluations_sym, 146 | layers_evaluations, 147 | layers_auth_paths, 148 | }) 149 | } 150 | } 151 | 152 | #[cfg(test)] 153 | mod tests { 154 | use lambdaworks_crypto::merkle_tree::proof::Proof; 155 | use lambdaworks_math::field::{ 156 | element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField, 157 | }; 158 | use proptest::{collection, prelude::*, prop_compose, proptest}; 159 | 160 | use lambdaworks_math::traits::{Deserializable, Serializable}; 161 | 162 | use crate::starks::config::{Commitment, COMMITMENT_SIZE}; 163 | 164 | use super::FriDecommitment; 165 | 166 | type FE = FieldElement; 167 | 168 | prop_compose! { 169 | fn some_commitment()(high in any::(), low in any::()) -> Commitment { 170 | let mut bytes = [0u8; COMMITMENT_SIZE]; 171 | bytes[..16].copy_from_slice(&high.to_be_bytes()); 172 | bytes[16..].copy_from_slice(&low.to_be_bytes()); 173 | bytes 174 | } 175 | } 176 | 177 | prop_compose! { 178 | fn commitment_vec()(vec in collection::vec(some_commitment(), 4)) -> Vec { 179 | vec 180 | } 181 | } 182 | 183 | prop_compose! { 184 | fn some_proof()(merkle_path in commitment_vec()) -> Proof { 185 | Proof{merkle_path} 186 | } 187 | } 188 | 189 | prop_compose! { 190 | fn proof_vec()(vec in collection::vec(some_proof(), 4)) -> Vec> { 191 | vec 192 | } 193 | } 194 | 195 | prop_compose! { 196 | fn some_felt()(base in any::(), exponent in any::()) -> FE { 197 | FE::from(base).pow(exponent) 198 | } 199 | } 200 | 201 | prop_compose! { 202 | fn field_vec()(vec in collection::vec(some_felt(), 16)) -> Vec { 203 | vec 204 | } 205 | } 206 | 207 | prop_compose! { 208 | fn some_fri_decommitment()( 209 | layers_auth_paths_sym in proof_vec(), 210 | layers_evaluations_sym in field_vec(), 211 | layers_evaluations in field_vec(), 212 | layers_auth_paths in proof_vec() 213 | ) -> FriDecommitment { 214 | FriDecommitment{ 215 | layers_auth_paths_sym, 216 | layers_evaluations_sym, 217 | layers_evaluations, 218 | layers_auth_paths 219 | } 220 | } 221 | } 222 | 223 | proptest! { 224 | #![proptest_config(ProptestConfig {cases: 5, .. 
ProptestConfig::default()})] 225 | #[test] 226 | fn test_serialize_and_deserialize(fri_decommitment in some_fri_decommitment()) { 227 | let serialized = fri_decommitment.serialize(); 228 | let deserialized: FriDecommitment = FriDecommitment::deserialize(&serialized).unwrap(); 229 | 230 | for (a, b) in fri_decommitment.layers_auth_paths_sym.iter().zip(deserialized.layers_auth_paths_sym.iter()) { 231 | prop_assert_eq!(&a.merkle_path, &b.merkle_path); 232 | } 233 | 234 | for (a, b) in fri_decommitment.layers_evaluations_sym.iter().zip(deserialized.layers_evaluations_sym.iter()) { 235 | prop_assert_eq!(a, b); 236 | } 237 | 238 | for (a, b) in fri_decommitment.layers_evaluations.iter().zip(deserialized.layers_evaluations.iter()) { 239 | prop_assert_eq!(a, b); 240 | } 241 | 242 | for (a, b) in fri_decommitment.layers_auth_paths.iter().zip(deserialized.layers_auth_paths.iter()) { 243 | prop_assert_eq!(&a.merkle_path, &b.merkle_path); 244 | } 245 | } 246 | } 247 | } 248 | -------------------------------------------------------------------------------- /message/src/starks/fri/fri_functions.rs: -------------------------------------------------------------------------------- 1 | use super::Polynomial; 2 | use lambdaworks_math::field::{element::FieldElement, traits::IsField}; 3 | 4 | pub fn fold_polynomial( 5 | poly: &Polynomial>, 6 | beta: &FieldElement, 7 | ) -> Polynomial> 8 | where 9 | F: IsField, 10 | { 11 | let coef = poly.coefficients(); 12 | let even_coef: Vec> = coef.iter().step_by(2).cloned().collect(); 13 | 14 | // odd coeficients of poly are multiplied by beta 15 | let odd_coef_mul_beta: Vec> = coef 16 | .iter() 17 | .skip(1) 18 | .step_by(2) 19 | .map(|v| (v.clone()) * beta) 20 | .collect(); 21 | 22 | let (even_poly, odd_poly) = Polynomial::pad_with_zero_coefficients( 23 | &Polynomial::new(&even_coef), 24 | &Polynomial::new(&odd_coef_mul_beta), 25 | ); 26 | even_poly + odd_poly 27 | } 28 | 29 | #[cfg(test)] 30 | mod tests { 31 | use super::fold_polynomial; 32 | use lambdaworks_math::field::element::FieldElement; 33 | use lambdaworks_math::field::fields::u64_prime_field::U64PrimeField; 34 | const MODULUS: u64 = 293; 35 | type FE = FieldElement>; 36 | use lambdaworks_math::polynomial::Polynomial; 37 | 38 | #[test] 39 | fn test_fold() { 40 | let p0 = Polynomial::new(&[ 41 | FE::new(3), 42 | FE::new(1), 43 | FE::new(2), 44 | FE::new(7), 45 | FE::new(3), 46 | FE::new(5), 47 | ]); 48 | let beta = FE::new(4); 49 | let p1 = fold_polynomial(&p0, &beta); 50 | assert_eq!( 51 | p1, 52 | Polynomial::new(&[FE::new(7), FE::new(30), FE::new(23),]) 53 | ); 54 | 55 | let gamma = FE::new(3); 56 | let p2 = fold_polynomial(&p1, &gamma); 57 | assert_eq!(p2, Polynomial::new(&[FE::new(97), FE::new(23),])); 58 | 59 | let delta = FE::new(2); 60 | let p3 = fold_polynomial(&p2, &delta); 61 | assert_eq!(p3, Polynomial::new(&[FE::new(143)])); 62 | assert_eq!(p3.degree(), 0); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /message/src/starks/fri/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod fri_commitment; 2 | pub mod fri_decommit; 3 | mod fri_functions; 4 | 5 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript; 6 | use lambdaworks_math::field::traits::{IsFFTField, IsField}; 7 | use lambdaworks_math::traits::ByteConversion; 8 | pub use lambdaworks_math::{ 9 | field::{element::FieldElement, fields::u64_prime_field::U64PrimeField}, 10 | polynomial::Polynomial, 11 | }; 12 | 13 | use 
self::fri_commitment::FriLayer; 14 | use self::fri_decommit::FriDecommitment; 15 | use self::fri_functions::fold_polynomial; 16 | 17 | use super::traits::AIR; 18 | use super::transcript::{transcript_to_field, transcript_to_usize}; 19 | 20 | pub fn fri_commit_phase( 21 | number_layers: usize, 22 | p_0: Polynomial>, 23 | transcript: &mut T, 24 | coset_offset: &FieldElement, 25 | domain_size: usize, 26 | ) -> (FieldElement, Vec>) 27 | where 28 | FieldElement: ByteConversion, 29 | { 30 | let mut domain_size = domain_size; 31 | 32 | let mut fri_layer_list = Vec::with_capacity(number_layers); 33 | let mut current_layer = FriLayer::new(&p_0, coset_offset, domain_size); 34 | fri_layer_list.push(current_layer.clone()); 35 | let mut current_poly = p_0; 36 | // >>>> Send commitment: [p₀] 37 | transcript.append(¤t_layer.merkle_tree.root); 38 | 39 | let mut coset_offset = coset_offset.clone(); 40 | 41 | for _ in 1..number_layers { 42 | // <<<< Receive challenge 𝜁ₖ₋₁ 43 | let zeta = transcript_to_field(transcript); 44 | coset_offset = coset_offset.square(); 45 | domain_size /= 2; 46 | 47 | // Compute layer polynomial and domain 48 | current_poly = fold_polynomial(¤t_poly, &zeta); 49 | current_layer = FriLayer::new(¤t_poly, &coset_offset, domain_size); 50 | let new_data = ¤t_layer.merkle_tree.root; 51 | fri_layer_list.push(current_layer.clone()); // TODO: remove this clone 52 | 53 | // >>>> Send commitment: [pₖ] 54 | transcript.append(new_data); 55 | } 56 | 57 | // <<<< Receive challenge: 𝜁ₙ₋₁ 58 | let zeta = transcript_to_field(transcript); 59 | 60 | let last_poly = fold_polynomial(¤t_poly, &zeta); 61 | 62 | let last_value = last_poly 63 | .coefficients() 64 | .get(0) 65 | .unwrap_or(&FieldElement::zero()) 66 | .clone(); 67 | 68 | // >>>> Send value: pₙ 69 | transcript.append(&last_value.to_bytes_be()); 70 | 71 | (last_value, fri_layer_list) 72 | } 73 | 74 | pub fn fri_query_phase( 75 | air: &A, 76 | domain_size: usize, 77 | fri_layers: &Vec>, 78 | transcript: &mut T, 79 | ) -> (Vec>, Vec) 80 | where 81 | F: IsFFTField, 82 | A: AIR, 83 | T: Transcript, 84 | FieldElement: ByteConversion, 85 | { 86 | if !fri_layers.is_empty() { 87 | let number_of_queries = air.options().fri_number_of_queries; 88 | let iotas = (0..number_of_queries) 89 | .map(|_| transcript_to_usize(transcript) % domain_size) 90 | .collect::>(); 91 | let query_list = iotas 92 | .iter() 93 | .map(|iota_s| { 94 | // <<<< Receive challenge 𝜄ₛ (iota_s) 95 | let mut layers_auth_paths_sym = vec![]; 96 | let mut layers_evaluations_sym = vec![]; 97 | let mut layers_evaluations = vec![]; 98 | let mut layers_auth_paths = vec![]; 99 | 100 | for layer in fri_layers { 101 | // symmetric element 102 | let index = iota_s % layer.domain_size; 103 | let index_sym = (iota_s + layer.domain_size / 2) % layer.domain_size; 104 | let evaluation_sym = layer.evaluation[index_sym].clone(); 105 | let auth_path_sym = layer.merkle_tree.get_proof_by_pos(index_sym).unwrap(); 106 | let evaluation = layer.evaluation[index].clone(); 107 | let auth_path = layer.merkle_tree.get_proof_by_pos(index).unwrap(); 108 | layers_auth_paths_sym.push(auth_path_sym); 109 | layers_evaluations_sym.push(evaluation_sym); 110 | layers_evaluations.push(evaluation); 111 | layers_auth_paths.push(auth_path); 112 | } 113 | 114 | FriDecommitment { 115 | layers_auth_paths_sym, 116 | layers_evaluations_sym, 117 | layers_evaluations, 118 | layers_auth_paths, 119 | } 120 | }) 121 | .collect(); 122 | 123 | (query_list, iotas) 124 | } else { 125 | (vec![], vec![]) 126 | } 127 | } 128 | 
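// Note on fri_query_phase above: each query at index i also opens the
// "symmetric" index i + domain_size / 2. Over a coset of size n generated by g,
// g^(n/2) = -1, so the two positions hold p_k(x) and p_k(-x), which is exactly
// what is needed to check one folding step via the standard FRI identity
//     p_{k+1}(x^2) = (p_k(x) + p_k(-x)) / 2 + beta * (p_k(x) - p_k(-x)) / (2x).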
--------------------------------------------------------------------------------
/message/src/starks/grinding.rs:
--------------------------------------------------------------------------------
1 | use sha3::{Digest, Keccak256};
2 |
3 | /// Concatenates the transcript hash with `value`, hashes the result with Keccak256,
4 | /// and returns the number of trailing zeros of the first 8 bytes of the digest,
5 | /// read as a big-endian u64 (note: trailing, despite `leading_zeros` in the name).
6 | ///
7 | /// # Parameters
8 | ///
9 | /// * `transcript_challenge` - the hash value obtained from the transcript
10 | /// * `value` - the value to be concatenated with the transcript hash
11 | /// (i.e. a candidate nonce).
12 | ///
13 | /// # Returns
14 | ///
15 | /// The number of trailing zeros in the resulting hash value.
16 | #[inline(always)]
17 | pub fn hash_transcript_with_int_and_get_leading_zeros(
18 |     transcript_challenge: &[u8; 32],
19 |     value: u64,
20 | ) -> u8 {
21 |     let mut data = [0; 40];
22 |     data[..32].copy_from_slice(transcript_challenge);
23 |     data[32..].copy_from_slice(&value.to_le_bytes());
24 |
25 |     let digest = Keccak256::digest(data);
26 |
27 |     let seed_head = u64::from_be_bytes(digest[..8].try_into().unwrap());
28 |     seed_head.trailing_zeros() as u8
29 | }
30 |
31 | /// Performs grinding, generating a new nonce for the proof.
32 | /// The nonce generated is such that
33 | /// Hash(transcript_hash || nonce) has a number of trailing zeros
34 | /// greater than or equal to `grinding_factor`.
35 | ///
36 | /// # Parameters
37 | ///
38 | /// * `transcript_challenge` - the hash of the transcript
39 | /// * `grinding_factor` - the number of trailing zeros needed
40 | pub fn generate_nonce_with_grinding(
41 |     transcript_challenge: &[u8; 32],
42 |     grinding_factor: u8,
43 | ) -> Option<u64> {
44 |     (0..u64::MAX).find(|&candidate_nonce| {
45 |         hash_transcript_with_int_and_get_leading_zeros(transcript_challenge, candidate_nonce)
46 |             >= grinding_factor
47 |     })
48 | }
49 |
50 | #[cfg(test)]
51 | mod test {
52 |     use sha3::{Digest, Keccak256};
53 |
54 |     #[test]
55 |     fn hash_transcript_with_int_and_get_leading_zeros_works() {
56 |         let transcript_challenge = [
57 |             226_u8, 27, 133, 168, 62, 203, 20, 59, 122, 230, 227, 33, 76, 44, 53, 150, 200, 45,
58 |             136, 162, 249, 239, 142, 90, 204, 191, 45, 4, 53, 22, 103, 240,
59 |         ];
60 |         let grinding_factor = 10;
61 |
62 |         let nonce =
63 |             super::generate_nonce_with_grinding(&transcript_challenge, grinding_factor).unwrap();
64 |         assert_eq!(nonce, 33);
65 |
66 |         // check the generated hash has at least `grinding_factor` trailing zeros
67 |         let mut data = [0; 40];
68 |         data[..32].copy_from_slice(&transcript_challenge);
69 |         data[32..].copy_from_slice(&nonce.to_le_bytes());
70 |
71 |         let digest = Keccak256::digest(data);
72 |
73 |         let seed_head = u64::from_be_bytes(digest[..8].try_into().unwrap());
74 |         let trailing_zeros = seed_head.trailing_zeros() as u8;
75 |
76 |         assert!(trailing_zeros >= grinding_factor);
77 |     }
78 | }
79 |
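// A minimal usage sketch for the functions above (the 32-byte challenge here is
// a placeholder; in the prover it comes from the Fiat-Shamir transcript):
//
//     let challenge = [0u8; 32];
//     let nonce = generate_nonce_with_grinding(&challenge, 10).unwrap();
//     assert!(hash_transcript_with_int_and_get_leading_zeros(&challenge, nonce) >= 10);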
--------------------------------------------------------------------------------
/message/src/starks/proof/errors.rs:
--------------------------------------------------------------------------------
 1 | use thiserror::Error;
 2 | 
 3 | #[derive(Debug, Error)]
 4 | pub enum InsecureOptionError {
 5 |     #[error("Field size is not large enough")]
 6 |     FieldSize,
 7 |     #[error("The number of security bits is not large enough")]
 8 |     SecurityBits,
 9 | }
10 | 
--------------------------------------------------------------------------------
/message/src/starks/proof/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod errors;
2 | pub mod options;
3 | pub mod stark;
4 | 
--------------------------------------------------------------------------------
/message/src/starks/proof/options.rs:
--------------------------------------------------------------------------------
  1 | use lambdaworks_math::field::traits::IsPrimeField;
  2 | 
  3 | use super::errors::InsecureOptionError;
  4 | 
  5 | pub enum SecurityLevel {
  6 |     Conjecturable80Bits,
  7 |     Conjecturable100Bits,
  8 |     Conjecturable128Bits,
  9 |     Provable80Bits,
 10 |     Provable100Bits,
 11 |     Provable128Bits,
 12 | }
 13 | 
 14 | /// The options for the proof
 15 | ///
 16 | /// - `blowup_factor`: the blowup factor for the trace
 17 | /// - `fri_number_of_queries`: the number of queries for the FRI layer
 18 | /// - `coset_offset`: the offset for the coset
 19 | /// - `grinding_factor`: the number of trailing zero bits required of Hash(transcript_hash || nonce)
 20 | #[derive(Clone, Debug)]
 21 | pub struct ProofOptions {
 22 |     pub blowup_factor: u8,
 23 |     pub fri_number_of_queries: usize,
 24 |     pub coset_offset: u64,
 25 |     pub grinding_factor: u8,
 26 | }
 27 | 
 28 | impl ProofOptions {
 29 |     // TODO: Make it work for extended fields
 30 |     const EXTENSION_DEGREE: usize = 1;
 31 |     // Estimated maximum domain size. 2^40 = 1 TB
 32 |     const NUM_BITS_MAX_DOMAIN_SIZE: usize = 40;
 33 | 
 34 |     /// See section 5.10.1 of https://eprint.iacr.org/2021/582.pdf
 35 |     pub fn new_secure(security_level: SecurityLevel, coset_offset: u64) -> Self {
 36 |         match security_level {
 37 |             SecurityLevel::Conjecturable80Bits => ProofOptions {
 38 |                 blowup_factor: 4,
 39 |                 fri_number_of_queries: 31,
 40 |                 coset_offset,
 41 |                 grinding_factor: 20,
 42 |             },
 43 |             SecurityLevel::Conjecturable100Bits => ProofOptions {
 44 |                 blowup_factor: 4,
 45 |                 fri_number_of_queries: 41,
 46 |                 coset_offset,
 47 |                 grinding_factor: 20,
 48 |             },
 49 |             SecurityLevel::Conjecturable128Bits => ProofOptions {
 50 |                 blowup_factor: 4,
 51 |                 fri_number_of_queries: 55,
 52 |                 coset_offset,
 53 |                 grinding_factor: 20,
 54 |             },
 55 |             SecurityLevel::Provable80Bits => ProofOptions {
 56 |                 blowup_factor: 4,
 57 |                 fri_number_of_queries: 80,
 58 |                 coset_offset,
 59 |                 grinding_factor: 20,
 60 |             },
 61 |             SecurityLevel::Provable100Bits => ProofOptions {
 62 |                 blowup_factor: 4,
 63 |                 fri_number_of_queries: 104,
 64 |                 coset_offset,
 65 |                 grinding_factor: 20,
 66 |             },
 67 |             SecurityLevel::Provable128Bits => ProofOptions {
 68 |                 blowup_factor: 4,
 69 |                 fri_number_of_queries: 140,
 70 |                 coset_offset,
 71 |                 grinding_factor: 20,
 72 |             },
 73 |         }
 74 |     }
 75 | 
 76 |     /// Checks that the proof options reach `security_target` bits of conjectured security
 77 |     pub fn new_with_checked_security<F: IsPrimeField>(
 78 |         blowup_factor: u8,
 79 |         fri_number_of_queries: usize,
 80 |         coset_offset: u64,
 81 |         grinding_factor: u8,
 82 |         security_target: u8,
 83 |     ) -> Result<Self, InsecureOptionError> {
 84 |         Self::check_field_security::<F>(security_target)?;
 85 | 
 86 |         let num_bits_blowup_factor = blowup_factor.trailing_zeros() as usize;
 87 | 
 88 |         if security_target as usize
 89 |             >= grinding_factor as usize + num_bits_blowup_factor * fri_number_of_queries - 1
 90 |         {
 91 |             return Err(InsecureOptionError::SecurityBits);
 92 |         }
 93 | 
 94 |         Ok(ProofOptions {
 95 |             blowup_factor,
 96 |             fri_number_of_queries,
 97 |             coset_offset,
 98 |             grinding_factor,
 99 |         })
100 |     }
101 | 
102 |     /// Checks that the proof options reach `security_target` bits of provable security.
103 |     /// This is an approximation. It's stricter than the formula in the paper.
104 |     /// See https://eprint.iacr.org/2021/582.pdf
105 |     pub fn new_with_checked_provable_security<F: IsPrimeField>(
106 |         blowup_factor: u8,
107 |         fri_number_of_queries: usize,
108 |         coset_offset: u64,
109 |         grinding_factor: u8,
110 |         security_target: u8,
111 |     ) -> Result<Self, InsecureOptionError> {
112 |         Self::check_field_security::<F>(security_target)?;
113 | 
114 |         let num_bits_blowup_factor = blowup_factor.trailing_zeros() as usize;
115 | 
116 |         if (security_target as usize)
117 |             < grinding_factor as usize + num_bits_blowup_factor * fri_number_of_queries / 2
118 |         {
119 |             return Err(InsecureOptionError::SecurityBits);
120 |         }
121 | 
122 |         Ok(ProofOptions {
123 |             blowup_factor,
124 |             fri_number_of_queries,
125 |             coset_offset,
126 |             grinding_factor,
127 |         })
128 |     }
129 | 
130 |     fn check_field_security<F: IsPrimeField>(
131 |         security_target: u8,
132 |     ) -> Result<(), InsecureOptionError> {
133 |         if F::field_bit_size() * Self::EXTENSION_DEGREE
134 |             <= security_target as usize + Self::NUM_BITS_MAX_DOMAIN_SIZE
135 |         {
136 |             return Err(InsecureOptionError::FieldSize);
137 |         }
138 | 
139 |         Ok(())
140 |     }
141 | 
142 |     /// Default proof options used for testing purposes.
143 |     /// These options should not be used in production.
144 |     pub fn default_test_options() -> Self {
145 |         Self {
146 |             blowup_factor: 4,
147 |             fri_number_of_queries: 3,
148 |             coset_offset: 3,
149 |             grinding_factor: 1,
150 |         }
151 |     }
152 | }
153 | 
154 | #[cfg(test)]
155 | mod tests {
156 |     use lambdaworks_math::field::fields::{
157 |         fft_friendly::stark_252_prime_field::Stark252PrimeField, u64_prime_field::F17,
158 |     };
159 | 
160 |     use crate::starks::proof::{errors::InsecureOptionError, options::SecurityLevel};
161 | 
162 |     use super::ProofOptions;
163 | 
164 |     #[test]
165 |     fn u64_prime_field_is_not_large_enough_to_be_secure() {
166 |         let ProofOptions {
167 |             blowup_factor,
168 |             fri_number_of_queries,
169 |             coset_offset,
170 |             grinding_factor,
171 |         } = ProofOptions::new_secure(SecurityLevel::Conjecturable128Bits, 1);
172 | 
173 |         let u64_options = ProofOptions::new_with_checked_security::<F17>(
174 |             blowup_factor,
175 |             fri_number_of_queries,
176 |             coset_offset,
177 |             grinding_factor,
178 |             128,
179 |         );
180 | 
181 |         assert!(matches!(u64_options, Err(InsecureOptionError::FieldSize)));
182 |     }
183 | 
184 |     #[test]
185 |     fn generated_stark_proof_options_for_128_bits_are_secure() {
186 |         let ProofOptions {
187 |             blowup_factor,
188 |             fri_number_of_queries,
189 |             coset_offset,
190 |             grinding_factor,
191 |         } = ProofOptions::new_secure(SecurityLevel::Conjecturable128Bits, 1);
192 | 
193 |         let secure_options = ProofOptions::new_with_checked_security::<Stark252PrimeField>(
194 |             blowup_factor,
195 |             fri_number_of_queries,
196 |             coset_offset,
197 |             grinding_factor,
198 |             128,
199 |         );
200 | 
201 |         assert!(secure_options.is_ok());
202 |     }
203 | 
204 |     #[test]
205 |     fn generated_proof_options_for_128_bits_with_one_fri_query_less_are_insecure() {
206 |         let ProofOptions {
207 |             blowup_factor,
208 |             fri_number_of_queries,
209 |             coset_offset,
210 |             grinding_factor,
211 |         } = ProofOptions::new_secure(SecurityLevel::Conjecturable128Bits, 1);
212 | 
213 |         let insecure_options = ProofOptions::new_with_checked_security::<Stark252PrimeField>(
214 |             blowup_factor,
215 |             fri_number_of_queries - 1,
216 |             coset_offset,
217 |             grinding_factor,
218 |             128,
219 |         );
220 | 
221 |         assert!(matches!(
222 |             insecure_options,
223 |             Err(InsecureOptionError::SecurityBits)
224 |         ));
225 |     }
226 | 
227 |     #[test]
228 |     fn generated_stark_proof_options_for_100_bits_are_secure_for_100_target_bits() {
229 |         let ProofOptions {
230 |             blowup_factor,
231 |             fri_number_of_queries,
232 |             coset_offset,
233 |             grinding_factor,
234 |         } = ProofOptions::new_secure(SecurityLevel::Conjecturable100Bits, 1);
235 | 
236 |         let secure_options = ProofOptions::new_with_checked_security::<Stark252PrimeField>(
237 |             blowup_factor,
238 |             fri_number_of_queries,
239 |             coset_offset,
240 |             grinding_factor,
241 |             100,
242 |         );
243 | 
244 |         assert!(secure_options.is_ok());
245 |     }
246 | 
247 |     #[test]
248 |     fn generated_stark_proof_options_for_80_bits_are_secure_for_80_target_bits() {
249 |         let ProofOptions {
250 |             blowup_factor,
251 |             fri_number_of_queries,
252 |             coset_offset,
253 |             grinding_factor,
254 |         } = ProofOptions::new_secure(SecurityLevel::Conjecturable80Bits, 1);
255 | 
256 |         let secure_options = ProofOptions::new_with_checked_security::<Stark252PrimeField>(
257 |             blowup_factor,
258 |             fri_number_of_queries,
259 |             coset_offset,
260 |             grinding_factor,
261 |             80,
262 |         );
263 | 
264 |         assert!(secure_options.is_ok());
265 |     }
266 | }
267 | 
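Taken together, `new_secure` and `new_with_checked_security` let a caller start from a preset and re-verify it against a target. A minimal sketch, assuming `ProofOptions` and lambdaworks' `Stark252PrimeField` are in scope; the conjectured estimate is roughly grinding_factor + log2(blowup_factor) * fri_number_of_queries bits.

use lambdaworks_math::field::fields::fft_friendly::stark_252_prime_field::Stark252PrimeField;

fn demo_options() {
    // The 128-bit preset: blowup 4 (2 bits per query), 55 queries, 20 grinding bits.
    // Conjectured security ~ 20 + 2 * 55 = 130 bits, above the 128-bit target.
    let options = ProofOptions::new_with_checked_security::<Stark252PrimeField>(
        4,   // blowup_factor
        55,  // fri_number_of_queries
        1,   // coset_offset
        20,  // grinding_factor
        128, // security_target
    );
    assert!(options.is_ok());
}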
--------------------------------------------------------------------------------
/message/src/starks/trace.rs:
--------------------------------------------------------------------------------
  1 | use lambdaworks_math::fft::errors::FFTError;
  2 | use lambdaworks_math::fft::polynomial::FFTPoly;
  3 | use lambdaworks_math::{
  4 |     field::{element::FieldElement, traits::IsFFTField},
  5 |     polynomial::Polynomial,
  6 | };
  7 | 
  8 | #[derive(Clone, Default, Debug, PartialEq, Eq)]
  9 | pub struct TraceTable<F: IsFFTField> {
 10 |     /// `table` holds the trace elements in row-major order
 11 |     pub table: Vec<FieldElement<F>>,
 12 |     pub n_cols: usize,
 13 | }
 14 | 
 15 | impl<F: IsFFTField> TraceTable<F> {
 16 |     pub fn empty() -> Self {
 17 |         Self {
 18 |             table: Vec::new(),
 19 |             n_cols: 0,
 20 |         }
 21 |     }
 22 | 
 23 |     pub fn is_empty(&self) -> bool {
 24 |         self.n_cols == 0
 25 |     }
 26 | 
 27 |     pub fn new(table: Vec<FieldElement<F>>, n_cols: usize) -> Self {
 28 |         Self { table, n_cols }
 29 |     }
 30 | 
 31 |     pub fn get_cols(&self, columns: &[usize]) -> Self {
 32 |         let mut table = Vec::new();
 33 |         for row_index in 0..self.n_rows() {
 34 |             for column in columns {
 35 |                 table.push(self.table[row_index * self.n_cols + column].clone());
 36 |             }
 37 |         }
 38 | 
 39 |         Self {
 40 |             table,
 41 |             n_cols: columns.len(),
 42 |         }
 43 |     }
 44 | 
 45 |     pub fn new_from_cols(cols: &[Vec<FieldElement<F>>]) -> Self {
 46 |         let n_rows = cols[0].len();
 47 |         debug_assert!(cols.iter().all(|c| c.len() == n_rows));
 48 | 
 49 |         let n_cols = cols.len();
 50 | 
 51 |         let mut table = Vec::with_capacity(n_cols * n_rows);
 52 | 
 53 |         for row_idx in 0..n_rows {
 54 |             for col in cols {
 55 |                 table.push(col[row_idx].clone());
 56 |             }
 57 |         }
 58 |         Self { table, n_cols }
 59 |     }
 60 | 
 61 |     pub fn n_rows(&self) -> usize {
 62 |         if self.n_cols == 0 {
 63 |             0
 64 |         } else {
 65 |             self.table.len() / self.n_cols
 66 |         }
 67 |     }
 68 | 
 69 |     pub fn rows(&self) -> Vec<Vec<FieldElement<F>>> {
 70 |         let n_rows = self.n_rows();
 71 |         (0..n_rows)
 72 |             .map(|row_idx| {
 73 |                 self.table[(row_idx * self.n_cols)..(row_idx * self.n_cols + self.n_cols)].to_vec()
 74 |             })
 75 |             .collect()
 76 |     }
 77 | 
 78 |     pub fn get_row(&self, row_idx: usize) -> &[FieldElement<F>] {
 79 |         let row_offset = row_idx * self.n_cols;
 80 |         &self.table[row_offset..row_offset + self.n_cols]
 81 |     }
 82 | 
 83 |     pub fn last_row(&self) -> &[FieldElement<F>] {
 84 |         self.get_row(self.n_rows() - 1)
 85 |     }
 86 | 
 87 |     pub fn cols(&self) -> Vec<Vec<FieldElement<F>>> {
 88 |         let n_rows = self.n_rows();
 89 |         (0..self.n_cols)
 90 |             .map(|col_idx| {
 91 |                 (0..n_rows)
 92 |                     .map(|row_idx| self.table[row_idx * self.n_cols + col_idx].clone())
 93 |                     .collect()
 94 |             })
 95 |             .collect()
 96 |     }
 97 | 
 98 |     /// Given a step and a column index, returns the value stored at that position
 99 |     pub fn get(&self, step: usize, col: usize) -> FieldElement<F> {
100 |         let idx = step * self.n_cols + col;
101 |         self.table[idx].clone()
102 |     }
103 | 
104 |     pub fn compute_trace_polys(&self) -> Vec<Polynomial<FieldElement<F>>> {
105 |         self.cols()
106 |             .iter()
107 |             .map(|col| Polynomial::interpolate_fft(col))
108 |             .collect::<Result<Vec<Polynomial<FieldElement<F>>>, FFTError>>()
109 |             .unwrap()
110 |     }
111 | 
112 |     pub fn concatenate(&self, new_cols: Vec<FieldElement<F>>, n_cols: usize) -> Self {
113 |         let mut new_table = Vec::new();
114 |         let mut i = 0;
115 |         for row_index in (0..self.table.len()).step_by(self.n_cols) {
116 |             new_table.append(&mut self.table[row_index..row_index + self.n_cols].to_vec());
117 |             new_table.append(&mut new_cols[i..(i + n_cols)].to_vec());
118 |             i += n_cols;
119 |         }
120 |         TraceTable {
121 |             table: new_table,
122 |             n_cols: self.n_cols + n_cols,
123 |         }
124 |     }
125 | }
126 | 
127 | #[cfg(test)]
128 | mod test {
129 |     use super::TraceTable;
130 |     use lambdaworks_math::field::{element::FieldElement, fields::u64_prime_field::F17};
131 |     type FE = FieldElement<F17>;
132 | 
133 |     #[test]
134 |     fn test_cols() {
135 |         let col_1 = vec![FE::from(1), FE::from(2), FE::from(5), FE::from(13)];
136 |         let col_2 = vec![FE::from(1), FE::from(3), FE::from(8), FE::from(21)];
137 | 
138 |         let trace_table = TraceTable::new_from_cols(&[col_1.clone(), col_2.clone()]);
139 |         let res_cols = trace_table.cols();
140 | 
141 |         assert_eq!(res_cols, vec![col_1, col_2]);
142 |     }
143 | 
144 |     #[test]
145 |     fn test_subtable_works() {
146 |         let table = vec![
147 |             FE::new(1),
148 |             FE::new(2),
149 |             FE::new(3),
150 |             FE::new(1),
151 |             FE::new(2),
152 |             FE::new(3),
153 |             FE::new(1),
154 |             FE::new(2),
155 |             FE::new(3),
156 |         ];
157 |         let trace_table = TraceTable { table, n_cols: 3 };
158 |         let subtable = trace_table.get_cols(&[0, 1]);
159 |         assert_eq!(
160 |             subtable.table,
161 |             vec![
162 |                 FE::new(1),
163 |                 FE::new(2),
164 |                 FE::new(1),
165 |                 FE::new(2),
166 |                 FE::new(1),
167 |                 FE::new(2)
168 |             ]
169 |         );
170 |         assert_eq!(subtable.n_cols, 2);
171 |         let subtable = trace_table.get_cols(&[0, 2]);
172 |         assert_eq!(
173 |             subtable.table,
174 |             vec![
175 |                 FE::new(1),
176 |                 FE::new(3),
177 |                 FE::new(1),
178 |                 FE::new(3),
179 |                 FE::new(1),
180 |                 FE::new(3)
181 |             ]
182 |         );
183 |         assert_eq!(subtable.n_cols, 2);
184 |         assert_eq!(trace_table.get_cols(&[]), TraceTable::empty());
185 |     }
186 | 
187 |     #[test]
188 |     fn test_concatenate_works() {
189 |         let table1_columns = vec![vec![FE::new(7), FE::new(8), FE::new(9)]];
190 |         let new_columns = vec![
191 |             FE::new(1),
192 |             FE::new(2),
193 |             FE::new(3),
194 |             FE::new(4),
195 |             FE::new(5),
196 |             FE::new(6),
197 |         ];
198 |         let expected_table = TraceTable::new_from_cols(&[
199 |             vec![FE::new(7), FE::new(8), FE::new(9)],
200 |             vec![FE::new(1), FE::new(3), FE::new(5)],
201 |             vec![FE::new(2), FE::new(4), FE::new(6)],
202 |         ]);
203 |         let table1 = TraceTable::new_from_cols(&table1_columns);
204 |         assert_eq!(table1.concatenate(new_columns, 2), expected_table)
205 |     }
206 | }
207 | 
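The row-major layout means column construction and row access read the same interleaved buffer. A small sketch, assuming `TraceTable` is in scope; F17 is the same test field the module's own tests use.

use lambdaworks_math::field::{element::FieldElement, fields::u64_prime_field::F17};

fn demo_trace_table() {
    type FE = FieldElement<F17>;

    // Two columns of length 3 interleave into one row-major vector:
    // [1, 4, 2, 5, 3, 6].
    let trace = TraceTable::new_from_cols(&[
        vec![FE::from(1), FE::from(2), FE::from(3)],
        vec![FE::from(4), FE::from(5), FE::from(6)],
    ]);

    assert_eq!(trace.n_rows(), 3);
    assert_eq!(trace.get_row(0), &[FE::from(1), FE::from(4)][..]);
    assert_eq!(trace.get(2, 1), FE::from(6)); // step 2, column 1
}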
--------------------------------------------------------------------------------
/message/src/starks/traits.rs:
--------------------------------------------------------------------------------
  1 | use itertools::Itertools;
  2 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript;
  3 | use lambdaworks_math::{
  4 |     fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset,
  5 |     field::{element::FieldElement, traits::IsFFTField},
  6 |     polynomial::Polynomial,
  7 | };
  8 | 
  9 | use super::{
 10 |     constraints::boundary::BoundaryConstraints, context::AirContext, frame::Frame,
 11 |     proof::options::ProofOptions, trace::TraceTable,
 12 | };
 13 | 
 14 | /// AIR is a representation of the constraints
 15 | pub trait AIR: Clone {
 16 |     type Field: IsFFTField;
 17 |     type RAPChallenges;
 18 |     type PublicInputs;
 19 | 
 20 |     fn new(
 21 |         trace_length: usize,
 22 |         pub_inputs: &Self::PublicInputs,
 23 |         proof_options: &ProofOptions,
 24 |     ) -> Self;
 25 | 
 26 |     fn build_auxiliary_trace(
 27 |         &self,
 28 |         main_trace: &TraceTable<Self::Field>,
 29 |         rap_challenges: &Self::RAPChallenges,
 30 |     ) -> TraceTable<Self::Field>;
 31 | 
 32 |     fn build_rap_challenges<T: Transcript>(&self, transcript: &mut T) -> Self::RAPChallenges;
 33 | 
 34 |     fn number_auxiliary_rap_columns(&self) -> usize;
 35 | 
 36 |     fn composition_poly_degree_bound(&self) -> usize;
 37 | 
 38 |     fn compute_transition(
 39 |         &self,
 40 |         frame: &Frame<Self::Field>,
 41 |         rap_challenges: &Self::RAPChallenges,
 42 |     ) -> Vec<FieldElement<Self::Field>>;
 43 | 
 44 |     fn boundary_constraints(
 45 |         &self,
 46 |         rap_challenges: &Self::RAPChallenges,
 47 |     ) -> BoundaryConstraints<Self::Field>;
 48 | 
 49 |     fn transition_exemptions(&self) -> Vec<Polynomial<FieldElement<Self::Field>>> {
 50 |         let trace_length = self.trace_length();
 51 |         let roots_of_unity_order = trace_length.trailing_zeros();
 52 |         let roots_of_unity = get_powers_of_primitive_root_coset(
 53 |             roots_of_unity_order as u64,
 54 |             self.trace_length(),
 55 |             &FieldElement::<Self::Field>::one(),
 56 |         )
 57 |         .unwrap();
 58 |         let root_of_unity_len = roots_of_unity.len();
 59 | 
 60 |         let x = Polynomial::new_monomial(FieldElement::one(), 1);
 61 | 
 62 |         self.context()
 63 |             .transition_exemptions
 64 |             .iter()
 65 |             .unique_by(|elem| *elem)
 66 |             .filter(|v| *v > &0_usize)
 67 |             .map(|cant_take| {
 68 |                 roots_of_unity
 69 |                     .iter()
 70 |                     .take(root_of_unity_len)
 71 |                     .rev()
 72 |                     .take(*cant_take)
 73 |                     .fold(
 74 |                         Polynomial::new_monomial(FieldElement::one(), 0),
 75 |                         |acc, root| acc * (&x - root),
 76 |                     )
 77 |             })
 78 |             .collect()
 79 |     }
 80 |     fn context(&self) -> &AirContext;
 81 | 
 82 |     fn trace_length(&self) -> usize;
 83 | 
 84 |     fn options(&self) -> &ProofOptions {
 85 |         &self.context().proof_options
 86 |     }
 87 | 
 88 |     fn blowup_factor(&self) -> u8 {
 89 |         self.options().blowup_factor
 90 |     }
 91 | 
 92 |     fn num_transition_constraints(&self) -> usize {
 93 |         self.context().num_transition_constraints
 94 |     }
 95 | 
 96 |     fn pub_inputs(&self) -> &Self::PublicInputs;
 97 | 
 98 |     fn transition_exemptions_verifier(&self) -> Vec<Polynomial<FieldElement<Self::Field>>> {
 99 |         let trace_length = self.trace_length();
100 |         let roots_of_unity_order = trace_length.trailing_zeros();
101 |         let roots_of_unity = get_powers_of_primitive_root_coset(
102 |             roots_of_unity_order as u64,
103 |             self.trace_length(),
104 |             &FieldElement::<Self::Field>::one(),
105 |         )
106 |         .unwrap();
107 |         let root_of_unity_len = roots_of_unity.len();
108 | 
109 |         let x = Polynomial::new_monomial(FieldElement::one(), 1);
110 | 
111 |         self.context()
112 |             .transition_exemptions
113 |             .iter()
114 |             .take(self.context().num_transition_constraints)
115 |             .map(|cant_take| {
116 |                 roots_of_unity
117 |                     .iter()
118 |                     .take(root_of_unity_len)
119 |                     .rev()
120 |                     .take(*cant_take)
121 |                     .fold(
122 |                         Polynomial::new_monomial(FieldElement::one(), 0),
123 |                         |acc, root| acc * (&x - root),
124 |                     )
125 |             })
126 |             .collect()
127 |     }
128 | }
129 | 
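The `transition_exemptions` fold above builds, for each constraint, the polynomial that is the product of (x - g^i) over the last `cant_take` trace roots of unity, so those final steps are excused from the transition constraint. A standalone sketch of the same construction, reusing the lambdaworks APIs already imported in traits.rs; the trace length of 8 over F17 and the 2 exempted steps are example choices.

use lambdaworks_math::{
    fft::cpu::roots_of_unity::get_powers_of_primitive_root_coset,
    field::{element::FieldElement, fields::u64_prime_field::F17},
    polynomial::Polynomial,
};

fn demo_exemptions() {
    type FE = FieldElement<F17>;

    let trace_length = 8usize;
    let order = trace_length.trailing_zeros() as u64; // log2(8) = 3
    let roots = get_powers_of_primitive_root_coset(order, trace_length, &FE::one()).unwrap();

    // Z(x) = (x - g^7) * (x - g^6): vanishes exactly on the last two steps.
    let x = Polynomial::new_monomial(FE::one(), 1);
    let exemptions = roots.iter().rev().take(2).fold(
        Polynomial::new_monomial(FE::one(), 0),
        |acc, root| acc * (&x - root),
    );

    for root in roots.iter().rev().take(2) {
        assert_eq!(exemptions.evaluate(root), FE::zero());
    }
}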
--------------------------------------------------------------------------------
/message/src/starks/transcript.rs:
--------------------------------------------------------------------------------
  1 | use lambdaworks_crypto::fiat_shamir::transcript::Transcript;
  2 | use lambdaworks_math::{
  3 |     field::{
  4 |         element::FieldElement,
  5 |         traits::{IsFFTField, IsPrimeField},
  6 |     },
  7 |     traits::ByteConversion,
  8 | };
  9 | 
 10 | /// Uses randomness from the transcript to create a FieldElement.
 11 | /// One bit less than the maximum size of the FieldElement is used as randomness; for Stark fields, this gives 251 bits of randomness.
 12 | /// Randomness is interpreted as limbs in big-endian, and each limb is ordered in big-endian.
 13 | pub fn transcript_to_field<F: IsFFTField, T: Transcript>(transcript: &mut T) -> FieldElement<F>
 14 | where
 15 |     FieldElement<F>: lambdaworks_math::traits::ByteConversion,
 16 | {
 17 |     let mut randomness = transcript.challenge();
 18 |     randomness_to_field(&mut randomness)
 19 | }
 20 | 
 21 | /// Transforms random bytes into a field element.
 22 | /// The randomness is truncated to one bit less than the field's bit size, so that every field element has the same probability of appearing.
 23 | fn randomness_to_field<F: IsPrimeField>(randomness: &mut [u8; 32]) -> FieldElement<F>
 24 | where
 25 |     FieldElement<F>: ByteConversion,
 26 | {
 27 |     let random_bits_required = F::field_bit_size() - 1;
 28 |     let random_bits_created = randomness.len() * 8;
 29 |     let mut bits_to_clear = random_bits_created - random_bits_required;
 30 | 
 31 |     let mut i = 0;
 32 |     while bits_to_clear >= 8 {
 33 |         randomness[i] = 0;
 34 |         bits_to_clear -= 8;
 35 |         i += 1;
 36 |     }
 37 | 
 38 |     let pre_mask: u8 = 1u8.checked_shl(8 - bits_to_clear as u32).unwrap_or(0);
 39 |     let mask: u8 = pre_mask.wrapping_sub(1);
 40 |     randomness[i] &= mask;
 41 | 
 42 |     FieldElement::from_bytes_be(randomness).unwrap()
 43 | }
 44 | 
 45 | pub fn transcript_to_usize<T: Transcript>(transcript: &mut T) -> usize {
 46 |     const CANT_BYTES_USIZE: usize = (usize::BITS / 8) as usize;
 47 |     let value = transcript.challenge()[..CANT_BYTES_USIZE]
 48 |         .try_into()
 49 |         .unwrap();
 50 |     usize::from_be_bytes(value)
 51 | }
 52 | 
 53 | pub fn sample_z_ood<F: IsFFTField, T: Transcript>(
 54 |     lde_roots_of_unity_coset: &[FieldElement<F>],
 55 |     trace_roots_of_unity: &[FieldElement<F>],
 56 |     transcript: &mut T,
 57 | ) -> FieldElement<F>
 58 | where
 59 |     FieldElement<F>: ByteConversion,
 60 | {
 61 |     loop {
 62 |         let value: FieldElement<F> = transcript_to_field(transcript);
 63 |         if !lde_roots_of_unity_coset.iter().any(|x| x == &value)
 64 |             && !trace_roots_of_unity.iter().any(|x| x == &value)
 65 |         {
 66 |             return value;
 67 |         }
 68 |     }
 69 | }
 70 | 
 71 | pub fn batch_sample_challenges<F: IsFFTField, T: Transcript>(
 72 |     size: usize,
 73 |     transcript: &mut T,
 74 | ) -> Vec<FieldElement<F>>
 75 | where
 76 |     FieldElement<F>: ByteConversion,
 77 | {
 78 |     (0..size).map(|_| transcript_to_field(transcript)).collect()
 79 | }
 80 | 
 81 | #[cfg(test)]
 82 | mod tests {
 83 |     use lambdaworks_math::{
 84 |         field::{
 85 |             element::FieldElement,
 86 |             fields::{
 87 |                 fft_friendly::stark_252_prime_field::Stark252PrimeField,
 88 |                 montgomery_backed_prime_fields::{IsModulus, U256PrimeField},
 89 |             },
 90 |         },
 91 |         unsigned_integer::element::U256,
 92 |     };
 93 | 
 94 |     use crate::starks::transcript::randomness_to_field;
 95 | 
 96 |     #[test]
 97 |     fn test_stark_prime_field_random_to_field_32() {
 98 |         #[rustfmt::skip]
 99 |         let mut randomness: [u8; 32] = [
100 |             248, 0, 0, 0, 0, 0, 0, 0,
101 |             0, 0, 0, 0, 0, 0, 0, 0,
102 |             0, 0, 0, 0, 0, 0, 0, 0,
103 |             0, 0, 0, 0, 0, 0, 0, 32,
104 |         ];
105 | 
106 |         type FE = FieldElement<Stark252PrimeField>;
107 |         let field_element: FE = randomness_to_field(&mut randomness);
108 |         let expected_fe = FE::from(32u64);
109 |         assert_eq!(field_element, expected_fe)
110 |     }
111 | 
112 |     #[test]
113 |     fn test_stark_prime_field_random_to_field_repeated_f_and_zero() {
114 |         #[rustfmt::skip]
115 |         let mut randomness: [u8; 32] = [
116 |             255, 0, 255, 0, 255, 0, 255, 0,
117 |             255, 0, 255, 0, 255, 0, 255, 0,
118 |             255, 0, 255, 0, 255, 0, 255, 0,
119 |             255, 0, 255, 0, 255, 0, 255, 0,
120 |         ];
121 | 
122 |         type FE = FieldElement<Stark252PrimeField>;
123 | 
124 |         // 251 bits should be used (252 bits of the Stark field minus 1) to avoid duplicates.
125 |         // Clearing the top 5 bits of the first byte (0xFF) leaves a 7.
126 |         let expected_fe = FE::from_hex_unchecked(
127 |             "\
128 |         0700FF00FF00FF00\
129 |         FF00FF00FF00FF00\
130 |         FF00FF00FF00FF00\
131 |         FF00FF00FF00FF00",
132 |         );
133 | 
134 |         let field_element: FE = randomness_to_field(&mut randomness);
135 | 
136 |         assert_eq!(field_element, expected_fe)
137 |     }
138 | 
139 |     #[test]
140 |     fn test_241_bit_random_to_field() {
141 |         #[derive(Clone, Debug)]
142 |         pub struct TestModulus;
143 |         impl IsModulus<U256> for TestModulus {
144 |             const MODULUS: U256 = U256::from_hex_unchecked(
145 |                 "\
146 |             0001000000000011\
147 |             0000000000000000\
148 |             0000000000000000\
149 |             0000000000000001",
150 |             );
151 |         }
152 | 
153 |         pub type TestField = U256PrimeField<TestModulus>;
154 | 
155 |         #[rustfmt::skip]
156 |         let mut randomness: [u8; 32] = [
157 |             255, 255, 255, 1, 2, 3, 4, 5,
158 |             6, 7, 8, 1, 2, 3, 4, 5,
159 |             6, 7, 8, 1, 2, 3, 4, 5,
160 |             6, 7, 8, 1, 2, 3, 4, 5,
161 |         ];
162 | 
163 |         type FE = FieldElement<TestField>;
164 | 
165 |         let expected_fe = FE::from_hex_unchecked(
166 |             "\
167 |         0000FF0102030405\
168 |         0607080102030405\
169 |         0607080102030405\
170 |         0607080102030405",
171 |         );
172 | 
173 |         let field_element: FE = randomness_to_field(&mut randomness);
174 | 
175 |         assert_eq!(field_element, expected_fe);
176 |     }
177 | 
178 |     #[test]
179 |     fn test_249_bit_random_to_field() {
180 |         #[derive(Clone, Debug)]
181 |         pub struct TestModulus;
182 |         impl IsModulus<U256> for TestModulus {
183 |             const MODULUS: U256 = U256::from_hex_unchecked(
184 |                 "\
185 |             0200000000000011\
186 |             0000000000000000\
187 |             0000000000000000\
188 |             0000000000000001",
189 |             );
190 |         }
191 | 
192 |         pub type TestField = U256PrimeField<TestModulus>;
193 | 
194 |         #[rustfmt::skip]
195 |         let mut randomness: [u8; 32] = [
196 |             255, 0, 255, 0, 255, 0, 255, 0,
197 |             255, 0, 255, 0, 255, 0, 255, 0,
198 |             255, 0, 255, 0, 255, 0, 255, 0,
199 |             255, 0, 255, 0, 255, 0, 255, 0,
200 |         ];
201 | 
202 |         let expected_fe = FE::from_hex_unchecked(
203 |             "\
204 |         0100FF00FF00FF00\
205 |         FF00FF00FF00FF00\
206 |         FF00FF00FF00FF00\
207 |         FF00FF00FF00FF00",
208 |         );
209 | 
210 |         type FE = FieldElement<TestField>;
211 | 
212 |         let field_element: FE = randomness_to_field(&mut randomness);
213 | 
214 |         assert_eq!(field_element, expected_fe)
215 |     }
216 | }
217 | 
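A sketch of how these helpers are driven from a transcript. It assumes lambdaworks' `DefaultTranscript` (the crate's Keccak-based `Transcript` implementation) and that the functions above are in scope; the appended bytes are placeholders.

use lambdaworks_crypto::fiat_shamir::{
    default_transcript::DefaultTranscript, transcript::Transcript,
};
use lambdaworks_math::field::{
    element::FieldElement, fields::fft_friendly::stark_252_prime_field::Stark252PrimeField,
};

fn demo_sampling() {
    let mut transcript = DefaultTranscript::new();
    transcript.append(b"public inputs go here");

    // Each call consumes a fresh 32-byte challenge, masked down to 251 bits.
    let alpha: FieldElement<Stark252PrimeField> = transcript_to_field(&mut transcript);
    let betas = batch_sample_challenges::<Stark252PrimeField, _>(4, &mut transcript);
    assert_eq!(betas.len(), 4);
    let _ = alpha;
}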
--------------------------------------------------------------------------------
/message/src/starks/utils.rs:
--------------------------------------------------------------------------------
 1 | use lambdaworks_crypto::merkle_tree::proof::Proof;
 2 | use lambdaworks_math::errors::DeserializationError;
 3 | 
 4 | use super::config::Commitment;
 5 | 
 6 | pub fn serialize_proof(proof: &Proof<Commitment>) -> Vec<u8> {
 7 |     let mut bytes = vec![];
 8 |     bytes.extend(proof.merkle_path.len().to_be_bytes());
 9 |     for commitment in &proof.merkle_path {
10 |         bytes.extend(commitment);
11 |     }
12 |     bytes
13 | }
14 | 
15 | pub fn deserialize_proof(bytes: &[u8]) -> Result<(Proof<Commitment>, &[u8]), DeserializationError> {
16 |     let mut bytes = bytes;
17 |     let mut merkle_path = vec![];
18 |     let merkle_path_len = usize::from_be_bytes(
19 |         bytes
20 |             .get(..8)
21 |             .ok_or(DeserializationError::InvalidAmountOfBytes)?
22 |             .try_into()
23 |             .map_err(|_| DeserializationError::InvalidAmountOfBytes)?,
24 |     );
25 |     bytes = &bytes[8..];
26 | 
27 |     for _ in 0..merkle_path_len {
28 |         let commitment = bytes
29 |             .get(..32)
30 |             .ok_or(DeserializationError::InvalidAmountOfBytes)?
31 |             .try_into()
32 |             .map_err(|_| DeserializationError::InvalidAmountOfBytes)?;
33 |         merkle_path.push(commitment);
34 |         bytes = &bytes[32..];
35 |     }
36 | 
37 |     Ok((Proof { merkle_path }, bytes))
38 | }
39 | 
--------------------------------------------------------------------------------
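A round-trip sketch for the serializers above. It assumes `Commitment` is the 32-byte array type defined in config.rs (consistent with the 32-byte reads in `deserialize_proof`) and a 64-bit target, where `usize::to_be_bytes` yields the 8-byte length prefix that `deserialize_proof` expects.

fn demo_proof_roundtrip() {
    let proof = Proof {
        merkle_path: vec![[0u8; 32], [1u8; 32], [2u8; 32]],
    };

    let bytes = serialize_proof(&proof);
    assert_eq!(bytes.len(), 8 + 3 * 32); // length prefix + three 32-byte siblings

    let (decoded, rest) = deserialize_proof(&bytes).unwrap();
    assert_eq!(decoded.merkle_path, proof.merkle_path);
    assert!(rest.is_empty()); // no trailing bytes left over
}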