├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── README.md ├── rust-toolchain ├── src ├── adaptor │ ├── bls381adaptor │ ├── bn256adaptor.rs │ ├── hashadaptor.rs │ ├── keccakadaptor.rs │ ├── merkleadaptor.rs │ ├── mod.rs │ └── msmadaptor.rs ├── circuits │ ├── anemoi.rs │ ├── babyjub.rs │ ├── bits_arith.rs │ ├── bls │ ├── bn256.rs │ ├── host.rs │ ├── keccak256.rs │ ├── merkle.rs │ ├── mod.rs │ ├── poseidon.rs │ └── range.rs ├── host │ ├── bls.rs │ ├── bn256.rs │ ├── cache.rs │ ├── datahash.rs │ ├── db.rs │ ├── jubjub.rs │ ├── keccak256.rs │ ├── merkle.rs │ ├── mod.rs │ ├── mongomerkle.rs │ └── poseidon.rs ├── lib.rs ├── main.rs ├── migrate │ └── migrate_from_mongo_to_rocksdb.rs ├── proof.rs ├── scripts │ └── kvpair_db_upgrade │ │ ├── README │ │ ├── db_upgrade.ts │ │ ├── package-lock.json │ │ ├── package.json │ │ └── tsconfig.json └── utils │ ├── macros.rs │ └── mod.rs ├── test.sh ├── test_bn254.sh ├── test_jubjub.sh ├── test_keccak.sh ├── test_merkle.sh └── test_poseidon.sh /.gitignore: -------------------------------------------------------------------------------- 1 | target/ 2 | .idea/ 3 | test_db -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "zkwasm-host-circuits" 3 | authors = ["DelphinusLab "] 4 | version = "0.1.0" 5 | edition = "2021" 6 | default-run = "zkwasm-host-circuits-prover" 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [lib] 10 | name = "zkwasm_host_circuits" 11 | path = "src/lib.rs" 12 | 13 | [[bin]] 14 | name = "zkwasm-host-circuits-prover" 15 | path = "src/main.rs" 16 | 17 | [[bin]] 18 | name = "migrate_from_mongo_to_rocksdb" 19 | path = "src/migrate/migrate_from_mongo_to_rocksdb.rs" 20 | 21 | [dependencies] 22 | strum = "0.24.1" 23 | strum_macros = "0.24.1" 24 | ff = "0.12" 25 | cfg-if = "1.0.0" 26 | halo2_proofs = { git = 
"https://github.com/DelphinusLab/halo2-gpu-specific.git", tag="stable-logup-shuffle-v1.0.0",default-features = true } 27 | halo2ecc-o = { git = "https://github.com/ooglayout/halo2ecc-o.git"} 28 | itertools = "0.10.0" 29 | num-bigint = { version = "0.4", features = ["rand"] } 30 | poseidon = { git = "https://github.com/DelphinusLab/poseidon" } 31 | circuits-batcher = { git = "https://github.com/DelphinusLab/continuation-batcher.git", tag="stable-logup-shuffle-1.0" } 32 | rand = "0.8" 33 | rand_core = "0.6" 34 | ark-std = { version = "0.4.0" } 35 | serde = { version = "1.0", features = ["serde_derive"] } 36 | serde_json = "1.0" 37 | clap = { version = "3.2.22", features = ["derive", "cargo"] } 38 | subtle = "2.4" 39 | lazy_static = "1.4.0" 40 | hex = "0.4" 41 | mongodb = { version = "2.5.0", default-features = false} 42 | ripemd = "0.1.3" 43 | lru = "0.11.0" 44 | anyhow = "1.0.86" 45 | num-traits = "0.2.15" 46 | num-derive = "0.3" 47 | rocksdb = "0.21.0" 48 | tempfile = "3.18.0" 49 | 50 | [features] 51 | default = ["mongo-tokio-sync"] 52 | perf = ["circuits-batcher/perf"] 53 | mongo-std-sync = ["mongodb/sync"] 54 | mongo-tokio-sync = ["mongodb/tokio-sync"] 55 | cuda = ["halo2_proofs/cuda"] 56 | complex-leaf = [] 57 | profile = ["ark-std/print-trace", "halo2_proofs/profile", "circuits-batcher/profile"] 58 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # zkWasm-host-circuits 2 | 3 | The host circuits contain two components. The operands selection circuit which selects the target opcodes and the host function constraint circuit which enforces the semantics of a specific host function. 4 | 5 | ## Operands selection circuit. 
6 | Suppose we have a host calling trace that calls two functions 7 | ``` 8 | a(x,y) = x + y + 1 9 | b(x,y,z) = x * y * z 10 | ``` 11 | 12 | It follows that the shared host op table will look like the following: 13 | 14 | | op_code | arg | idx | 15 | | --------|-----|-----| 16 | | 0 | 0 | 0 | 17 | | op_a | a_0 | 1 | 18 | | op_a | a_1 | 2 | 19 | | op_a | a_r | 3 | 20 | | op_b | b_0 | 4 | 21 | | op_b | b_1 | 5 | 22 | | op_b | b_2 | 6 | 23 | | op_b | b_r | 7 | 24 | 25 | Thus, to implement the selection circuit for function ``b``, the following should be the filtered result 26 | 27 | | op_code | arg_sel | idx_sel | 28 | | --------|-------- | --------| 29 | | op_b | $b_0$ | 4 | 30 | | op_b | $b_1$ | 5 | 31 | | op_b | $b_2$ | 6 | 32 | | op_b | $b_r$ | 7 | 33 | 34 | # Appendix: 35 | ## configuring async backend 36 | There are two async backends for mongodb: `mongo-std-sync` and `mongo-tokio-sync` (default). Note that when using the non-default backend `mongo-std-sync`, you must also use `default-features = false`.
37 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | nightly-2023-06-01 2 | -------------------------------------------------------------------------------- /src/adaptor/hashadaptor.rs: -------------------------------------------------------------------------------- 1 | use crate::adaptor::get_selected_entries; 2 | use crate::circuits::host::{HostOpConfig, HostOpSelector}; 3 | use crate::circuits::poseidon::PoseidonChip; 4 | use crate::circuits::poseidon::PoseidonGateConfig; 5 | use crate::circuits::CommonGateConfig; 6 | use crate::circuits::LookupAssistChip; 7 | use crate::circuits::LookupAssistConfig; 8 | use crate::host::poseidon::POSEIDON_HASHER; 9 | use crate::host::poseidon::POSEIDON_HASHER_SPEC; 10 | use crate::host::ForeignInst::{PoseidonFinalize, PoseidonNew, PoseidonPush}; 11 | use crate::host::{ExternalHostCallEntry, ExternalHostCallEntryTable, ForeignInst}; 12 | use ark_std::{end_timer, start_timer}; 13 | use halo2_proofs::arithmetic::FieldExt; 14 | use halo2_proofs::circuit::{Layouter, Region}; 15 | use halo2_proofs::pairing::bn256::Fr; 16 | use halo2_proofs::plonk::ConstraintSystem; 17 | use halo2_proofs::plonk::{Advice, Column, Error, Expression, VirtualCells}; 18 | 19 | use crate::utils::Limb; 20 | 21 | impl LookupAssistConfig for () { 22 | /// register a column (col) to be range checked by limb size (sz) 23 | fn register( 24 | &self, 25 | _cs: &mut ConstraintSystem, 26 | _col: impl FnOnce(&mut VirtualCells) -> Vec>, 27 | ) { 28 | () 29 | } 30 | } 31 | 32 | impl LookupAssistChip for () { 33 | fn provide_lookup_evidence( 34 | &mut self, 35 | _region: &Region, 36 | _value: F, 37 | _sz: u64, 38 | ) -> Result<(), Error> { 39 | Ok(()) 40 | } 41 | } 42 | 43 | fn hash_cont(restart: bool) -> Vec { 44 | vec![ExternalHostCallEntry { 45 | op: PoseidonNew as usize, 46 | value: if restart { 1u64 } else { 0u64 }, 47 | is_ret: 
false, 48 | }] 49 | } 50 | 51 | fn hash_to_host_call_table(inputs: [Fr; 8], result: Fr) -> ExternalHostCallEntryTable { 52 | let mut r = vec![]; 53 | r.push(hash_cont(true)); 54 | for f in inputs.iter() { 55 | r.push(crate::adaptor::fr_to_args(*f, 4, 64, PoseidonPush)); 56 | } 57 | r.push(crate::adaptor::fr_to_args(result, 4, 64, PoseidonFinalize)); 58 | ExternalHostCallEntryTable(r.into_iter().flatten().collect()) 59 | } 60 | 61 | const TOTAL_CONSTRUCTIONS: usize = 2048; 62 | 63 | impl HostOpSelector for PoseidonChip { 64 | type Config = (CommonGateConfig, PoseidonGateConfig); 65 | type Helper = (); 66 | fn configure( 67 | meta: &mut ConstraintSystem, 68 | shared_advices: &Vec>, 69 | ) -> Self::Config { 70 | PoseidonChip::::configure(meta, shared_advices) 71 | } 72 | 73 | fn construct(c: Self::Config) -> Self { 74 | PoseidonChip::construct(c.0, c.1, POSEIDON_HASHER_SPEC.clone()) 75 | } 76 | 77 | fn max_rounds(k: usize) -> usize { 78 | super::get_max_round(k, TOTAL_CONSTRUCTIONS) 79 | } 80 | 81 | fn opcodes() -> Vec { 82 | vec![ 83 | Fr::from(ForeignInst::PoseidonNew as u64), 84 | Fr::from(ForeignInst::PoseidonPush as u64), 85 | Fr::from(ForeignInst::PoseidonFinalize as u64), 86 | ] 87 | } 88 | 89 | fn assign( 90 | region: &Region, 91 | k: usize, 92 | offset: &mut usize, 93 | shared_operands: &Vec, 94 | shared_opcodes: &Vec, 95 | config: &HostOpConfig, 96 | ) -> Result>, Error> { 97 | let opcodes = Self::opcodes(); 98 | let selected_entries = get_selected_entries(shared_operands, shared_opcodes, &opcodes); 99 | let total_used_instructions = selected_entries.len() / (1 + 8 * 4 + 4); 100 | 101 | let mut r = vec![]; 102 | 103 | // TODO: Change 8 to RATE ? 
104 | for group in selected_entries.chunks_exact(1 + 8 * 4 + 4) { 105 | let ((operand, opcode), index) = *group.get(0).clone().unwrap(); 106 | assert!(opcode.clone() == Fr::from(PoseidonNew as u64)); 107 | 108 | let (limb, _op) = config.assign_one_line( 109 | region, 110 | offset, 111 | operand, 112 | opcode, 113 | index, 114 | operand, 115 | Fr::zero(), 116 | true, 117 | )?; 118 | r.push(limb); 119 | 120 | for subgroup in group 121 | .into_iter() 122 | .skip(1) 123 | .collect::>() 124 | .chunks_exact(4) 125 | { 126 | let (limb, _op) = config.assign_merged_operands( 127 | region, 128 | offset, 129 | subgroup.to_vec(), 130 | Fr::from_u128(1u128 << 64), 131 | true, 132 | )?; 133 | r.push(limb); 134 | } 135 | } 136 | 137 | let default_table = hash_to_host_call_table( 138 | [ 139 | Fr::one(), 140 | Fr::zero(), 141 | Fr::zero(), 142 | Fr::zero(), 143 | Fr::zero(), 144 | Fr::zero(), 145 | Fr::zero(), 146 | Fr::zero(), 147 | ], 148 | POSEIDON_HASHER.clone().squeeze(), 149 | ); 150 | 151 | //let entries = default_table. 
152 | let default_entries: Vec<((Fr, Fr), Fr)> = default_table 153 | .0 154 | .into_iter() 155 | .map(|x| ((Fr::from(x.value), Fr::from(x.op as u64)), Fr::zero())) 156 | .collect::>(); 157 | 158 | assert!(k >= 22); 159 | let total_available = Self::max_rounds(k); 160 | assert!(total_used_instructions <= total_available); 161 | 162 | for _ in 0..=total_available - total_used_instructions { 163 | let ((operand, opcode), index) = default_entries[0].clone(); 164 | assert!(opcode.clone() == Fr::from(PoseidonNew as u64)); 165 | 166 | let (limb, _op) = config.assign_one_line( 167 | region, 168 | offset, 169 | operand, 170 | opcode, 171 | index, 172 | operand, 173 | Fr::zero(), 174 | false, 175 | )?; 176 | r.push(limb); 177 | 178 | for subgroup in default_entries 179 | .clone() 180 | .iter() 181 | .skip(1) 182 | .collect::>() 183 | .chunks_exact(4) 184 | { 185 | let (limb, _op) = config.assign_merged_operands( 186 | region, 187 | offset, 188 | subgroup.to_vec(), 189 | Fr::from_u128(1u128 << 64), 190 | false, 191 | )?; 192 | r.push(limb); 193 | } 194 | } 195 | 196 | Ok(r) 197 | } 198 | 199 | fn synthesize_separate( 200 | &mut self, 201 | _arg_cells: &Vec>, 202 | _layouter: &impl Layouter, 203 | ) -> Result<(), Error> { 204 | Ok(()) 205 | } 206 | 207 | fn synthesize( 208 | &mut self, 209 | offset: &mut usize, 210 | arg_cells: &Vec>, 211 | region: &Region, 212 | _helper: &(), 213 | ) -> Result<(), Error> { 214 | println!("total args is {}", arg_cells.len()); 215 | *offset = { 216 | let mut local_offset = *offset; 217 | let timer = start_timer!(|| "assign"); 218 | let config = self.config.clone(); 219 | self.initialize(&config, region, &mut local_offset)?; 220 | for arg_group in arg_cells.chunks_exact(10).into_iter() { 221 | let args = arg_group.into_iter().map(|x| x.clone()); 222 | let args = args.collect::>(); 223 | self.assign_permute( 224 | region, 225 | &mut local_offset, 226 | &args[1..9].to_vec().try_into().unwrap(), 227 | &args[0], 228 | &args[9], 229 | )?; 230 | } 231 
| end_timer!(timer); 232 | local_offset 233 | }; 234 | Ok(()) 235 | } 236 | } 237 | 238 | #[cfg(test)] 239 | mod tests { 240 | use crate::host::{ExternalHostCallEntry, ExternalHostCallEntryTable}; 241 | use halo2_proofs::pairing::bn256::Fr; 242 | use std::fs::File; 243 | 244 | use crate::host::ForeignInst::{PoseidonFinalize, PoseidonNew, PoseidonPush}; 245 | 246 | fn hash_cont(restart: bool) -> Vec { 247 | vec![ExternalHostCallEntry { 248 | op: PoseidonNew as usize, 249 | value: if restart { 1u64 } else { 0u64 }, 250 | is_ret: false, 251 | }] 252 | } 253 | 254 | fn hash_to_host_call_table(inputs: Vec<[Fr; 8]>) -> ExternalHostCallEntryTable { 255 | let mut r = vec![]; 256 | let mut start = true; 257 | let mut hasher = crate::host::poseidon::POSEIDON_HASHER.clone(); 258 | for round in inputs.into_iter() { 259 | r.push(hash_cont(start)); 260 | start = false; 261 | for f in round.iter() { 262 | r.push(crate::adaptor::fr_to_args(*f, 4, 64, PoseidonPush)); 263 | } 264 | let result = hasher.update_exact(&round); 265 | r.push(crate::adaptor::fr_to_args(result, 4, 64, PoseidonFinalize)); 266 | } 267 | ExternalHostCallEntryTable(r.into_iter().flatten().collect()) 268 | } 269 | 270 | #[test] 271 | fn generate_poseidon_input() { 272 | let table = hash_to_host_call_table(vec![[ 273 | Fr::one(), 274 | Fr::zero(), 275 | Fr::zero(), 276 | Fr::zero(), 277 | Fr::zero(), 278 | Fr::zero(), 279 | Fr::zero(), 280 | Fr::zero(), 281 | ]]); 282 | let file = File::create("poseidontest.json").expect("can not create file"); 283 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 284 | } 285 | 286 | #[test] 287 | fn generate_poseidon_input_multi() { 288 | let table = hash_to_host_call_table(vec![ 289 | [Fr::one(); 8], 290 | [ 291 | Fr::one(), 292 | Fr::zero(), 293 | Fr::zero(), 294 | Fr::zero(), 295 | Fr::zero(), 296 | Fr::zero(), 297 | Fr::zero(), 298 | Fr::zero(), 299 | ], 300 | ]); 301 | let file = File::create("poseidontest_multi.json").expect("can not create 
file"); 302 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 303 | } 304 | } 305 | -------------------------------------------------------------------------------- /src/adaptor/keccakadaptor.rs: -------------------------------------------------------------------------------- 1 | use crate::adaptor::get_selected_entries; 2 | use crate::circuits::host::{HostOpConfig, HostOpSelector}; 3 | use crate::circuits::keccak256::KeccakChip; 4 | use crate::circuits::keccak256::KeccakGateConfig; 5 | use crate::host::keccak256::KECCAK_HASHER; 6 | use crate::host::ForeignInst::{Keccak256Finalize, Keccak256New, Keccak256Push}; 7 | use crate::host::{ExternalHostCallEntry, ExternalHostCallEntryTable, ForeignInst}; 8 | use crate::utils::Limb; 9 | use ark_std::{end_timer, start_timer}; 10 | use halo2_proofs::circuit::{Layouter, Region}; 11 | use halo2_proofs::pairing::bn256::Fr; 12 | use halo2_proofs::plonk::{Advice, Column, ConstraintSystem, Error}; 13 | 14 | fn hash_cont(restart: bool) -> Vec { 15 | vec![ExternalHostCallEntry { 16 | op: Keccak256New as usize, 17 | value: if restart { 1u64 } else { 0u64 }, 18 | is_ret: false, 19 | }] 20 | } 21 | 22 | // 1 + 17 + 4 23 | fn hash_to_host_call_table(inputs: &[Fr; 17], result: &[Fr; 4]) -> ExternalHostCallEntryTable { 24 | let mut r = vec![]; 25 | r.push(hash_cont(true)); 26 | for f in inputs.iter() { 27 | r.push(crate::adaptor::fr_to_args(*f, 1, 64, Keccak256Push)); 28 | } 29 | for f in result.iter() { 30 | r.push(crate::adaptor::fr_to_args(*f, 1, 64, Keccak256Finalize)); 31 | } 32 | ExternalHostCallEntryTable(r.into_iter().flatten().collect()) 33 | } 34 | 35 | const TOTAL_CONSTRUCTIONS: usize = 50; 36 | 37 | impl HostOpSelector for KeccakChip { 38 | type Config = KeccakGateConfig; 39 | type Helper = (); 40 | fn configure( 41 | meta: &mut ConstraintSystem, 42 | shared_advice: &Vec>, 43 | ) -> Self::Config { 44 | KeccakChip::::configure(meta, shared_advice) 45 | } 46 | 47 | fn construct(c: Self::Config) -> Self 
{ 48 | KeccakChip::::construct(c) 49 | } 50 | 51 | fn max_rounds(k: usize) -> usize { 52 | super::get_max_round(k, TOTAL_CONSTRUCTIONS) 53 | } 54 | 55 | fn opcodes() -> Vec { 56 | vec![ 57 | Fr::from(ForeignInst::Keccak256New as u64), 58 | Fr::from(ForeignInst::Keccak256Push as u64), 59 | Fr::from(ForeignInst::Keccak256Finalize as u64), 60 | ] 61 | } 62 | 63 | fn assign( 64 | region: &Region, 65 | k: usize, 66 | offset: &mut usize, 67 | shared_operands: &Vec, 68 | shared_opcodes: &Vec, 69 | config: &HostOpConfig, 70 | ) -> Result>, Error> { 71 | let opcodes: Vec = Self::opcodes(); 72 | let selected_entries = get_selected_entries(shared_operands, shared_opcodes, &opcodes); 73 | 74 | let total_used_instructions = selected_entries.len() / (1 + 17 + 4); 75 | println!(" selected entries: {:?}", total_used_instructions); 76 | 77 | let mut r = vec![]; 78 | 79 | // TODO: Change 8 to RATE ? 80 | for group in selected_entries.chunks_exact(1 + 17 + 4) { 81 | let ((operand, opcode), index) = *group.get(0).clone().unwrap(); 82 | assert_eq!(opcode.clone(), Fr::from(Keccak256New as u64)); 83 | let (limb, _op) = config.assign_one_line( 84 | //operand, opcode 85 | region, 86 | offset, 87 | operand, 88 | opcode, 89 | index, 90 | operand, //same as operand as indicator is 0 91 | Fr::zero(), //not merged 92 | true, // in filtered table 93 | )?; 94 | r.push(limb); 95 | 96 | for subgroup in group.into_iter().skip(1) { 97 | let ((operand, opcode), index) = subgroup.clone(); 98 | let (limb, _op) = config.assign_one_line( 99 | region, 100 | offset, 101 | operand, 102 | opcode, 103 | index, 104 | operand, //same as operand as indicator is 0 105 | Fr::zero(), //not merged 106 | true, // in filtered table 107 | )?; 108 | r.push(limb); 109 | } 110 | } 111 | 112 | let default_table = hash_to_host_call_table( 113 | &[ 114 | Fr::one(), 115 | Fr::zero(), 116 | Fr::zero(), 117 | Fr::zero(), 118 | Fr::zero(), 119 | Fr::zero(), 120 | Fr::zero(), 121 | Fr::zero(), 122 | Fr::zero(), 123 | Fr::zero(), 
124 | Fr::zero(), 125 | Fr::zero(), 126 | Fr::zero(), 127 | Fr::zero(), 128 | Fr::zero(), 129 | Fr::zero(), 130 | Fr::from(1u64 << 63), 131 | ], 132 | &KECCAK_HASHER.clone().squeeze().map(|x| Fr::from(x)), 133 | ); 134 | 135 | //let entries = default_table. 136 | let default_entries: Vec<((Fr, Fr), Fr)> = default_table 137 | .0 138 | .into_iter() 139 | .map(|x| ((Fr::from(x.value), Fr::from(x.op as u64)), Fr::zero())) 140 | .collect::>(); 141 | 142 | assert!(k >= 22); 143 | let total_available = Self::max_rounds(k); 144 | assert!(total_used_instructions <= total_available); 145 | for _ in 0..=total_available - total_used_instructions { 146 | let ((operand, opcode), index) = default_entries[0].clone(); 147 | assert_eq!(opcode.clone(), Fr::from(Keccak256New as u64)); 148 | 149 | let (limb, _op) = config.assign_one_line( 150 | region, 151 | offset, 152 | operand, 153 | opcode, 154 | index, 155 | operand, 156 | Fr::zero(), 157 | false, 158 | )?; 159 | r.push(limb); 160 | 161 | for subgroup in default_entries.clone().into_iter().skip(1) { 162 | let ((operand, opcode), index) = subgroup.clone(); 163 | let (limb, _op) = config.assign_one_line( 164 | region, 165 | offset, 166 | operand, 167 | opcode, 168 | index, 169 | operand, //same as operand as indicator is 0 170 | Fr::zero(), //not merged 171 | false, // in filtered table 172 | )?; 173 | r.push(limb); 174 | } 175 | } 176 | Ok(r) 177 | } 178 | 179 | fn synthesize_separate( 180 | &mut self, 181 | _arg_cells: &Vec>, 182 | _layouter: &impl Layouter, 183 | ) -> Result<(), Error> { 184 | Ok(()) 185 | } 186 | 187 | fn synthesize( 188 | &mut self, 189 | offset: &mut usize, 190 | arg_cells: &Vec>, 191 | region: &Region, 192 | _helper: &(), 193 | ) -> Result<(), Error> { 194 | println!("keccak total args is {}", arg_cells.len()); 195 | *offset = { 196 | println!("keccak adaptor total args is {}", arg_cells.len()); 197 | let mut local_offset = *offset; 198 | let timer = start_timer!(|| "assign"); 199 | let config = 
self.config.clone(); 200 | self.initialize(&config, region, &mut local_offset)?; 201 | for arg_group in arg_cells.chunks_exact(22).into_iter() { 202 | let args = arg_group.into_iter().map(|x| x.clone()); 203 | let args = args.collect::>(); 204 | //println!("args {:?}", args); 205 | self.assign_permute( 206 | region, 207 | &mut local_offset, 208 | &args[1..18].to_vec().try_into().unwrap(), 209 | &args[0], 210 | &args[18..22].to_vec().try_into().unwrap(), 211 | )?; 212 | } 213 | end_timer!(timer); 214 | local_offset 215 | }; 216 | Ok(()) 217 | } 218 | } 219 | 220 | #[cfg(test)] 221 | mod tests { 222 | use crate::host::ForeignInst::{Keccak256Finalize, Keccak256New, Keccak256Push}; 223 | use crate::host::{ExternalHostCallEntry, ExternalHostCallEntryTable}; 224 | use crate::utils::field_to_u64; 225 | use halo2_proofs::pairing::bn256::Fr; 226 | use std::fs::File; 227 | 228 | fn hash_cont(restart: bool) -> Vec { 229 | vec![ExternalHostCallEntry { 230 | op: Keccak256New as usize, 231 | value: if restart { 1u64 } else { 0u64 }, 232 | is_ret: false, 233 | }] 234 | } 235 | 236 | fn hash_to_host_call_table(inputs: Vec<[Fr; 17]>) -> ExternalHostCallEntryTable { 237 | let mut r = vec![]; 238 | let mut start = true; 239 | let mut hasher = crate::host::keccak256::KECCAK_HASHER.clone(); 240 | for round in inputs.into_iter() { 241 | r.push(hash_cont(start)); 242 | start = false; 243 | for f in round.iter() { 244 | r.push(crate::adaptor::fr_to_args(*f, 1, 64, Keccak256Push)); 245 | } 246 | let result = hasher.update_exact(&round.map(|x| field_to_u64(&x))); 247 | for f in result.iter() { 248 | r.push(crate::adaptor::fr_to_args( 249 | Fr::from(*f), 250 | 1, 251 | 64, 252 | Keccak256Finalize, 253 | )); 254 | } 255 | } 256 | ExternalHostCallEntryTable(r.into_iter().flatten().collect()) 257 | } 258 | 259 | #[test] 260 | fn generate_keccak_input_single() { 261 | let table = hash_to_host_call_table(vec![[ 262 | Fr::one(), 263 | Fr::one(), 264 | Fr::zero(), 265 | Fr::zero(), 266 | 
Fr::zero(), 267 | Fr::zero(), 268 | Fr::zero(), 269 | Fr::zero(), 270 | Fr::zero(), 271 | Fr::zero(), 272 | Fr::zero(), 273 | Fr::zero(), 274 | Fr::zero(), 275 | Fr::zero(), 276 | Fr::zero(), 277 | Fr::zero(), 278 | Fr::from(1u64 << 63), 279 | ]]); 280 | let file = File::create("keccak256_test.json").expect("can not create file"); 281 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 282 | } 283 | 284 | #[test] 285 | fn generate_keccak_input_multi() { 286 | let table = hash_to_host_call_table(vec![ 287 | [Fr::one(); 17], 288 | [ 289 | Fr::one(), 290 | Fr::one(), 291 | Fr::zero(), 292 | Fr::zero(), 293 | Fr::zero(), 294 | Fr::zero(), 295 | Fr::zero(), 296 | Fr::zero(), 297 | Fr::zero(), 298 | Fr::zero(), 299 | Fr::zero(), 300 | Fr::zero(), 301 | Fr::zero(), 302 | Fr::zero(), 303 | Fr::zero(), 304 | Fr::zero(), 305 | Fr::from(1u64 << 63), 306 | ], 307 | ]); 308 | let file = File::create("keccak256_test_multi.json").expect("can not create file"); 309 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 310 | } 311 | 312 | #[test] 313 | fn generate_keccak_input_multi_byte() { 314 | let table = hash_to_host_call_table(vec![ 315 | [Fr::one(); 17], 316 | [Fr::one(); 17], 317 | [Fr::one(); 17], 318 | [Fr::one(); 17], 319 | [Fr::one(); 17], 320 | [Fr::one(); 17], 321 | [Fr::one(); 17], 322 | [Fr::one(); 17], 323 | [Fr::one(); 17], 324 | [Fr::one(); 17], 325 | [Fr::one(); 17], 326 | [Fr::one(); 17], 327 | [Fr::one(); 17], 328 | [Fr::one(); 17], 329 | [Fr::one(); 17], 330 | [Fr::one(); 17], 331 | [Fr::one(); 17], 332 | [Fr::one(); 17], 333 | [Fr::one(); 17], 334 | [Fr::one(); 17], 335 | [Fr::one(); 17], 336 | [Fr::one(); 17], 337 | [Fr::one(); 17], 338 | [Fr::one(); 17], 339 | [Fr::one(); 17], 340 | [Fr::one(); 17], 341 | [Fr::one(); 17], 342 | [Fr::one(); 17], 343 | [Fr::one(); 17], 344 | [Fr::one(); 17], 345 | [Fr::one(); 17], 346 | [Fr::one(); 17], 347 | [Fr::one(); 17], 348 | [Fr::one(); 17], 349 | [Fr::one(); 17], 350 
| [Fr::one(); 17], 351 | [Fr::one(); 17], 352 | [Fr::one(); 17], 353 | [Fr::one(); 17], 354 | [Fr::one(); 17], 355 | [Fr::one(); 17], 356 | [Fr::one(); 17], 357 | [Fr::one(); 17], 358 | [Fr::one(); 17], 359 | [Fr::one(); 17], 360 | [Fr::one(); 17], 361 | [Fr::one(); 17], 362 | [Fr::one(); 17], 363 | [Fr::one(); 17], 364 | [Fr::one(); 17], 365 | [Fr::one(); 17], 366 | [Fr::one(); 17], 367 | [Fr::one(); 17], 368 | [Fr::one(); 17], 369 | [ 370 | Fr::one(), 371 | Fr::one(), 372 | Fr::zero(), 373 | Fr::zero(), 374 | Fr::zero(), 375 | Fr::zero(), 376 | Fr::zero(), 377 | Fr::zero(), 378 | Fr::zero(), 379 | Fr::zero(), 380 | Fr::zero(), 381 | Fr::zero(), 382 | Fr::zero(), 383 | Fr::zero(), 384 | Fr::zero(), 385 | Fr::zero(), 386 | Fr::from(1u64 << 63), 387 | ], 388 | ]); 389 | let file = File::create("keccak256_test_multi_byte.json").expect("can not create file"); 390 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 391 | } 392 | } 393 | -------------------------------------------------------------------------------- /src/adaptor/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::host::ExternalHostCallEntry; 2 | use crate::host::ForeignInst; 3 | use crate::utils::field_to_bn; 4 | use halo2_proofs::arithmetic::{BaseExt, FieldExt}; 5 | use num_bigint::BigUint; 6 | 7 | // pub mod bls381adaptor; 8 | pub mod bn256adaptor; 9 | pub mod hashadaptor; 10 | pub mod keccakadaptor; 11 | pub mod merkleadaptor; 12 | pub mod msmadaptor; 13 | 14 | pub fn get_max_round(k: usize, reference_max: usize) -> usize { 15 | if k >= 22 { 16 | reference_max << (k - 22) 17 | } else { 18 | // we do not support host when k < 22 19 | 0 20 | } 21 | } 22 | 23 | pub fn fr_to_args( 24 | f: F, 25 | nblimbs: usize, 26 | sz: usize, 27 | op: ForeignInst, 28 | ) -> Vec { 29 | let mut bn = field_to_bn(&f); 30 | let mut ret = vec![]; 31 | for _ in 0..nblimbs { 32 | let d: BigUint = BigUint::from(1u128 << sz); 33 | let r = bn.clone() % 
d.clone(); 34 | let value = if r == BigUint::from(0 as u32) { 35 | 0 as u64 36 | } else { 37 | r.to_u64_digits()[0] 38 | }; 39 | bn = bn / d; 40 | let entry = ExternalHostCallEntry { 41 | op: op as usize, 42 | value, 43 | is_ret: false, 44 | }; 45 | ret.append(&mut vec![entry]); 46 | } 47 | ret 48 | } 49 | 50 | pub fn get_selected_entries( 51 | shared_operands: &Vec, 52 | shared_opcodes: &Vec, 53 | opcodes: &Vec, 54 | ) -> Vec<((Fr, Fr), Fr)> { 55 | let entries = shared_operands 56 | .clone() 57 | .into_iter() 58 | .zip(shared_opcodes.clone()); 59 | 60 | let v = entries 61 | .filter(|(_operand, opcode)| opcodes.contains(opcode)) 62 | .collect::>(); 63 | 64 | let len = v.len(); 65 | 66 | let shared_index: Vec = v 67 | .iter() 68 | .enumerate() 69 | .map(|(i, _)| Fr::from((len - i) as u64)) 70 | .collect(); 71 | 72 | v.into_iter() 73 | .zip(shared_index) 74 | .collect::>() 75 | } 76 | -------------------------------------------------------------------------------- /src/adaptor/msmadaptor.rs: -------------------------------------------------------------------------------- 1 | use crate::adaptor::field_to_bn; 2 | use crate::adaptor::get_selected_entries; 3 | use crate::circuits::babyjub::{AltJubChip, Point as CircuitPoint}; 4 | use crate::circuits::host::{HostOpConfig, HostOpSelector}; 5 | use crate::circuits::CommonGateConfig; 6 | use crate::host::jubjub::Point; 7 | use crate::host::ExternalHostCallEntry; 8 | use crate::host::ForeignInst::{JubjubSumNew, JubjubSumPush, JubjubSumResult}; 9 | use crate::utils::Limb; 10 | use ark_std::{end_timer, start_timer}; 11 | use halo2_proofs::arithmetic::FieldExt; 12 | use halo2_proofs::circuit::{Layouter, Region}; 13 | use halo2_proofs::pairing::bn256::Fr; 14 | use halo2_proofs::plonk::ConstraintSystem; 15 | use halo2_proofs::plonk::{Advice, Column, Error}; 16 | 17 | const MERGE_SIZE: usize = 4; 18 | const CHUNK_SIZE: usize = 1 + (2 + 1 + 2) * MERGE_SIZE; 19 | 20 | const TOTAL_CONSTRUCTIONS: usize = 600; 21 | 22 | fn 
msm_new(restart: bool) -> Vec { 23 | vec![ExternalHostCallEntry { 24 | op: JubjubSumNew as usize, 25 | value: if restart { 1u64 } else { 0u64 }, 26 | is_ret: false, 27 | }] 28 | } 29 | 30 | fn msm_to_host_call_table(inputs: &Vec<(Point, F)>) -> Vec { 31 | let mut r = vec![]; 32 | let mut start = true; 33 | let mut result = Point::identity(); 34 | for (p, c) in inputs.into_iter() { 35 | r.push(msm_new(start)); 36 | r.push(crate::adaptor::fr_to_args(p.x, 4, 64, JubjubSumPush)); 37 | r.push(crate::adaptor::fr_to_args(p.y, 4, 64, JubjubSumPush)); 38 | r.push(crate::adaptor::fr_to_args(*c, 4, 64, JubjubSumPush)); 39 | result = result.add(&p.mul_scalar(&field_to_bn(c))); 40 | r.push(crate::adaptor::fr_to_args(result.x, 4, 64, JubjubSumResult)); 41 | r.push(crate::adaptor::fr_to_args(result.y, 4, 64, JubjubSumResult)); 42 | start = false; 43 | } 44 | r.into_iter().flatten().collect::>() 45 | } 46 | 47 | impl HostOpSelector for AltJubChip { 48 | type Config = CommonGateConfig; 49 | type Helper = (); 50 | fn configure( 51 | meta: &mut ConstraintSystem, 52 | shared_advice: &Vec>, 53 | ) -> Self::Config { 54 | AltJubChip::::configure(meta, shared_advice) 55 | } 56 | 57 | fn construct(c: Self::Config) -> Self { 58 | AltJubChip::new(c) 59 | } 60 | 61 | fn max_rounds(k: usize) -> usize { 62 | super::get_max_round(k, TOTAL_CONSTRUCTIONS) 63 | } 64 | 65 | fn opcodes() -> Vec { 66 | vec![ 67 | Fr::from(JubjubSumNew as u64), 68 | Fr::from(JubjubSumPush as u64), 69 | Fr::from(JubjubSumResult as u64), 70 | ] 71 | } 72 | 73 | fn assign( 74 | region: &Region, 75 | k: usize, 76 | offset: &mut usize, 77 | shared_operands: &Vec, 78 | shared_opcodes: &Vec, 79 | config: &HostOpConfig, 80 | ) -> Result>, Error> { 81 | println!("host op assign {}!", offset); 82 | let opcodes = Self::opcodes(); 83 | let selected_entries = get_selected_entries(shared_operands, shared_opcodes, &opcodes); 84 | let total_used_instructions = selected_entries.len() / (CHUNK_SIZE); 85 | 86 | let mut r = vec![]; 87 | 
88 | for group in selected_entries.chunks_exact(CHUNK_SIZE) { 89 | let ((operand, opcode), index) = *group.get(0).clone().unwrap(); 90 | assert!(opcode.clone() == Fr::from(JubjubSumNew as u64)); 91 | 92 | let (limb, _) = config.assign_one_line( 93 | region, 94 | offset, 95 | operand, 96 | opcode, 97 | index, 98 | operand, 99 | Fr::zero(), 100 | true, 101 | )?; 102 | r.push(limb); 103 | 104 | for subgroup in group 105 | .into_iter() 106 | .skip(1) 107 | .collect::>() 108 | .chunks_exact(MERGE_SIZE) 109 | { 110 | let (limb, _) = config.assign_merged_operands( 111 | region, 112 | offset, 113 | subgroup.to_vec(), 114 | Fr::from_u128(1u128 << 64), 115 | true, 116 | )?; 117 | r.push(limb); 118 | } 119 | } 120 | 121 | let default_table = msm_to_host_call_table(&vec![(Point::identity(), Fr::one())]); 122 | 123 | //let entries = default_table. 124 | let default_entries: Vec<((Fr, Fr), Fr)> = default_table 125 | .into_iter() 126 | .map(|x| ((Fr::from(x.value), Fr::from(x.op as u64)), Fr::zero())) 127 | .collect::>(); 128 | 129 | assert!(k >= 22); 130 | let total_available = Self::max_rounds(k); 131 | assert!(total_used_instructions <= total_available); 132 | 133 | for _ in 0..=total_available - total_used_instructions { 134 | let ((operand, opcode), index) = default_entries[0].clone(); 135 | assert!(opcode.clone() == Fr::from(JubjubSumNew as u64)); 136 | 137 | let (limb, _) = config.assign_one_line( 138 | region, 139 | offset, 140 | operand, 141 | opcode, 142 | index, 143 | operand, 144 | Fr::zero(), 145 | false, 146 | )?; 147 | r.push(limb); 148 | 149 | for subgroup in default_entries 150 | .clone() 151 | .iter() 152 | .skip(1) 153 | .collect::>() 154 | .chunks_exact(MERGE_SIZE) 155 | { 156 | let (limb, _) = config.assign_merged_operands( 157 | region, 158 | offset, 159 | subgroup.to_vec(), 160 | Fr::from_u128(1u128 << 64), 161 | false, 162 | )?; 163 | r.push(limb); 164 | } 165 | } 166 | 167 | Ok(r) 168 | } 169 | 170 | fn synthesize_separate( 171 | &mut self, 172 | 
_arg_cells: &Vec>, 173 | _layouter: &impl Layouter, 174 | ) -> Result<(), Error> { 175 | Ok(()) 176 | } 177 | 178 | fn synthesize( 179 | &mut self, 180 | offset: &mut usize, 181 | arg_cells: &Vec>, 182 | region: &Region, 183 | _helper: &(), 184 | ) -> Result<(), Error> { 185 | println!("msm adaptor total args is {}", arg_cells.len()); 186 | *offset = { 187 | println!("msm adaptor starting offset is {}", offset); 188 | let mut local_offset = *offset; 189 | let timer = start_timer!(|| "assign"); 190 | let config = self.config.clone(); 191 | self.initialize(&config, region, &mut local_offset)?; 192 | // arg_cells format 1 + 2 + 1 + 2 193 | for arg_group in arg_cells.chunks_exact(6).into_iter() { 194 | let args = arg_group.into_iter().map(|x| x.clone()); 195 | let args = args.collect::>(); 196 | self.assign_incremental_msm( 197 | region, 198 | &mut local_offset, 199 | &CircuitPoint { 200 | x: args[1].clone(), 201 | y: args[2].clone(), 202 | }, 203 | &args[3], 204 | &args[0], 205 | &CircuitPoint { 206 | x: args[4].clone(), 207 | y: args[5].clone(), 208 | }, 209 | )?; 210 | } 211 | end_timer!(timer); 212 | local_offset 213 | }; 214 | Ok(()) 215 | } 216 | } 217 | 218 | #[cfg(test)] 219 | mod tests { 220 | use super::msm_to_host_call_table; 221 | use crate::host::jubjub::Point; 222 | use crate::host::ExternalHostCallEntryTable; 223 | use halo2_proofs::pairing::bn256::Fr; 224 | use std::fs::File; 225 | 226 | #[test] 227 | fn generate_jubjub_msm_input() { 228 | let default_table = msm_to_host_call_table(&vec![(Point::identity(), Fr::one())]); 229 | let file = File::create("jubjub.json").expect("can not create file"); 230 | serde_json::to_writer_pretty(file, &ExternalHostCallEntryTable(default_table)) 231 | .expect("can not write to file"); 232 | } 233 | 234 | #[test] 235 | fn generate_jubjub_msm_input_multi() { 236 | let default_table = msm_to_host_call_table(&vec![(Point::identity(), Fr::one())]); 237 | let file = File::create("jubjub_multi.json").expect("can not create 
file"); 238 | serde_json::to_writer_pretty(file, &ExternalHostCallEntryTable(default_table)) 239 | .expect("can not write to file"); 240 | } 241 | } 242 | -------------------------------------------------------------------------------- /src/circuits/bits_arith.rs: -------------------------------------------------------------------------------- 1 | use crate::circuits::{LookupAssistChip, LookupAssistConfig}; 2 | use crate::utils::{GateCell, Limb}; 3 | use crate::{ 4 | customized_circuits, customized_circuits_expand, item_count, table_item, value_for_assign, 5 | }; 6 | use halo2_proofs::{ 7 | arithmetic::FieldExt, 8 | circuit::Region, 9 | plonk::{Advice, Column, ConstraintSystem, Error, Expression, Fixed, Selector, VirtualCells}, 10 | poly::Rotation, 11 | }; 12 | use std::marker::PhantomData; 13 | 14 | pub const BIT_XOR: u8 = 1; 15 | pub const BIT_AND: u8 = 2; 16 | pub const BIT_NOT_AND: u8 = 3; 17 | pub const BIT_ROTATE_LEFT: u8 = 4; // 4 + 7, max 11 ---- total 2^4 18 | pub const BIT_ROTATE_RIGHT: u8 = 12; // 12 + 7, max 21 -- total 2^5 19 | 20 | // a0 a1 a2 a3 21 | // a4 a5 a6 a7 22 | // b0 b1 b2 b3 23 | // b4 b5 b6 b7 24 | // c0 c1 c2 c3 25 | // c4 c5 c6 c7 26 | // (a0,b0,c0) in lookup_set 27 | 28 | #[rustfmt::skip] 29 | customized_circuits!(BitsArithConfig, 1, 0, 4, 0, 30 | | lhs | rhs | res | op 31 | ); 32 | 33 | impl LookupAssistConfig for BitsArithConfig { 34 | /// register columns (col) to be XOR checked by limb size (sz) 35 | fn register( 36 | &self, 37 | cs: &mut ConstraintSystem, 38 | cols: impl Fn(&mut VirtualCells) -> Vec>, 39 | ) { 40 | for i in 0..4 { 41 | cs.lookup_any("check bits arith", |meta| { 42 | let lhs = self.get_expr(meta, BitsArithConfig::lhs()); 43 | let rhs = self.get_expr(meta, BitsArithConfig::rhs()); 44 | let op = self.get_expr(meta, BitsArithConfig::op()); 45 | let res = self.get_expr(meta, BitsArithConfig::res()); 46 | let icols = cols(meta); 47 | vec![ 48 | (icols[i].clone(), lhs), 49 | (icols[i + 4].clone(), rhs), 50 | (icols[i + 
/// Fills one 256x256 slab of the bit-arithmetic lookup table.
///
/// For every byte pair `(i, j)` this writes a single fixed row
/// `(lhs = i, rhs = j, res = opcall(i, j), op = opcode)`, advancing
/// `offset` one row per entry (65536 rows per opcode). Lookups later
/// match `(lhs, rhs, res, op)` tuples against these rows.
fn assign_table_entries(
    &mut self,
    region: &Region<F>,
    opcall: impl Fn(u8, u8) -> u8,
    opcode: u8,
    offset: &mut usize,
) -> Result<(), Error> {
    let op = F::from(opcode as u64);
    for i in 0..=u8::MAX {
        for j in 0..=u8::MAX {
            let lhs = F::from(i as u64);
            let rhs = F::from(j as u64);
            let res = F::from(opcall(i, j) as u64);
            self.config
                .assign_cell(region, *offset, &BitsArithConfig::lhs(), lhs)?;
            self.config
                .assign_cell(region, *offset, &BitsArithConfig::rhs(), rhs)?;
            self.config
                .assign_cell(region, *offset, &BitsArithConfig::res(), res)?;
            self.config
                .assign_cell(region, *offset, &BitsArithConfig::op(), op)?;
            *offset = *offset + 1;
        }
    }
    Ok(())
}

/// initialize the table columns that contains every possible result of 8-bit value via XOR or ADD operation
/// initialize needs to be called before using the BitsArithchip
pub fn initialize(&mut self, region: &Region<F>, offset: &mut usize) -> Result<(), Error> {
    // initialize the XOR table with the encoded value
    self.assign_table_entries(region, |x, y| x ^ y, BIT_XOR, offset)?;
    self.assign_table_entries(region, |x, y| x & y, BIT_AND, offset)?;
    self.assign_table_entries(region, |x, y| (!x) & y, BIT_NOT_AND, offset)?;
    // Eight rotate-left sub-tables, one per bit shift i in 0..8, tagged
    // BIT_ROTATE_LEFT + i. Row semantics: res combines the low bits of x
    // shifted up by i with the top i bits of the *adjacent* byte y — the
    // byte-level building block of a 64-bit rotation (see
    // KeccakState::rotate_left).
    // NOTE(review): no BIT_ROTATE_RIGHT entries are populated here even
    // though the constant is declared — presumably right rotation is
    // expressed via left rotation elsewhere; confirm before relying on
    // BIT_ROTATE_RIGHT lookups.
    for i in 0..8 {
        self.assign_table_entries(
            region,
            |x, y| {
                if i != 0 {
                    ((x << i) & 0xff) + (y >> (8 - i))
                } else {
                    x
                }
            },
            BIT_ROTATE_LEFT + i,
            offset,
        )?;
    }
    Ok(())
}
config(&self) -> &Self::Config { 44 | &self.config 45 | } 46 | 47 | fn loaded(&self) -> &Self::Loaded { 48 | &() 49 | } 50 | } 51 | 52 | pub fn fr_to_bn(f: &Fr) -> BigUint { 53 | let mut bytes: Vec = Vec::new(); 54 | f.write(&mut bytes).unwrap(); 55 | BigUint::from_bytes_le(&bytes[..]) 56 | } 57 | 58 | pub fn fr_to_bool(f: &Fr) -> bool { 59 | let mut bytes: Vec = Vec::new(); 60 | f.write(&mut bytes).unwrap(); 61 | return bytes[0] == 1u8; 62 | } 63 | 64 | fn assigned_cells_to_bn256( 65 | a: &Vec>, //G1 (3 * 2 + 1) 66 | start: usize, 67 | ) -> BigUint { 68 | let mut bn = BigUint::from(0 as u64); 69 | for i in start..start + 3 { 70 | let shift = BigUint::from(2 as u32).pow(108 * (i - start) as u32); 71 | bn.add_assign(fr_to_bn(&a[i].value).mul(shift.clone())); 72 | } 73 | bn 74 | } 75 | 76 | fn assign_scalar(ctx: &mut NativeScalarEccContext, a: Fr) -> AssignedValue { 77 | ctx.plonk_region_context().assign(a).unwrap() 78 | } 79 | 80 | fn assign_point_g1( 81 | ctx: &mut NativeScalarEccContext, 82 | a: &Vec>, //G1 (3 * 2 + 1) 83 | ) -> AssignedPoint { 84 | let x_bn = assigned_cells_to_bn256(a, 0); 85 | let y_bn = assigned_cells_to_bn256(a, 3); 86 | let is_identity = fr_to_bool(&a[6].value); 87 | let x = ctx.integer_context().assign_w(Some(x_bn)).unwrap(); 88 | let y = ctx.integer_context().assign_w(Some(y_bn)).unwrap(); 89 | AssignedPoint::new( 90 | x, 91 | y, 92 | ctx.plonk_region_context() 93 | .assign(if is_identity { Fr::one() } else { Fr::zero() }) 94 | .unwrap() 95 | .into(), 96 | ) 97 | } 98 | 99 | fn assign_point_g2( 100 | ctx: &mut NativeScalarEccContext, 101 | b: &Vec>, //G2 (3 * 4 + 1) 102 | ) -> AssignedG2Affine { 103 | let x1_bn = assigned_cells_to_bn256(b, 0); 104 | let x2_bn = assigned_cells_to_bn256(b, 3); 105 | let y1_bn = assigned_cells_to_bn256(b, 6); 106 | let y2_bn = assigned_cells_to_bn256(b, 9); 107 | let x1 = ctx.integer_context().assign_w(Some(x1_bn)).unwrap(); 108 | let x2 = ctx.integer_context().assign_w(Some(x2_bn)).unwrap(); 109 | let y1 = 
ctx.integer_context().assign_w(Some(y1_bn)).unwrap(); 110 | let y2 = ctx.integer_context().assign_w(Some(y2_bn)).unwrap(); 111 | let is_identity = fr_to_bool(&b[12].value); 112 | AssignedG2Affine::new( 113 | (x1, x2), 114 | (y1, y2), 115 | ctx.plonk_region_context() 116 | .assign(if is_identity { Fr::one() } else { Fr::zero() }) 117 | .unwrap() 118 | .into(), 119 | ) 120 | } 121 | 122 | fn enable_fr_permute( 123 | region: &Region, 124 | fr: &AssignedValue, 125 | input: &Vec>, 126 | ) -> Result<(), Error> { 127 | region.constrain_equal(input[0].get_the_cell().cell(), fr.cell()) 128 | } 129 | 130 | fn enable_fq_permute( 131 | region: &Region, 132 | fq: &AssignedFq, 133 | input: &Vec>, 134 | ) -> Result<(), Error> { 135 | for i in 0..3 { 136 | region.constrain_equal( 137 | input[i].get_the_cell().cell(), 138 | fq.limbs()[i].unwrap().cell(), 139 | )?; 140 | } 141 | Ok(()) 142 | } 143 | 144 | fn enable_g1affine_permute( 145 | region: &Region, 146 | point: &AssignedPoint, 147 | input: &Vec>, 148 | ) -> Result<(), Error> { 149 | let mut inputs = input.chunks(3); 150 | enable_fq_permute(region, &point.x, &inputs.next().unwrap().to_vec())?; 151 | enable_fq_permute(region, &point.y, &inputs.next().unwrap().to_vec())?; 152 | region.constrain_equal(input[6].get_the_cell().cell(), point.z.cell())?; 153 | Ok(()) 154 | } 155 | 156 | fn enable_g2affine_permute( 157 | region: &Region, 158 | point: &AssignedG2Affine, 159 | input: &Vec>, 160 | ) -> Result<(), Error> { 161 | let mut inputs = input.chunks(3); 162 | enable_fq_permute(region, &point.x.0, &inputs.next().unwrap().to_vec())?; 163 | enable_fq_permute(region, &point.x.1, &inputs.next().unwrap().to_vec())?; 164 | enable_fq_permute(region, &point.y.0, &inputs.next().unwrap().to_vec())?; 165 | enable_fq_permute(region, &point.y.1, &inputs.next().unwrap().to_vec())?; 166 | region.constrain_equal(input[12].get_the_cell().cell(), point.z.cell())?; 167 | Ok(()) 168 | } 169 | 170 | fn enable_fq12_permute( 171 | region: &Region, 172 | 
/// Builds a pairing chip from an already-built `Bn256ChipConfig`.
pub fn construct(config: <Self as Chip<Fr>>::Config) -> Self {
    Self {
        config: config.clone(),
        _marker: PhantomData,
    }
}

/// Registers the native-scalar ECC columns/gates this chip needs and
/// returns the resulting configuration.
pub fn configure(cs: &mut ConstraintSystem<Fr>) -> <Self as Chip<Fr>>::Config {
    Bn256ChipConfig {
        ecc_chip_config: NativeScalarEccConfig::configure::<G1Affine>(cs),
    }
}
/// Chip proving an incremental sum of scalar-multiplied BN256 G1 points
/// (row layout consumed by `load_bn256_sum_circuit`).
pub struct Bn256SumChip<N: FieldExt> {
    // Same native-scalar ECC configuration shape as Bn256PairChip.
    config: Bn256ChipConfig,
    _marker: PhantomData<N>,
}

impl<N: FieldExt> Chip<N> for Bn256SumChip<N> {
    type Config = Bn256ChipConfig;
    type Loaded = ();

    /// Returns the chip configuration fixed at construction time.
    fn config(&self) -> &Self::Config {
        &self.config
    }

    /// This chip carries no loaded (fixed) data.
    fn loaded(&self) -> &Self::Loaded {
        &()
    }
}
group.get(0).unwrap().value != Fr::zero() { 295 | identity.clone() 296 | } else { 297 | sum 298 | }; 299 | let a = assign_scalar(&mut ctx, group.get(1).unwrap().value); 300 | let g = assign_point_g1(&mut ctx, &group.get(2..9).unwrap().to_vec()); 301 | let rhs = ctx.ecc_mul(&g, a); 302 | let sum_ret = ctx.ecc_add(&lhs, &rhs)?; 303 | let sum_ret = ctx.ecc_reduce(&sum_ret)?; 304 | 305 | sum = sum_ret.clone(); 306 | ais.push(a); 307 | g1s.push(g); 308 | sums.push(sum_ret); 309 | } 310 | 311 | ais.iter().enumerate().for_each(|(i, x)| { 312 | enable_fr_permute(&mut region, x, &ls[16 * i + 1..16 * i + 2].to_vec()).unwrap() 313 | }); 314 | g1s.iter().enumerate().for_each(|(i, x)| { 315 | enable_g1affine_permute(&mut region, x, &ls[16 * i + 2..16 * i + 9].to_vec()) 316 | .unwrap() 317 | }); 318 | sums.iter().enumerate().for_each(|(i, x)| { 319 | enable_g1affine_permute(&mut region, x, &ls[16 * i + 9..16 * i + 16].to_vec()) 320 | .unwrap() 321 | }); 322 | end_timer!(timer); 323 | 324 | let timer = start_timer!(|| "finalize int mul"); 325 | ctx.integer_context().finalize_int_mul()?; 326 | end_timer!(timer); 327 | 328 | ctx.get_range_region_context().init()?; 329 | let timer = start_timer!(|| "finalize compact cells"); 330 | ctx.get_range_region_context().finalize_compact_cells()?; 331 | end_timer!(timer); 332 | 333 | Ok(()) 334 | }, 335 | )?; 336 | Ok(()) 337 | } 338 | } 339 | -------------------------------------------------------------------------------- /src/circuits/keccak256.rs: -------------------------------------------------------------------------------- 1 | use crate::circuits::bits_arith::BitsArithChip; 2 | use crate::circuits::bits_arith::BitsArithConfig; 3 | use crate::circuits::bits_arith::BIT_NOT_AND; 4 | use crate::circuits::bits_arith::BIT_ROTATE_LEFT; 5 | use crate::circuits::bits_arith::BIT_XOR; 6 | use crate::circuits::{CommonGateConfig, Limb}; 7 | use crate::host::keccak256::{N_R, RATE_LANES, ROTATION_CONSTANTS, ROUND_CONSTANTS}; 8 | use 
/// Creates a keccak chip with an all-zero (unassigned) 5x5 lane state and
/// the round constants preloaded as witness values.
pub fn construct(config: KeccakGateConfig) -> Self {
    // 5x5 lanes of zero-valued limbs; real cells are assigned in `initialize`.
    let state = [[0u32; 5]; 5].map(|x| x.map(|_| Limb::new(None, F::zero())));
    let default = [[0u32; 5]; 5].map(|x| x.map(|_| Limb::new(None, F::zero())));
    let rc = ROUND_CONSTANTS.map(|x| Limb::new(None, F::from(x)));
    let state = KeccakState { state, default, rc };

    KeccakChip {
        round: 0,
        config,
        keccak_state: state,
        // mapping rule: S[w(5y+x)+z] = state[x][y][z])]
        _marker: PhantomData,
    }
}

/// Fills the bit-arithmetic lookup tables and assigns the initial keccak
/// state cells. Must run before any permutation is assigned.
pub fn initialize(
    &mut self,
    config: &KeccakGateConfig,
    region: &Region<F>,
    offset: &mut usize,
) -> Result<(), Error> {
    let mut bitschip = BitsArithChip::new(self.config.arith.clone());
    // NOTE(review): the fixed table is filled starting from row 1 with its
    // own offset, independent of `offset` — presumably the table lives in
    // separate fixed columns and row 0 is deliberately skipped; confirm.
    bitschip.initialize(region, &mut 1)?;
    self.keccak_state.initialize(&config.common, region, offset)
}

/// Registers the bit-arithmetic table and the common gate columns
/// (re-using `shared_advice` across chips) and returns the combined config.
pub fn configure(
    cs: &mut ConstraintSystem<F>,
    shared_advice: &Vec<Column<Advice>>,
) -> KeccakGateConfig {
    let bitsarithconfig = BitsArithChip::<F>::configure(cs);
    KeccakGateConfig {
        arith: bitsarithconfig.clone(),
        common: CommonGateConfig::configure(cs, &bitsarithconfig, shared_advice),
    }
}
&Region, 75 | offset: &mut usize, 76 | values: &[Limb; RATE_LANES], 77 | reset: &Limb, 78 | ) -> Result<[Limb; 4], Error> { 79 | let mut new_state = self.keccak_state.default.clone(); 80 | for (x, (current_state, default)) in (self 81 | .keccak_state 82 | .state 83 | .iter() 84 | .zip(self.keccak_state.default.iter())) 85 | .enumerate() 86 | { 87 | for i in 0..5 { 88 | new_state[x][i] = self.config.common.select( 89 | region, 90 | &mut (), 91 | offset, 92 | &reset, 93 | ¤t_state[i], 94 | &default[i], 95 | self.round, 96 | )?; 97 | } 98 | } 99 | 100 | //absorb 101 | let mut x = 0; 102 | let mut y = 0; 103 | 104 | for i in 0..RATE_LANES { 105 | new_state[x][y] = self.keccak_state.xor( 106 | &self.config.common, 107 | region, 108 | offset, 109 | &new_state[x][y], 110 | &values[i], 111 | )?; 112 | if x < 5 - 1 { 113 | x += 1; 114 | } else { 115 | y += 1; 116 | x = 0; 117 | } 118 | } 119 | 120 | self.keccak_state.state = new_state; 121 | 122 | self.keccak_state 123 | .permute(&self.config.common, region, offset)?; 124 | 125 | let part0 = self.keccak_state.state[0][0].clone(); 126 | let part1 = self.keccak_state.state[1][0].clone(); 127 | let part2 = self.keccak_state.state[2][0].clone(); 128 | let part3 = self.keccak_state.state[3][0].clone(); 129 | 130 | Ok([part0, part1, part2, part3]) 131 | } 132 | 133 | pub(crate) fn assign_permute( 134 | &mut self, 135 | region: &Region, 136 | offset: &mut usize, 137 | values: &[Limb; RATE_LANES], 138 | reset: &Limb, 139 | result: &[Limb; 4], 140 | ) -> Result<(), Error> { 141 | let r = self.get_permute_result(region, offset, values, reset)?; 142 | for (r, result) in r.iter().zip(result.iter()) { 143 | assert_eq!(r.value, result.value); 144 | region.constrain_equal( 145 | result.cell.as_ref().unwrap().cell(), 146 | r.cell.as_ref().unwrap().cell(), 147 | )?; 148 | } 149 | Ok(()) 150 | } 151 | } 152 | 153 | impl KeccakState { 154 | pub fn initialize( 155 | &mut self, 156 | config: &CommonGateConfig, 157 | region: &Region, 158 | 
/// Prints the current 5x5 lane state as hex, one row per line (debug aid).
pub fn debug(&mut self) {
    println!("debug state");
    for i in 0..5 {
        let c = self.state[i]
            .clone()
            .map(|x| format!("{:02x}", field_to_u64(&x.value)))
            .join("-");
        println!("state({}): {}", i, c);
    }
}

// Combine for optimization opportunity, i.e. reduce decompose count
/// Computes `a ^ ((!b) & c)` — the keccak chi-step combination — as one
/// fused lookup sequence instead of two separate ops.
///
/// The five `decompose_bytes` calls must stay in exactly this order:
/// the first call of each lookup group carries the opcode tag
/// (BIT_NOT_AND, then BIT_XOR) and the following rows supply the
/// operand/result bytes the bit-arith table matches against.
pub fn xor_not_and(
    &self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
    a: &Limb<F>,
    b: &Limb<F>,
    c: &Limb<F>,
) -> Result<Limb<F>, Error> {
    // Witness-side computation of the expected intermediate and result.
    let d = (!field_to_u64(&b.value)) & field_to_u64(&c.value);
    let e = field_to_u64(&a.value) ^ d;
    let not_b_and_c = Limb::new(None, F::from(d));
    let res = Limb::new(None, F::from(e)); // reference
    // Lookup group 1: not_b_and_c = (!b) & c, tagged BIT_NOT_AND.
    config.decompose_bytes(region, offset, b, 0, BIT_NOT_AND as u64)?;
    config.decompose_bytes(region, offset, c, 0, 0)?;
    // Lookup group 2: res = not_b_and_c ^ a, tagged BIT_XOR.
    config.decompose_bytes(region, offset, &not_b_and_c, 0, BIT_XOR as u64)?;
    config.decompose_bytes(region, offset, a, 0, 0)?;
    let (output, _) = config.decompose_bytes(region, offset, &res, 0, 0)?;

    // Sanity check: assigned output must equal the recomputed value.
    {
        let a = field_to_u64(&a.value);
        let b = field_to_u64(&b.value);
        let c = field_to_u64(&c.value);
        let res = a ^ ((!b) & c);
        assert_eq!(F::from(res), output.value);
    }
    Ok(output)
}
/// Rotates a 64-bit lane left by `n` bits in-circuit.
///
/// The rotation is split into a whole-byte move (`chunk = n / 8`, handled
/// by `decompose_bytes`' chunk argument) and a residual bit shift
/// (`rem = n % 8`, handled by the BIT_ROTATE_LEFT + rem lookup table).
/// The two `assign_witness` rows re-expose the input bytes in rotated
/// order so the lookup can pair each byte with its neighbour.
pub fn rotate_left(
    &self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
    input: &Limb<F>,
    n: usize,
) -> Result<Limb<F>, Error> {
    // Witness-side expected result.
    let v = field_to_u64(&input.value).rotate_left(n as u32);
    let chunk = n / 8; // how many chunks we have to move
    let rem = n % 8; // how many bits we have to move
    let (_, bytes) = config.decompose_bytes(
        region,
        offset,
        input,
        chunk,
        (BIT_ROTATE_LEFT as usize + rem) as u64,
    )?;
    // Re-lay the bytes shifted by one position (7,0,1,2 / 3,4,5,6) so each
    // lookup row sees a byte and its wrap-around neighbour.
    config.assign_witness(
        region,
        &mut (),
        offset,
        [
            Some(bytes[7].clone()),
            Some(bytes[0].clone()),
            Some(bytes[1].clone()),
            Some(bytes[2].clone()),
            None,
        ],
        0,
    )?;
    config.assign_witness(
        region,
        &mut (),
        offset,
        [
            Some(bytes[3].clone()),
            Some(bytes[4].clone()),
            Some(bytes[5].clone()),
            Some(bytes[6].clone()),
            None,
        ],
        0,
    )?;
    let (v, bs) = config.decompose_bytes(region, offset, &Limb::new(None, F::from(v)), 0, 0)?;
    // Witness-side spot check of the per-byte rotation relation.
    // NOTE(review): checks indices 0..7 only (7 of the 8 bytes) —
    // presumably the last byte is implied by the others; confirm.
    for i in 0..7 {
        let op1 = field_to_u64(&bytes[i].value);
        let op2 = field_to_u64(&bytes[(i + 7) % 8].value);
        let op3 = field_to_u64(&bs[i].value);
        if rem == 0 {
            assert_eq!(op1, op3);
        } else {
            assert_eq!(((op1 << rem) & 0xff) + (op2 >> (8 - rem)), op3);
        }
    }
    Ok(v)
}
/// Keccak rho step: rotate every lane left by its positional rotation
/// constant. The x-major, y-minor iteration order must not change — each
/// `rotate_left` call consumes circuit rows in sequence.
pub fn rho(
    &mut self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
) -> Result<(), Error> {
    let mut out = self.default.clone();

    for x in 0..5 {
        for y in 0..5 {
            let rc = ROTATION_CONSTANTS[x][y];
            let rotate_limb = self.rotate_left(
                config,
                region,
                offset,
                &self.state[x][y],
                rc.try_into().unwrap(),
            )?;
            out[x][y] = rotate_limb;
        }
    }

    self.state = out;

    Ok(())
}

/// Keccak pi step: pure lane permutation
/// `out[y][(2x + 3y) % 5] = state[x][y]`. Assigns no circuit rows, hence
/// the unused parameters (kept for a uniform step signature).
pub fn pi(
    &mut self,
    _config: &CommonGateConfig,
    _region: &Region<F>,
    _offset: &mut usize,
) -> Result<(), Error> {
    let mut out = self.default.clone();

    for x in 0..5 {
        for y in 0..5 {
            out[y][(2 * x + 3 * y) % 5] = self.state[x][y].clone();
        }
    }

    self.state = out;
    Ok(())
}
/// Keccak iota step: xor the round constant into lane (0, 0).
pub fn iota(
    &mut self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
    round: usize,
) -> Result<(), Error> {
    self.state[0][0] = self.xor(config, region, offset, &self.state[0][0], &self.rc[round])?;
    Ok(())
}

/// One full keccak-f round: theta, rho, pi, chi (`xi`), iota — in the
/// standard order. Do not reorder: besides the spec, every sub-step
/// consumes circuit rows sequentially via `offset`.
pub fn round(
    &mut self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
    round: usize,
) -> Result<(), Error> {
    self.theta(config, region, offset)?;
    self.rho(config, region, offset)?;
    self.pi(config, region, offset)?;
    self.xi(config, region, offset)?;
    self.iota(config, region, offset, round)?;
    Ok(())
}

/// Runs all N_R rounds of keccak-f over the current state in place.
pub fn permute(
    &mut self,
    config: &CommonGateConfig,
    region: &Region<F>,
    offset: &mut usize,
) -> Result<(), Error> {
    for round in 0..N_R {
        Self::round(self, config, region, offset, round)?;
    }

    Ok(())
}
/// In-circuit image of a Merkle proof (see the tree diagram above): the
/// leaf (`source`), the claimed `root`, the D sibling hashes (`assist`)
/// along the path, and the leaf index (`address`), plus cached 0/1
/// constants reused by select operations.
pub struct MerkleProofState<F: FieldExt, const D: usize> {
    pub source: Limb<F>,
    pub root: Limb<F>, // last is root
    pub assist: [Limb<F>; D],
    pub address: Limb<F>,
    pub zero: Limb<F>,
    pub one: Limb<F>,
}

impl<F: FieldExt, const D: usize> MerkleProofState<F, D> {
    /// All-zero placeholder state; cells stay unassigned (`None`) until a
    /// proof is laid out.
    fn default() -> Self {
        MerkleProofState {
            source: Limb::new(None, F::zero()),
            root: Limb::new(None, F::zero()),
            address: Limb::new(None, F::zero()),
            assist: [0; D].map(|_| Limb::new(None, F::zero())),
            zero: Limb::new(None, F::zero()),
            one: Limb::new(None, F::one()),
        }
    }
}
/// Lays out one Merkle get/set operation and constrains the recomputed
/// root against the claimed one.
///
/// * `opcode` — host-call opcode; equality with `MerkleSet` selects
///   set-mode (`is_set`).
/// * `address` — leaf index, bit-decomposed into the D path directions.
/// * `root` / `new_root` — pre/post operation roots; which one the
///   recomputed root is constrained against depends on `is_set`.
/// * `value` — the two leaf data limbs hashed into the leaf node.
pub fn assign_proof(
    &mut self,
    region: &Region<Fr>,
    offset: &mut usize,
    proof: &MerkleProof<[u8; 32], D>,
    opcode: &Limb<Fr>,
    address: &Limb<Fr>,
    root: &Limb<Fr>,
    new_root: &Limb<Fr>,
    value: [&Limb<Fr>; 2],
) -> Result<(), Error> {
    let is_set = self.config.eq_constant(
        region,
        &mut (),
        offset,
        opcode,
        &Fr::from(MerkleSet as u64),
    )?;

    // Assign the proof's sibling hashes as witnesses, 5 limbs per row
    // (padded with None), then flatten back to a single list.
    let fills = proof
        .assist
        .to_vec()
        .iter()
        .map(|x| Some(Limb::new(None, bytes_to_field(&x))))
        .collect::<Vec<_>>();
    let new_assist: Vec<Limb<Fr>> = fills
        .chunks(5)
        .collect::<Vec<_>>()
        .iter()
        .map(|&values| {
            let mut v = values.to_vec();
            v.resize_with(5, || None);
            self.config
                .assign_witness(region, &mut (), offset, v.try_into().unwrap(), 0)
                .unwrap()
        })
        .collect::<Vec<Vec<Limb<Fr>>>>()
        .into_iter()
        .flatten()
        .collect::<Vec<_>>();
    // Select between the previously-stored assist and the freshly supplied
    // one depending on set-mode, then force the selection to equal the new
    // assist. NOTE(review): the exact branch `select` takes on `is_set`
    // depends on CommonGateConfig::select's argument convention — confirm
    // against its definition.
    let compare_assist: [_; D] = self
        .state
        .assist
        .iter()
        .zip(new_assist.iter())
        .map(|(old, new)| {
            self.config
                .select(region, &mut (), offset, &is_set, &new, &old, 0)
                .unwrap()
        })
        .collect::<Vec<_>>()
        .try_into()
        .unwrap();
    for (a, b) in compare_assist.to_vec().into_iter().zip(new_assist) {
        region.constrain_equal(a.get_the_cell().cell(), b.get_the_cell().cell())?;
    }
    self.state.assist = compare_assist.clone();

    // Decompose the leaf address into D direction bits, one per level.
    let mut positions = vec![];
    self.config
        .decompose_limb(region, &mut (), offset, &address, &mut positions, D)?;

    // position = 0 means assist is at right else assist is at left
    cfg_if::cfg_if! {
        if #[cfg(feature="complex-leaf")] {
            let values =
                [
                    value[0].clone(),
                    value[1].clone(),
                    self.state.one.clone(),
                    self.state.zero.clone(),
                    self.state.zero.clone(),
                    self.state.zero.clone(),
                    self.state.zero.clone(),
                    self.state.zero.clone(),
                ];
        } else {
            let values =
                [
                    value[0].clone(),
                    value[1].clone(),
                ];
        }
    };

    // Leaf hash must reproduce the proof's source node.
    let initial_hash = self.data_hasher_chip.get_permute_result(
        region,
        offset,
        &values,
        &self.state.one.clone(),
    )?;
    assert_eq!(field_to_bytes(&initial_hash.value), proof.source);

    // Fold from the leaf up to the root: at each level pick (acc, assist)
    // ordering from the path bit and hash the pair.
    let final_hash = positions
        .iter()
        .rev()
        .zip(compare_assist.iter().rev())
        .fold(initial_hash, |acc, (position, assist)| {
            let left = self
                .config
                .select(region, &mut (), offset, &position, &acc, &assist, 0)
                .unwrap();
            let right = self
                .config
                .select(region, &mut (), offset, &position, &assist, &acc, 0)
                .unwrap();
            let hash = self
                .merkle_hasher_chip
                .get_permute_result(region, offset, &[left, right], &self.state.one.clone())
                .unwrap();
            //println!("position check: {} {:?} {:?}", position.value, acc.clone().value, assist.clone().value);
            hash
        });

    // Constrain the recomputed root against the root selected by the
    // operation mode.
    let desired_root =
        self.config
            .select(region, &mut (), offset, &is_set, root, new_root, 0)?;
    assert_eq!(desired_root.value, final_hash.value);
    region.constrain_equal(
        desired_root.cell.as_ref().unwrap().cell(),
        final_hash.cell.as_ref().unwrap().cell(),
    )?;
    Ok(())
}
#[rustfmt::skip]
customized_circuits!(RangeCheckConfig, 2, 3, 2, 0,
   | limb   |  acc   | rem   | table | sel
   | nil    |  acc_n | rem_n | nil   | sel_n
);

impl LookupAssistConfig for RangeCheckConfig {
    /// register a column (col) to be range checked by limb size (sz)
    fn register<F: FieldExt>(
        &self,
        cs: &mut ConstraintSystem<F>,
        cols: impl FnOnce(&mut VirtualCells<F>) -> Vec<Expression<F>>,
    ) {
        cs.lookup_any("check ranges", |meta| {
            let exprs = cols(meta);
            let acc = self.get_expr(meta, RangeCheckConfig::acc());
            let rem = self.get_expr(meta, RangeCheckConfig::rem());
            // exprs[0] is the value being checked and exprs[12] is its limb
            // count; callers pad the slots in between with constants (see the
            // test module's register call) — TODO confirm the full 13-slot
            // contract against the LookupAssistConfig trait users.
            vec![(exprs[0].clone(), acc), (exprs[12].clone(), rem)]
        });
    }
}

/// Chip that decomposes values into 12-bit limbs and proves each limb is
/// within [0, 2^12) via a fixed lookup table.
pub struct RangeCheckChip<F: FieldExt> {
    config: RangeCheckConfig,
    // next free row in the range-check region
    offset: usize,
    _marker: PhantomData<F>,
}

impl<F: FieldExt> LookupAssistChip<F> for RangeCheckChip<F> {
    /// Record (value, sz) so that other chips' lookups against acc/rem succeed.
    fn provide_lookup_evidence(
        &mut self,
        region: &Region<'_, F>,
        value: F,
        sz: u64,
    ) -> Result<(), Error> {
        self.assign_value_with_range(region, value, sz)
    }
}

impl<F: FieldExt> RangeCheckChip<F> {
    pub fn new(config: RangeCheckConfig) -> Self {
        RangeCheckChip {
            config,
            offset: 0,
            _marker: PhantomData,
        }
    }

    /// Allocate columns and define the range-check gates.
    pub fn configure(cs: &mut ConstraintSystem<F>) -> RangeCheckConfig {
        let witness = [0; 3].map(|_| cs.advice_column());
        witness.map(|x| cs.enable_equality(x));
        let fixed = [0; 2].map(|_| cs.fixed_column());
        let selector = [];

        let config = RangeCheckConfig {
            fixed,
            selector,
            witness,
        };

        // Range Check of all limbs: each limb cell must appear in the
        // 12-bit table column.
        cs.lookup_any("within ranges", |meta| {
            let limb = config.get_expr(meta, RangeCheckConfig::limb());
            let table = config.get_expr(meta, RangeCheckConfig::table());
            vec![(limb, table)]
        });

        // First we require the rem is continues if it is not zero
        // (rem decreases by exactly 1 per row until it hits zero).
        cs.create_gate("range check constraint", |meta| {
            let rem = config.get_expr(meta, RangeCheckConfig::rem());
            let rem_n = config.get_expr(meta, RangeCheckConfig::rem_n());
            let sel = config.get_expr(meta, RangeCheckConfig::sel());

            vec![sel * rem.clone() * (rem - rem_n - constant_from!(1))]
        });

        // Second we make sure if the rem is not zero then
        // carry = carry_n * 2^12 + limb
        cs.create_gate("limb acc constraint", |meta| {
            let limb = config.get_expr(meta, RangeCheckConfig::limb());
            let acc = config.get_expr(meta, RangeCheckConfig::acc());
            let acc_n = config.get_expr(meta, RangeCheckConfig::acc_n());
            let sel = config.get_expr(meta, RangeCheckConfig::sel());
            let sel_n = config.get_expr(meta, RangeCheckConfig::sel_n());

            vec![
                // acc = limb + acc_n * 2^12 (acc_n only counted while sel_n = 1)
                sel.clone() * (acc.clone() - limb - acc_n * constant_from!(1u64 << 12) * sel_n),
                // sel is boolean
                sel.clone() * (constant_from!(1) - sel.clone()),
                //(constant_from!(1) - sel) * acc, // if sel is 0 then acc must equal to 0
            ]
        });

        // The accumulator after the last active row must be zero.
        cs.create_gate("end with zero", |meta| {
            let sel = config.get_expr(meta, RangeCheckConfig::sel());
            let acc_n = config.get_expr(meta, RangeCheckConfig::acc_n());
            let sel_n = config.get_expr(meta, RangeCheckConfig::sel_n());
            vec![
                sel * acc_n * (constant_from!(1) - sel_n), // if sel is 0 then acc must equal to 0
            ]
        });

        config
    }

    /// Make sure the (value, sz) pair is lookupable in the range_chip.
    /// Lays out `sz` rows: row i holds (limb_i, running acc, rem = sz - i,
    /// sel = 1), followed by one all-zero terminator row.
    pub fn assign_value_with_range(
        &mut self,
        region: &Region<'_, F>,
        value: F,
        sz: u64,
    ) -> Result<(), Error> {
        let mut limbs = vec![];
        let mut bn = field_to_bn(&value);
        let mut cs = vec![];
        for _ in 0..sz {
            // acc before this limb is stripped, then the low 12 bits as limb.
            cs.push(bn_to_field(&bn));
            let limb = bn.modpow(&BigUint::from(1u128), &BigUint::from(1u128 << 12));
            bn = (bn - limb.clone()).div(BigUint::from(1u128 << 12));
            limbs.push(bn_to_field(&limb));
        }
        // NOTE: reverse-then-pop yields the elements back in push order.
        cs.reverse();
        limbs.reverse();
        for i in 0..sz {
            let limb = limbs.pop().unwrap();
            let acc = cs.pop().unwrap();
            self.config
                .assign_cell(region, self.offset, &RangeCheckConfig::limb(), limb)?;
            self.config
                .assign_cell(region, self.offset, &RangeCheckConfig::acc(), acc)?;
            self.config.assign_cell(
                region,
                self.offset,
                &RangeCheckConfig::rem(),
                F::from_u128((sz - i) as u128),
            )?;
            self.config
                .assign_cell(region, self.offset, &RangeCheckConfig::sel(), F::one())?;
            self.offset += 1;
        }
        // Terminator row: all zeros so the "end with zero" gate is satisfied.
        self.config
            .assign_cell(region, self.offset, &RangeCheckConfig::limb(), F::zero())?;
        self.config
            .assign_cell(region, self.offset, &RangeCheckConfig::acc(), F::zero())?;
        self.config
            .assign_cell(region, self.offset, &RangeCheckConfig::rem(), F::zero())?;
        self.config
            .assign_cell(region, self.offset, &RangeCheckConfig::sel(), F::zero())?;
        self.offset += 1;
        Ok(())
    }

    /// Fill the lookup table column with every 12-bit value 0..=4095
    /// (the loop runs 0..4096, not "1 to 2^12" as previously documented).
    /// Must be called before using the range chip. Also resets the row
    /// offset and seeds a zero value with 25 limbs — presumably the maximum
    /// limb count callers look up; TODO confirm why 25.
    pub fn initialize(&mut self, region: &Region<'_, F>) -> Result<(), Error> {
        for i in 0..4096 {
            self.config.assign_cell(
                region,
                i,
                &RangeCheckConfig::table(),
                F::from_u128(i as u128),
            )?;
        }
        self.offset = 0;
        self.assign_value_with_range(region, F::zero(), 25)?;
        Ok(())
    }
}
    /// Helper circuit: one equality-enabled advice column whose cells are
    /// registered with the range chip for lookup.
    #[derive(Clone, Debug)]
    pub struct HelperChipConfig {
        limb: Column<Advice>,
    }

    impl HelperChipConfig {
        // Expression handed to RangeCheckConfig::register as exprs[0].
        pub fn range_check_column(&self, cs: &mut VirtualCells<Fr>) -> Expression<Fr> {
            cs.query_advice(self.limb, Rotation::cur())
        }
    }

    #[derive(Clone, Debug)]
    pub struct HelperChip {
        config: HelperChipConfig,
    }

    impl Chip<Fr> for HelperChip {
        type Config = HelperChipConfig;
        type Loaded = ();

        fn config(&self) -> &Self::Config {
            &self.config
        }

        fn loaded(&self) -> &Self::Loaded {
            &()
        }
    }

    impl HelperChip {
        fn new(config: HelperChipConfig) -> Self {
            HelperChip { config }
        }

        fn configure(cs: &mut ConstraintSystem<Fr>) -> HelperChipConfig {
            let limb = cs.advice_column();
            cs.enable_equality(limb);
            HelperChipConfig { limb }
        }

        /// Assign one value into the helper column; this cell must then be
        /// provable in range via the range chip's acc/rem lookup.
        fn assign_value(
            &self,
            region: &Region<'_, Fr>,
            offset: &mut usize,
            value: Fr,
        ) -> Result<AssignedCell<Fr, Fr>, Error> {
            let c = region.assign_advice(
                || format!("assign input"),
                self.config.limb,
                *offset,
                || value_for_assign!(value),
            )?;
            *offset = *offset + 1;
            Ok(c)
        }
    }

    #[derive(Clone, Debug, Default)]
    struct TestCircuit {}

    #[derive(Clone, Debug)]
    struct TestConfig {
        rangecheckconfig: RangeCheckConfig,
        helperconfig: HelperChipConfig,
    }

    impl Circuit<Fr> for TestCircuit {
        type Config = TestConfig;
        type FloorPlanner = FlatFloorPlanner;

        fn without_witnesses(&self) -> Self {
            Self::default()
        }

        fn configure(meta: &mut ConstraintSystem<Fr>) -> Self::Config {
            let rangecheckconfig = RangeCheckChip::<Fr>::configure(meta);
            let helperconfig = HelperChip::configure(meta);

            // register() only reads exprs[0] (the value) and exprs[12] (the
            // limb count, here 4); the intermediate slots are unused padding.
            rangecheckconfig.register(meta, |c| {
                vec![
                    helperconfig.range_check_column(c),
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(0 as u64)), //not used
                    Expression::Constant(Fr::from(4 as u64)),
                ]
            });

            Self::Config {
                rangecheckconfig,
                helperconfig,
            }
        }

        fn synthesize(
            &self,
            config: Self::Config,
            layouter: impl Layouter<Fr>,
        ) -> Result<(), Error> {
            let helper_chip = HelperChip::new(config.clone().helperconfig);
            layouter.assign_region(
                || "range check test",
                |region| {
                    let mut range_chip = RangeCheckChip::<Fr>::new(config.clone().rangecheckconfig);
                    // NOTE(review): `1u64 << 24 + 1` parses as `1u64 << 25`
                    // because `+` binds tighter than `<<`. If `(1 << 24) + 1`
                    // was intended, parenthesize; both values fit in 4 limbs,
                    // so the test passes either way.
                    let v = Fr::from(1u64 << 24 + 1);
                    range_chip.initialize(&region)?;
                    range_chip.assign_value_with_range(&region, v, 4)?;

                    // assign helper
                    let mut offset = 0;
                    helper_chip.assign_value(&region, &mut offset, v)?;
                    Ok(())
                },
            )?;
            Ok(())
        }
    }

    #[test]
    fn test_range_circuit() {
        let test_circuit = TestCircuit {};
        let prover = MockProver::run(18, &test_circuit, vec![]).unwrap();
        assert_eq!(prover.verify(), Ok(()));
    }
ExternalHostCallEntryTable, ForeignInst}; 4 | use crate::utils::field_to_bn; 5 | use ff::Field; 6 | use halo2_proofs::pairing::bls12_381::pairing; 7 | use halo2_proofs::pairing::bls12_381::{ 8 | Fq as Bls381Fq, Fr, G1Affine, G2Affine, Gt as Bls381Gt, G1, G2, 9 | }; 10 | use halo2_proofs::pairing::group::Group; 11 | use num_bigint::BigUint; 12 | use rand::rngs::OsRng; 13 | use std::fs::File; 14 | use std::ops::Add; 15 | 16 | fn bls381_fr_to_args(f: Fr, op: ForeignInst) -> Vec { 17 | let mut bn = field_to_bn(&f); 18 | let mut ret = vec![]; 19 | for _ in 0..5 { 20 | let d: BigUint = BigUint::from(1u64 << 54); 21 | let r = bn.clone() % d.clone(); 22 | let value = if r == BigUint::from(0 as u32) { 23 | 0 as u64 24 | } else { 25 | r.to_u64_digits()[0] 26 | }; 27 | println!("d is {:?}, remainder is {:?}", d, r); 28 | bn = bn / d; 29 | let entry = ExternalHostCallEntry { 30 | op: op as usize, 31 | value, 32 | is_ret: false, 33 | }; 34 | ret.append(&mut vec![entry]); 35 | } 36 | ret 37 | } 38 | 39 | fn bls381_fq_to_args(f: Bls381Fq, op: ForeignInst) -> Vec { 40 | let mut bn = field_to_bn(&f); 41 | let mut ret = vec![]; 42 | for _ in 0..8 { 43 | let d: BigUint = BigUint::from(1u64 << 54); 44 | let r = bn.clone() % d.clone(); 45 | let value = if r == BigUint::from(0 as u32) { 46 | 0 as u64 47 | } else { 48 | r.to_u64_digits()[0] 49 | }; 50 | println!("d is {:?}, remainder is {:?}", d, r); 51 | bn = bn / d; 52 | let entry = ExternalHostCallEntry { 53 | op: op as usize, 54 | value, 55 | is_ret: false, 56 | }; 57 | ret.append(&mut vec![entry]); 58 | } 59 | ret 60 | } 61 | 62 | fn bls381_gt_to_pair_args(f: Bls381Gt) -> Vec { 63 | let c000 = bls381_fq_to_args(f.0.c0.c0.c0, ForeignInst::BlsPairG3); 64 | let c001 = bls381_fq_to_args(f.0.c0.c0.c1, ForeignInst::BlsPairG3); 65 | let c010 = bls381_fq_to_args(f.0.c0.c1.c0, ForeignInst::BlsPairG3); 66 | let c011 = bls381_fq_to_args(f.0.c0.c1.c1, ForeignInst::BlsPairG3); 67 | let c020 = bls381_fq_to_args(f.0.c0.c2.c0, 
ForeignInst::BlsPairG3); 68 | let c021 = bls381_fq_to_args(f.0.c0.c2.c1, ForeignInst::BlsPairG3); 69 | let c100 = bls381_fq_to_args(f.0.c1.c0.c0, ForeignInst::BlsPairG3); 70 | let c101 = bls381_fq_to_args(f.0.c1.c0.c1, ForeignInst::BlsPairG3); 71 | let c110 = bls381_fq_to_args(f.0.c1.c1.c0, ForeignInst::BlsPairG3); 72 | let c111 = bls381_fq_to_args(f.0.c1.c1.c1, ForeignInst::BlsPairG3); 73 | let c120 = bls381_fq_to_args(f.0.c1.c2.c0, ForeignInst::BlsPairG3); 74 | let c121 = bls381_fq_to_args(f.0.c1.c2.c1, ForeignInst::BlsPairG3); 75 | vec![ 76 | c000, c001, c010, c011, c020, c021, c100, c101, c110, c111, c120, c121, 77 | ] 78 | .into_iter() 79 | .flatten() 80 | .collect() 81 | } 82 | 83 | fn bls381_g1_to_args(g: G1Affine, op: ForeignInst) -> Vec { 84 | let mut a = bls381_fq_to_args(g.x, op); 85 | let mut b = bls381_fq_to_args(g.y, op); 86 | let z: u64 = g.is_identity().unwrap_u8() as u64; 87 | a.append(&mut b); 88 | a.append(&mut vec![ExternalHostCallEntry { 89 | op: op as usize, 90 | value: z, 91 | is_ret: false, 92 | }]); 93 | a 94 | } 95 | 96 | fn bls381_g2_to_pair_args(g: G2Affine) -> Vec { 97 | let x0 = bls381_fq_to_args(g.x.c0, ForeignInst::BlsPairG2); 98 | let x1 = bls381_fq_to_args(g.x.c1, ForeignInst::BlsPairG2); 99 | let y0 = bls381_fq_to_args(g.y.c0, ForeignInst::BlsPairG2); 100 | let y1 = bls381_fq_to_args(g.y.c1, ForeignInst::BlsPairG2); 101 | let z: u64 = g.is_identity().unwrap_u8() as u64; 102 | let zentry = ExternalHostCallEntry { 103 | op: ForeignInst::BlsPairG2 as usize, 104 | value: z, 105 | is_ret: false, 106 | }; 107 | vec![x0, x1, y0, y1, vec![zentry]] 108 | .into_iter() 109 | .flatten() 110 | .collect() 111 | } 112 | 113 | pub fn create_bls_pair_shared_table(a: G1Affine, b: G2Affine) -> ExternalHostCallEntryTable { 114 | let ab: Bls381Gt = pairing(&a, &b); 115 | let g1_args = bls381_g1_to_args(a, ForeignInst::BlsPairG1); 116 | let g2_args = bls381_g2_to_pair_args(b); 117 | let ab_args = bls381_gt_to_pair_args(ab); 118 | let table = 
ExternalHostCallEntryTable( 119 | vec![g1_args, g2_args, ab_args] 120 | .into_iter() 121 | .flatten() 122 | .collect(), 123 | ); 124 | table 125 | } 126 | 127 | fn create_bls_sum_input( 128 | new: u32, 129 | a: Fr, 130 | g: G1Affine, 131 | sum: G1Affine, 132 | ) -> Vec { 133 | let mut r = vec![]; 134 | r.append(&mut vec![ExternalHostCallEntry { 135 | op: ForeignInst::BlsSumNew as usize, 136 | value: new as u64, 137 | is_ret: false, 138 | }]); 139 | r.append(&mut bls381_fr_to_args(a, ForeignInst::BlsSumScalar)); 140 | r.append(&mut bls381_g1_to_args(g, ForeignInst::BlsSumG1)); 141 | r.append(&mut bls381_g1_to_args(sum, ForeignInst::BlsSumResult)); 142 | r 143 | } 144 | 145 | #[test] 146 | fn generate_bls_pair_input() { 147 | let a: G1Affine = G1::random(&mut OsRng).into(); 148 | let b: G2Affine = G2Affine::from(G2::random(&mut OsRng)); 149 | let table = create_bls_pair_shared_table(a, b); 150 | let file = File::create("blspairtest.json").expect("can not create file"); 151 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 152 | } 153 | 154 | #[test] 155 | fn generate_bls_sum_input() { 156 | let l = [2, 4, 3]; 157 | let mut inputs = vec![]; 158 | for i in 0..3 { 159 | let mut z = G1::identity(); 160 | for j in 0..l[i] { 161 | let new = if j == 0 { 1 } else { 0 }; 162 | let a_j = Fr::random(&mut OsRng); 163 | let g_j = G1::random(&mut OsRng); 164 | let r = g_j * a_j; 165 | z = z.add(r.clone()); 166 | inputs.append(&mut create_bls_sum_input( 167 | new, 168 | a_j, 169 | G1Affine::from(g_j), 170 | G1Affine::from(z), 171 | )); 172 | } 173 | } 174 | let table = ExternalHostCallEntryTable(inputs); 175 | let file = File::create("blssumtest.json").expect("can not create file"); 176 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 177 | } 178 | } 179 | -------------------------------------------------------------------------------- /src/host/bn256.rs: 
-------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use crate::host::{ExternalHostCallEntry, ExternalHostCallEntryTable, ForeignInst}; 4 | use crate::utils::field_to_bn; 5 | use ff::Field; 6 | use halo2_proofs::pairing::bn256::pairing; 7 | use halo2_proofs::pairing::bn256::Fr; 8 | use halo2_proofs::pairing::bn256::{Fq as Bn256Fq, G1Affine, G2Affine, Gt as Bn256Gt, G1, G2}; 9 | use halo2_proofs::pairing::group::Group; 10 | use num_bigint::BigUint; 11 | use rand::rngs::OsRng; 12 | use std::fs::File; 13 | use std::ops::Add; 14 | 15 | fn bn256_fr_to_args(f: Fr, op: ForeignInst) -> Vec { 16 | let mut bn = field_to_bn(&f); 17 | let mut ret = vec![]; 18 | for _ in 0..4 { 19 | let d: BigUint = BigUint::from(1u128 << 64); 20 | let r = bn.clone() % d.clone(); 21 | let value = if r == BigUint::from(0 as u32) { 22 | 0 as u64 23 | } else { 24 | r.to_u64_digits()[0] 25 | }; 26 | println!("d is {:?}, remainder is {:?}", d, r); 27 | bn = bn / d; 28 | let entry = ExternalHostCallEntry { 29 | op: op as usize, 30 | value, 31 | is_ret: false, 32 | }; 33 | ret.append(&mut vec![entry]); 34 | } 35 | ret 36 | } 37 | 38 | fn bn256_fq_to_args(f: Bn256Fq, op: ForeignInst) -> Vec { 39 | let mut bn = field_to_bn(&f); 40 | let mut ret = vec![]; 41 | for _ in 0..5 { 42 | let d: BigUint = BigUint::from(1u64 << 54); 43 | let r = bn.clone() % d.clone(); 44 | let value = if r == BigUint::from(0 as u32) { 45 | 0 as u64 46 | } else { 47 | r.to_u64_digits()[0] 48 | }; 49 | println!("d is {:?}, remainder is {:?}", d, r); 50 | bn = bn / d; 51 | let entry = ExternalHostCallEntry { 52 | op: op as usize, 53 | value, 54 | is_ret: false, 55 | }; 56 | ret.append(&mut vec![entry]); 57 | } 58 | ret 59 | } 60 | 61 | fn bn256_gt_to_pair_args(f: Bn256Gt) -> Vec { 62 | let c000 = bn256_fq_to_args(f.0.c0.c0.c0, ForeignInst::Bn254PairG3); 63 | let c001 = bn256_fq_to_args(f.0.c0.c0.c1, ForeignInst::Bn254PairG3); 64 | let c010 = 
bn256_fq_to_args(f.0.c0.c1.c0, ForeignInst::Bn254PairG3); 65 | let c011 = bn256_fq_to_args(f.0.c0.c1.c1, ForeignInst::Bn254PairG3); 66 | let c020 = bn256_fq_to_args(f.0.c0.c2.c0, ForeignInst::Bn254PairG3); 67 | let c021 = bn256_fq_to_args(f.0.c0.c2.c1, ForeignInst::Bn254PairG3); 68 | let c100 = bn256_fq_to_args(f.0.c1.c0.c0, ForeignInst::Bn254PairG3); 69 | let c101 = bn256_fq_to_args(f.0.c1.c0.c1, ForeignInst::Bn254PairG3); 70 | let c110 = bn256_fq_to_args(f.0.c1.c1.c0, ForeignInst::Bn254PairG3); 71 | let c111 = bn256_fq_to_args(f.0.c1.c1.c1, ForeignInst::Bn254PairG3); 72 | let c120 = bn256_fq_to_args(f.0.c1.c2.c0, ForeignInst::Bn254PairG3); 73 | let c121 = bn256_fq_to_args(f.0.c1.c2.c1, ForeignInst::Bn254PairG3); 74 | vec![ 75 | c000, c001, c010, c011, c020, c021, c100, c101, c110, c111, c120, c121, 76 | ] 77 | .into_iter() 78 | .flatten() 79 | .collect() 80 | } 81 | 82 | fn bn256_g1_to_args(g: G1, op: ForeignInst) -> Vec { 83 | let g_af = G1Affine::from(g); 84 | let mut a = bn256_fq_to_args(g_af.x, op); 85 | let mut b = bn256_fq_to_args(g_af.y, op); 86 | let z: u64 = g.is_identity().unwrap_u8() as u64; 87 | a.append(&mut b); 88 | a.append(&mut vec![ExternalHostCallEntry { 89 | op: op as usize, 90 | value: z, 91 | is_ret: false, 92 | }]); 93 | a 94 | } 95 | 96 | fn bn256_g2_to_pair_args(g: G2) -> Vec { 97 | let g_af = G2Affine::from(g); 98 | let x0 = bn256_fq_to_args(g_af.x.c0, ForeignInst::Bn254PairG2); 99 | let x1 = bn256_fq_to_args(g_af.x.c1, ForeignInst::Bn254PairG2); 100 | let y0 = bn256_fq_to_args(g_af.y.c0, ForeignInst::Bn254PairG2); 101 | let y1 = bn256_fq_to_args(g_af.y.c1, ForeignInst::Bn254PairG2); 102 | let z: u64 = g.is_identity().unwrap_u8() as u64; 103 | let zentry = ExternalHostCallEntry { 104 | op: ForeignInst::Bn254PairG2 as usize, 105 | value: z, 106 | is_ret: false, 107 | }; 108 | vec![x0, x1, y0, y1, vec![zentry]] 109 | .into_iter() 110 | .flatten() 111 | .collect() 112 | } 113 | 114 | fn create_bn256_pair_shared_table(a: G1, b: G2) -> 
ExternalHostCallEntryTable { 115 | let a_af = G1Affine::from(a); 116 | let b_af = G2Affine::from(b); 117 | let ab: Bn256Gt = pairing(&a_af, &b_af); 118 | let g1_args = bn256_g1_to_args(a, ForeignInst::Bn254PairG1); 119 | let g2_args = bn256_g2_to_pair_args(b); 120 | let ab_args = bn256_gt_to_pair_args(ab); 121 | let table = ExternalHostCallEntryTable( 122 | vec![g1_args, g2_args, ab_args] 123 | .into_iter() 124 | .flatten() 125 | .collect(), 126 | ); 127 | table 128 | } 129 | 130 | fn create_bn256_sum_input(new: u32, a: Fr, g: G1, sum: G1) -> Vec { 131 | let mut r = vec![]; 132 | r.append(&mut vec![ExternalHostCallEntry { 133 | op: ForeignInst::Bn254SumNew as usize, 134 | value: new as u64, 135 | is_ret: false, 136 | }]); 137 | r.append(&mut bn256_fr_to_args(a, ForeignInst::Bn254SumScalar)); 138 | r.append(&mut bn256_g1_to_args(g, ForeignInst::Bn254SumG1)); 139 | r.append(&mut bn256_g1_to_args(sum, ForeignInst::Bn254SumResult)); 140 | r 141 | } 142 | #[test] 143 | fn generate_bn256_pair_input() { 144 | let a = G1::random(&mut OsRng); 145 | let b = G2::random(&mut OsRng); 146 | let table = create_bn256_pair_shared_table(a, b); 147 | let file = File::create("bn256pairtest.json").expect("can not create file"); 148 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 149 | } 150 | 151 | #[test] 152 | fn generate_bn256_sum_input() { 153 | let l = [2, 4, 3]; 154 | let mut inputs = vec![]; 155 | for i in 0..3 { 156 | let mut z = G1::identity(); 157 | for j in 0..l[i] { 158 | let new = if j == 0 { 1 } else { 0 }; 159 | let a_j = Fr::random(&mut OsRng); 160 | let g_j = G1::random(&mut OsRng); 161 | let r = g_j * a_j; 162 | z = z.add(r.clone()); 163 | inputs.append(&mut create_bn256_sum_input(new, a_j, g_j, z)); 164 | } 165 | } 166 | let table = ExternalHostCallEntryTable(inputs); 167 | let file = File::create("bn256sumtest.json").expect("can not create file"); 168 | serde_json::to_writer_pretty(file, &table).expect("can not write to file"); 169 | } 
// If maxsize is set to None, the LRU feature is disabled and the cache can grow without bound.
// The LRU feature performs best when maxsize is a power-of-two.
const DEFAULT_CACHE_SIZE: usize = usize::pow(2, 24);

const ENV_CACHE_SIZE: &str = "ZKWASM_MERKLE_CACHE_SIZE";

/// Cache capacity in entries: taken from ZKWASM_MERKLE_CACHE_SIZE when it
/// parses to a positive integer, otherwise DEFAULT_CACHE_SIZE.
///
/// A value of "0" (or anything unparsable) falls back to the default: the
/// callers feed this into `NonZeroUsize::new(..).unwrap()`, which previously
/// panicked at startup when the env var was set to zero.
fn get_cache_size() -> usize {
    match std::env::var(ENV_CACHE_SIZE) {
        Ok(size_var) => match size_var.parse::<usize>() {
            Ok(n) if n > 0 => n,
            _ => DEFAULT_CACHE_SIZE,
        },
        Err(_) => DEFAULT_CACHE_SIZE,
    }
}
match Bson::deserialize(deserializer) { 22 | Ok(Bson::Binary(bytes)) => Ok(bytes.bytes.try_into().unwrap()), 23 | Ok(..) => Err(Error::invalid_value(Unexpected::Enum, &"Bson::Binary")), 24 | Err(e) => Err(e), 25 | } 26 | } 27 | 28 | fn deserialize_bytes_from_binary<'de, D>(deserializer: D) -> Result, D::Error> 29 | where 30 | D: Deserializer<'de>, 31 | { 32 | match Bson::deserialize(deserializer) { 33 | Ok(Bson::Binary(bytes)) => Ok(bytes.bytes.to_vec()), 34 | Ok(..) => Err(Error::invalid_value(Unexpected::Enum, &"Bson::Binary")), 35 | Err(e) => Err(e), 36 | } 37 | } 38 | 39 | fn serialize_bytes_as_binary(bytes: &[u8], serializer: S) -> Result 40 | where 41 | S: Serializer, 42 | { 43 | let binary = Bson::Binary(mongodb::bson::Binary { 44 | subtype: BinarySubtype::Generic, 45 | bytes: bytes.into(), 46 | }); 47 | binary.serialize(serializer) 48 | } 49 | 50 | /* 51 | fn hash_to_bson(x: &[u8; 32]) -> Bson { 52 | Bson::Binary(mongodb::bson::Binary { 53 | subtype: BinarySubtype::Generic, 54 | bytes: (*x).into(), 55 | }) 56 | } 57 | */ 58 | 59 | #[derive(Clone)] 60 | pub struct MongoDataHash { 61 | db: Rc>, 62 | } 63 | 64 | impl PartialEq for DataHashRecord { 65 | fn eq(&self, other: &Self) -> bool { 66 | self.hash == other.hash 67 | } 68 | fn ne(&self, other: &Self) -> bool { 69 | !self.eq(other) 70 | } 71 | } 72 | 73 | impl MongoDataHash { 74 | pub fn construct(addr: [u8; 32], db: Option>>) -> Self { 75 | MongoDataHash { 76 | db: db.unwrap_or_else(|| Rc::new(RefCell::new(MongoDB::new(addr, None)))), 77 | } 78 | } 79 | 80 | pub fn get_record(&self, hash: &[u8; 32]) -> Result, anyhow::Error> { 81 | let record = self.db.borrow().get_data_record(hash); 82 | record 83 | } 84 | 85 | /* We always insert new record as there might be uncommitted update to the merkle tree */ 86 | pub fn update_record(&mut self, record: DataHashRecord) -> Result<(), anyhow::Error> { 87 | self.db.borrow_mut().set_data_record(record.clone())?; 88 | Ok(()) 89 | } 90 | } 91 | 92 | #[derive(Debug, 
Serialize, Deserialize, Clone)] 93 | pub struct DataHashRecord { 94 | #[serde(serialize_with = "self::serialize_bytes_as_binary")] 95 | #[serde(deserialize_with = "self::deserialize_u256_from_binary")] 96 | #[serde(rename = "_id")] 97 | pub hash: [u8; 32], 98 | #[serde(serialize_with = "self::serialize_bytes_as_binary")] 99 | #[serde(deserialize_with = "self::deserialize_bytes_from_binary")] 100 | pub data: Vec, 101 | } 102 | 103 | impl DataHashRecord { 104 | /// 将DataHashRecord转换为Vec 105 | pub fn to_slice(&self) -> Vec { 106 | let mut result = Vec::new(); 107 | 108 | // hash ([u8; 32]) 109 | result.extend_from_slice(&self.hash); 110 | 111 | // data (Vec) 112 | // firstly len 113 | let data_len = self.data.len() as u32; 114 | result.extend_from_slice(&data_len.to_le_bytes()); 115 | 116 | // then content 117 | result.extend_from_slice(&self.data); 118 | 119 | result 120 | } 121 | 122 | /// deserialze from Vec to DataHashRecord 123 | pub fn from_slice(slice: &[u8]) -> Result { 124 | if slice.len() < 32 { 125 | return Err(anyhow::anyhow!("Slice too short for hash")); 126 | } 127 | 128 | let mut pos = 0; 129 | 130 | // hash 131 | let mut hash = [0u8; 32]; 132 | hash.copy_from_slice(&slice[pos..pos+32]); 133 | pos += 32; 134 | 135 | // data 136 | // first, length 137 | if slice.len() < pos + 4 { 138 | return Err(anyhow::anyhow!("Slice too short for data length")); 139 | } 140 | let mut len_bytes = [0u8; 4]; 141 | len_bytes.copy_from_slice(&slice[pos..pos+4]); 142 | let data_len = u32::from_le_bytes(len_bytes) as usize; 143 | pos += 4; 144 | 145 | // then content 146 | if slice.len() < pos + data_len { 147 | return Err(anyhow::anyhow!("Slice too short for data content")); 148 | } 149 | let data = slice[pos..pos+data_len].to_vec(); 150 | 151 | Ok(DataHashRecord { 152 | hash, 153 | data, 154 | }) 155 | } 156 | } 157 | 158 | impl DataHashRecord { 159 | pub fn new(&mut self, data: &Vec) -> Self { 160 | let mut hasher = POSEIDON_HASHER.clone(); 161 | let batchdata = data 162 
| .chunks(16) 163 | .into_iter() 164 | .map(|x| { 165 | let mut v = x.to_vec(); 166 | v.extend_from_slice(&[0u8; 16]); 167 | let f = v.try_into().unwrap(); 168 | Fr::from_repr(f).unwrap() 169 | }) 170 | .collect::>(); 171 | hasher.update(&batchdata.as_slice()); 172 | DataHashRecord { 173 | data: data.clone().try_into().unwrap(), 174 | hash: hasher.squeeze().to_repr(), 175 | } 176 | } 177 | pub fn data_as_u64(&self) -> [u64; 4] { 178 | [ 179 | u64::from_le_bytes(self.data[0..8].try_into().unwrap()), 180 | u64::from_le_bytes(self.data[8..16].try_into().unwrap()), 181 | u64::from_le_bytes(self.data[16..24].try_into().unwrap()), 182 | u64::from_le_bytes(self.data[24..32].try_into().unwrap()), 183 | ] 184 | } 185 | } 186 | 187 | #[cfg(test)] 188 | mod tests {} 189 | -------------------------------------------------------------------------------- /src/host/db.rs: -------------------------------------------------------------------------------- 1 | use mongodb::bson::{spec::BinarySubtype, to_bson, Bson}; 2 | use mongodb::error::{Error, ErrorKind}; 3 | use mongodb::options::{InsertManyOptions, UpdateOptions}; 4 | use mongodb::results::InsertManyResult; 5 | use mongodb::{ 6 | bson::doc, 7 | options::DropCollectionOptions, 8 | sync::{Client, Collection}, 9 | }; 10 | 11 | use crate::host::datahash::DataHashRecord; 12 | use crate::host::mongomerkle::MerkleRecord; 13 | use anyhow::Result; 14 | use rocksdb::{DB, Options, WriteBatch}; 15 | use std::path::Path; 16 | use std::sync::Arc; 17 | 18 | pub const MONGODB_DATABASE: &str = "zkwasm-mongo-merkle"; 19 | pub const MONGODB_MERKLE_NAME_PREFIX: &str = "MERKLEDATA"; 20 | pub const MONGODB_DATA_NAME_PREFIX: &str = "DATAHASH"; 21 | const DUPLICATE_KEY_ERROR_CODE: i32 = 11000; 22 | 23 | pub trait TreeDB { 24 | fn get_merkle_record(&self, hash: &[u8; 32]) -> Result, anyhow::Error>; 25 | 26 | fn set_merkle_record(&mut self, record: MerkleRecord) -> Result<(), anyhow::Error>; 27 | 28 | fn set_merkle_records(&mut self, records: &Vec) -> 
Result<(), anyhow::Error>; 29 | 30 | fn get_data_record(&self, hash: &[u8; 32]) -> Result, anyhow::Error>; 31 | 32 | fn set_data_record(&mut self, record: DataHashRecord) -> Result<(), anyhow::Error>; 33 | } 34 | 35 | #[derive(Clone)] 36 | pub struct MongoDB { 37 | cname_id: [u8; 32], 38 | client: Client, 39 | } 40 | 41 | impl MongoDB { 42 | pub fn new(cname_id: [u8; 32], uri: Option) -> Self { 43 | let uri = uri.map_or("mongodb://localhost:27017".to_string(), |x| x.clone()); 44 | let client = Client::with_uri_str(uri).expect("Unexpected DB Error"); 45 | Self { cname_id, client } 46 | } 47 | } 48 | 49 | impl MongoDB { 50 | pub fn get_collection( 51 | &self, 52 | database: String, 53 | name: String, 54 | ) -> Result, mongodb::error::Error> { 55 | let database = self.client.database(database.as_str()); 56 | let collection = database.collection::(name.as_str()); 57 | Ok(collection) 58 | } 59 | 60 | pub fn get_database_client(&self) -> Result<&Client, mongodb::error::Error> { 61 | Ok(&self.client) 62 | } 63 | 64 | pub fn drop_collection( 65 | &self, 66 | database: String, 67 | name: String, 68 | ) -> Result<(), mongodb::error::Error> { 69 | let collection = self.get_collection::(database, name)?; 70 | let options = DropCollectionOptions::builder().build(); 71 | collection.drop(options) 72 | } 73 | 74 | pub fn merkel_collection(&self) -> Result, mongodb::error::Error> { 75 | let cname = get_collection_name(MONGODB_MERKLE_NAME_PREFIX.to_string(), self.cname_id); 76 | self.get_collection::(MONGODB_DATABASE.to_string(), cname.to_string()) 77 | } 78 | 79 | pub fn data_collection(&self) -> Result, mongodb::error::Error> { 80 | let cname = get_collection_name(MONGODB_DATA_NAME_PREFIX.to_string(), self.cname_id); 81 | self.get_collection::(MONGODB_DATABASE.to_string(), cname.to_string()) 82 | } 83 | } 84 | 85 | impl TreeDB for MongoDB { 86 | fn get_merkle_record(&self, hash: &[u8; 32]) -> Result, anyhow::Error> { 87 | let collection = self.merkel_collection()?; 88 | let mut 
filter = doc! {}; 89 | filter.insert("_id", u256_to_bson(hash)); 90 | let record = collection.find_one(filter, None)?; 91 | Ok(record) 92 | } 93 | 94 | fn set_merkle_record(&mut self, record: MerkleRecord) -> Result<(), anyhow::Error> { 95 | let options = UpdateOptions::builder().upsert(true).build(); 96 | let mut filter = doc! {}; 97 | filter.insert("_id", u256_to_bson(&record.hash)); 98 | let record_doc = to_bson(&record).unwrap().as_document().unwrap().to_owned(); 99 | let update = doc! {"$set": record_doc}; 100 | let collection = self.merkel_collection()?; 101 | collection.update_one(filter, update, options)?; 102 | Ok(()) 103 | } 104 | 105 | fn set_merkle_records(&mut self, records: &Vec) -> Result<(), anyhow::Error> { 106 | let options = InsertManyOptions::builder().ordered(false).build(); 107 | let collection = self.merkel_collection()?; 108 | let ret = collection.insert_many(records, options); 109 | if let Some(e) = filter_duplicate_key_error(ret) { 110 | return Err(e.into()); 111 | } 112 | Ok(()) 113 | } 114 | 115 | fn get_data_record(&self, hash: &[u8; 32]) -> Result, anyhow::Error> { 116 | let collection = self.data_collection()?; 117 | let mut filter = doc! {}; 118 | filter.insert("_id", u256_to_bson(hash)); 119 | collection.find_one(filter, None).map_err(|e| e.into()) 120 | } 121 | 122 | fn set_data_record(&mut self, record: DataHashRecord) -> Result<(), anyhow::Error> { 123 | let options = UpdateOptions::builder().upsert(true).build(); 124 | let mut filter = doc! {}; 125 | filter.insert("_id", u256_to_bson(&record.hash)); 126 | let record_doc = to_bson(&record).unwrap().as_document().unwrap().to_owned(); 127 | let update = doc! 
{"$set": record_doc}; 128 | let collection = self.data_collection()?; 129 | collection.update_one(filter, update, options)?; 130 | Ok(()) 131 | } 132 | } 133 | 134 | pub fn filter_duplicate_key_error( 135 | result: mongodb::error::Result, 136 | ) -> Option { 137 | match result { 138 | Ok(_) => None, 139 | Err(err) => { 140 | if let ErrorKind::BulkWrite(we) = err.kind.as_ref() { 141 | if let Some(write_errors) = &we.write_errors { 142 | if write_errors 143 | .iter() 144 | .all(|we| we.code == DUPLICATE_KEY_ERROR_CODE) 145 | { 146 | return None; 147 | } 148 | } 149 | } 150 | Some(err) 151 | } 152 | } 153 | } 154 | 155 | pub fn u256_to_bson(x: &[u8; 32]) -> Bson { 156 | Bson::Binary(mongodb::bson::Binary { 157 | subtype: BinarySubtype::Generic, 158 | bytes: (*x).into(), 159 | }) 160 | } 161 | 162 | pub fn u64_to_bson(x: u64) -> Bson { 163 | Bson::Binary(mongodb::bson::Binary { 164 | subtype: BinarySubtype::Generic, 165 | bytes: x.to_le_bytes().to_vec(), 166 | }) 167 | } 168 | 169 | pub fn get_collection( 170 | client: &Client, 171 | database: String, 172 | name: String, 173 | ) -> Result, mongodb::error::Error> { 174 | let database = client.database(database.as_str()); 175 | let collection = database.collection::(name.as_str()); 176 | Ok(collection) 177 | } 178 | 179 | pub fn get_collection_name(name_prefix: String, id: [u8; 32]) -> String { 180 | format!("{}_{}", name_prefix, hex::encode(id)) 181 | } 182 | 183 | 184 | pub struct RocksDB { 185 | db: Arc, 186 | merkle_cf_name: String, 187 | data_cf_name: String, 188 | } 189 | 190 | impl Clone for RocksDB { 191 | fn clone(&self) -> Self { 192 | RocksDB { 193 | db: Arc::clone(&self.db), 194 | merkle_cf_name: self.merkle_cf_name.clone(), 195 | data_cf_name: self.data_cf_name.clone(), 196 | } 197 | } 198 | } 199 | impl RocksDB { 200 | // create RocksDB 201 | pub fn new>(path: P) -> Result { 202 | let merkle_cf_name = "merkle_records"; 203 | let data_cf_name = "data_records"; 204 | 205 | let mut opts = Options::default(); 
206 | opts.create_if_missing(true); 207 | opts.create_missing_column_families(true); 208 | 209 | let cfs = vec![merkle_cf_name, data_cf_name]; 210 | let db = DB::open_cf(&opts, path, cfs)?; 211 | 212 | Ok(Self { 213 | db: Arc::new(db), 214 | merkle_cf_name: merkle_cf_name.to_string(), 215 | data_cf_name: data_cf_name.to_string(), 216 | }) 217 | } 218 | 219 | // 清空数据库 220 | pub fn clear(&self) -> Result<()> { 221 | // clear merkle records 222 | let merkle_cf = self.db.cf_handle(&self.merkle_cf_name) 223 | .ok_or_else(|| anyhow::anyhow!("Merkle column family not found"))?; 224 | 225 | let iter = self.db.iterator_cf(merkle_cf, rocksdb::IteratorMode::Start); 226 | let mut batch = WriteBatch::default(); 227 | 228 | for item in iter { 229 | let (key, _) = item?; 230 | batch.delete_cf(merkle_cf, &key); 231 | } 232 | 233 | // clear data records 234 | let data_cf = self.db.cf_handle(&self.data_cf_name) 235 | .ok_or_else(|| anyhow::anyhow!("Data column family not found"))?; 236 | 237 | let iter = self.db.iterator_cf(data_cf, rocksdb::IteratorMode::Start); 238 | 239 | for item in iter { 240 | let (key, _) = item?; 241 | batch.delete_cf(data_cf, &key); 242 | } 243 | 244 | self.db.write(batch)?; 245 | Ok(()) 246 | } 247 | } 248 | 249 | impl TreeDB for RocksDB { 250 | fn get_merkle_record(&self, hash: &[u8; 32]) -> Result> { 251 | let cf = self.db.cf_handle(&self.merkle_cf_name) 252 | .ok_or_else(|| anyhow::anyhow!("Merkle column family not found"))?; 253 | 254 | match self.db.get_cf(cf, hash)? 
{ 255 | Some(data) => { 256 | let record = MerkleRecord::from_slice(&data)?; 257 | Ok(Some(record)) 258 | }, 259 | None => Ok(None), 260 | } 261 | } 262 | 263 | fn set_merkle_record(&mut self, record: MerkleRecord) -> Result<()> { 264 | let cf = self.db.cf_handle(&self.merkle_cf_name) 265 | .ok_or_else(|| anyhow::anyhow!("Merkle column family not found"))?; 266 | 267 | let serialized = record.to_slice(); 268 | self.db.put_cf(cf, &record.hash, serialized)?; 269 | Ok(()) 270 | } 271 | 272 | fn set_merkle_records(&mut self, records: &Vec) -> Result<()> { 273 | let cf = self.db.cf_handle(&self.merkle_cf_name) 274 | .ok_or_else(|| anyhow::anyhow!("Merkle column family not found"))?; 275 | 276 | let mut batch = WriteBatch::default(); 277 | 278 | for record in records { 279 | let serialized = record.to_slice(); 280 | batch.put_cf(cf, &record.hash, serialized); 281 | } 282 | 283 | self.db.write(batch)?; 284 | Ok(()) 285 | } 286 | 287 | fn get_data_record(&self, hash: &[u8; 32]) -> Result> { 288 | let cf = self.db.cf_handle(&self.data_cf_name) 289 | .ok_or_else(|| anyhow::anyhow!("Data column family not found"))?; 290 | 291 | match self.db.get_cf(cf, hash)? 
{ 292 | Some(data) => { 293 | let record = DataHashRecord::from_slice(&data)?; 294 | Ok(Some(record)) 295 | }, 296 | None => Ok(None), 297 | } 298 | } 299 | 300 | fn set_data_record(&mut self, record: DataHashRecord) -> Result<()> { 301 | let cf = self.db.cf_handle(&self.data_cf_name) 302 | .ok_or_else(|| anyhow::anyhow!("Data column family not found"))?; 303 | 304 | let serialized = record.to_slice(); 305 | self.db.put_cf(cf, &record.hash, serialized)?; 306 | Ok(()) 307 | } 308 | } -------------------------------------------------------------------------------- /src/host/jubjub.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::bn_to_field; 2 | use ff::Field; 3 | use halo2_proofs::pairing::bn256::Fr; 4 | use lazy_static::lazy_static; 5 | use num_bigint::BigUint; 6 | use std::ops::{AddAssign, MulAssign, SubAssign}; 7 | 8 | lazy_static! { 9 | static ref D_BIG: BigUint = BigUint::parse_bytes( 10 | b"12181644023421730124874158521699555681764249180949974110617291017600649128846", 11 | 10 12 | ) 13 | .unwrap(); 14 | static ref D: Fr = bn_to_field(&(D_BIG)); 15 | static ref A_BIG: BigUint = BigUint::parse_bytes( 16 | b"21888242871839275222246405745257275088548364400416034343698204186575808495616", 17 | 10 18 | ) 19 | .unwrap(); 20 | static ref A: Fr = bn_to_field(&(A_BIG)); 21 | pub static ref Q: BigUint = BigUint::parse_bytes( 22 | b"21888242871839275222246405745257275088548364400416034343698204186575808495617", 23 | 10 24 | ) 25 | .unwrap(); 26 | } 27 | 28 | #[derive(Clone, Debug)] 29 | pub struct PointProjective { 30 | pub x: Fr, 31 | pub y: Fr, 32 | pub z: Fr, 33 | } 34 | 35 | impl PointProjective { 36 | pub fn affine(&self) -> Point { 37 | if self.z.is_zero_vartime() { 38 | return Point { 39 | x: Fr::zero(), 40 | y: Fr::zero(), 41 | }; 42 | } 43 | 44 | let zinv = self.z.invert().unwrap(); 45 | let mut x = self.x; 46 | x.mul_assign(&zinv); 47 | let mut y = self.y; 48 | y.mul_assign(&zinv); 49 | 50 | Point { x, y 
//! Baby Jubjub twisted Edwards curve arithmetic over the BN254 scalar field,
//! used to verify EdDSA-style signatures on the host side.

use crate::utils::bn_to_field;
use ff::Field;
use halo2_proofs::pairing::bn256::Fr;
use lazy_static::lazy_static;
use num_bigint::BigUint;
use std::ops::{AddAssign, MulAssign, SubAssign};

lazy_static! {
    // Twisted Edwards parameter `d` of Baby Jubjub.
    static ref D_BIG: BigUint = BigUint::parse_bytes(
        b"12181644023421730124874158521699555681764249180949974110617291017600649128846",
        10
    )
    .unwrap();
    static ref D: Fr = bn_to_field(&(D_BIG));
    // Parameter `a` = p - 1, i.e. -1 in Fr.
    static ref A_BIG: BigUint = BigUint::parse_bytes(
        b"21888242871839275222246405745257275088548364400416034343698204186575808495616",
        10
    )
    .unwrap();
    static ref A: Fr = bn_to_field(&(A_BIG));
    // BN254 scalar field modulus p.
    pub static ref Q: BigUint = BigUint::parse_bytes(
        b"21888242871839275222246405745257275088548364400416034343698204186575808495617",
        10
    )
    .unwrap();
}

/// A curve point in projective coordinates (X : Y : Z); the affine point is
/// (X/Z, Y/Z) when Z != 0.
#[derive(Clone, Debug)]
pub struct PointProjective {
    pub x: Fr,
    pub y: Fr,
    pub z: Fr,
}

impl PointProjective {
    /// Normalize to affine coordinates; Z == 0 maps to (0, 0).
    pub fn affine(&self) -> Point {
        if self.z.is_zero_vartime() {
            return Point {
                x: Fr::zero(),
                y: Fr::zero(),
            };
        }

        let zinv = self.z.invert().unwrap();
        let mut x = self.x;
        x.mul_assign(&zinv);
        let mut y = self.y;
        y.mul_assign(&zinv);

        Point { x, y }
    }

    /// Projective twisted Edwards addition.
    ///
    /// Follows add-2008-bbjlp exactly — the single-letter locals (A..G below)
    /// mirror the formula's variables, so statement order matters:
    /// https://hyperelliptic.org/EFD/g1p/auto-twisted-projective.html#addition-add-2008-bbjlp
    #[allow(clippy::many_single_char_names)]
    pub fn add(&self, q: &PointProjective) -> PointProjective {
        // add-2008-bbjlp https://hyperelliptic.org/EFD/g1p/auto-twisted-projective.html#addition-add-2008-bbjlp
        // A = Z1*Z2
        let mut a = self.z;
        a.mul_assign(&q.z);
        // B = A^2
        let mut b = a;
        b = b.square();
        // C = X1*X2
        let mut c = self.x;
        c.mul_assign(&q.x);
        // D = Y1*Y2
        let mut d = self.y;
        d.mul_assign(&q.y);
        // E = d*C*D
        let mut e = *D;
        e.mul_assign(&c);
        e.mul_assign(&d);
        // F = B - E
        let mut f = b;
        f.sub_assign(&e);
        // G = B + E
        let mut g = b;
        g.add_assign(&e);
        // aux = (X1+Y1)*(X2+Y2) - C - D
        let mut x1y1 = self.x;
        x1y1.add_assign(&self.y);
        let mut x2y2 = q.x;
        x2y2.add_assign(&q.y);
        let mut aux = x1y1;
        aux.mul_assign(&x2y2);
        aux.sub_assign(&c);
        aux.sub_assign(&d);
        // X3 = A*F*aux
        let mut x3 = a;
        x3.mul_assign(&f);
        x3.mul_assign(&aux);
        // Y3 = A*G*(D - a*C)
        let mut ac = *A;
        ac.mul_assign(&c);
        let mut dac = d;
        dac.sub_assign(&ac);
        let mut y3 = a;
        y3.mul_assign(&g);
        y3.mul_assign(&dac);
        // Z3 = F*G
        let mut z3 = f;
        z3.mul_assign(&g);

        PointProjective {
            x: x3,
            y: y3,
            z: z3,
        }
    }

    /// Projective twisted Edwards doubling.
    ///
    /// Follows dbl-2008-bbjlp exactly; statement order matters:
    /// https://hyperelliptic.org/EFD/g1p/auto-twisted-projective.html#dbl-2008-bbjlp
    #[allow(clippy::many_single_char_names)]
    pub fn double(&self) -> PointProjective {
        // dbl-2008-bbjlp https://hyperelliptic.org/EFD/g1p/auto-twisted-projective.html#dbl-2008-bbjlp
        let mut two = Fr::one();
        two = two.double();

        // B = (X1+Y1)^2
        let mut b = self.x;
        b.add_assign(&self.y);
        b = b.square();

        // C = X1^2
        let mut c = self.x;
        c = c.square();

        // D = Y1^2
        let mut d = self.y;
        d = d.square();

        // E = a*C
        let mut e = *A;
        e.mul_assign(&c);

        // F = E + D
        let mut f = e;
        f.add_assign(&d);

        // H = Z1^2
        let mut h = self.z;
        h = h.square();

        // J = F - 2H
        let mut h2 = two;
        h2.mul_assign(&h);

        let mut j = f;
        j.sub_assign(&h2);

        // X3 = (B - C - D) * J
        let mut x3 = b;
        x3.sub_assign(&c);
        x3.sub_assign(&d);
        x3.mul_assign(&j);

        // Y3 = F * (E - D)
        let mut ed = e;
        ed.sub_assign(&d);
        let mut y3 = f;
        y3.mul_assign(&ed);

        // Z3 = F * J
        let mut z3 = f;
        z3.mul_assign(&j);
        PointProjective {
            x: x3,
            y: y3,
            z: z3,
        }
    }
}

/// An affine curve point.
#[derive(Clone, Debug, PartialEq)]
pub struct Point {
    pub x: Fr,
    pub y: Fr,
}

impl Point {
    /// The neutral element (0, 1) of the twisted Edwards group.
    pub fn identity() -> Self {
        Point {
            x: Fr::zero(),
            y: Fr::one(),
        }
    }

    /// Lift to projective coordinates with Z = 1.
    pub fn projective(&self) -> PointProjective {
        PointProjective {
            x: self.x,
            y: self.y,
            z: Fr::one(),
        }
    }

    /// Group addition via the projective formulas.
    pub fn add(&self, other: &Point) -> Point {
        self.projective().add(&other.projective()).affine()
    }

    /// Scalar multiplication by the LSB-first double-and-add ladder.
    pub fn mul_scalar(&self, n: &BigUint) -> Point {
        let mut r = Point::identity().projective();
        let mut exp = self.projective();
        let b = n.to_bytes_le();
        //little-end wise, like 6, it is 0,1,1 sequence
        for i in 0..n.bits() {
            if test_bit(&b, i.try_into().unwrap()) {
                r = r.add(&exp);
            }
            exp = exp.double();
        }
        r.affine()
    }
}

/// Bit `i` of a little-endian byte string.
pub fn test_bit(b: &[u8], i: usize) -> bool {
    b[i / 8] & (1 << (i % 8)) != 0
}

#[cfg(test)]
mod tests {
    use super::Point;
    use crate::utils::bn_to_field;
    use crate::utils::field_to_bn;
    use halo2_proofs::pairing::bn256::Fr;
    use num_bigint::BigUint;
    use std::str::FromStr;

    // Checks the EdDSA verification equation 0 = c*vk + R - s*G against
    // fixed vectors shared with the zkwasm SDK.
    #[test]
    pub fn verify_alt_jubjub_signature() {
        let msg = "12183188902842291436925829409440956230535359139686694837527706100765491669070";

        let pk_x = BigUint::parse_bytes(
            b"252e5567f8d2ec21093deb668196ebd676767e5414d167a09223d72a354e5b45",
            16,
        )
        .unwrap();
        let pk_y = BigUint::parse_bytes(
            b"2e91ef67e1f4bad22d03af787175c1ddeeca18c59451421a3958c6b64a376ec4",
            16,
        )
        .unwrap();
        let pk = Point {
            x: bn_to_field(&pk_x),
            y: bn_to_field(&pk_y),
        };

        println!("pk_x is: {:?} {:?}", BigUint::to_u64_digits(&pk_x), pk_x);
        println!("pk_y is: {:?} {:?}", BigUint::to_u64_digits(&pk_y), pk_y);

        let sig_rx = BigUint::parse_bytes(
            b"03871ac3f0ae73813b0a4bbaa778bd0ea3d43d75297cd1a2d3a8b98e053cf2af",
            16,
        )
        .unwrap();
        let sig_ry = BigUint::parse_bytes(
            b"1054763a3bdce693bb5f58064092cb5d3f45473eda5f1d0ead04e9e3a7b278f7",
            16,
        )
        .unwrap();

        let sig_r = Point {
            x: bn_to_field(&sig_rx),
            y: bn_to_field(&sig_ry),
        };

        println!(
            "sig_rx is: {:?} {:?}",
            BigUint::to_u64_digits(&sig_rx),
            sig_rx
        );
        println!(
            "sig_ry is: {:?} {:?}",
            BigUint::to_u64_digits(&sig_ry),
            sig_ry
        );

        // The base point below is already the NEGATED generator p_g.negate();
        // the original (non-negated) generator was:
        //   x: 2ef3f9b423a2c8c74e9803958f6c320e854a1c1c06cd5cc8fd221dc052d76df7
        //   y: 05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7
        let base_x = BigUint::parse_bytes(
            b"017054bebd8ed76269b84220f215264ea2e9cc2c72ec13c846bfd7d39d28920a",
            16,
        )
        .unwrap();
        let base_y = BigUint::parse_bytes(
            b"05a01167ea785d3f784224644a68e4067532c815f5f6d57d984b5c0e9c6c94b7",
            16,
        )
        .unwrap();
        let p_g_neg = Point {
            x: bn_to_field(&(base_x)),
            y: bn_to_field(&(base_y)),
        };
        let sig_s_str =
            "760414664615846567287977379644619164343552866248912558409257763292500819717";

        println!(
            "base x is: {:?} {:?}",
            BigUint::to_u64_digits(&base_x),
            BigUint::to_u64_digits(&field_to_bn(&-bn_to_field::<Fr>(&base_x)))
        );
        println!(
            "base y is: {:?} {:?}",
            BigUint::to_u64_digits(&base_y),
            base_y
        );

        let c = BigUint::from_str(msg).unwrap();
        let sig_s = BigUint::from_str(sig_s_str).unwrap();

        // Do not remove following prints as they are used in zkwasm sdk tests
        println!("msghash is {:?}", BigUint::to_u64_digits(&c));
        println!("sig_s is {:?}, {:?}", BigUint::to_u64_digits(&sig_s), sig_s);

        // 0 = c . vk + R -S . P_G that requires all points to be in the same group
        let lhs = pk.mul_scalar(&c);
        println!("first round {:?}", lhs);
        let lhs = lhs.add(&sig_r);
        println!("second round {:?}", lhs);
        let rhs = p_g_neg.mul_scalar(&sig_s);
        let rst = lhs.add(&rhs);
        println!("third round {:?}", rst);
        // assert_eq!(lhs,rhs)
        assert_eq!(Point::identity(), rst);
    }
}
pub const BITRATE: usize = (1600 / LANE_SIZE) as usize;
pub const CAPACITY: usize = (512 / LANE_SIZE) as usize;
pub const L: usize = 6;
/// Lanes absorbed per block: 1088-bit rate / 64-bit lanes = 17.
pub const RATE: usize = (1088 / LANE_SIZE) as usize;
pub const LANE_SIZE: u32 = 64;

/// The range of x and y coordinates for the sponge state.
pub const T: usize = 5;

/// The state is a 5x5 matrix of 64-bit lanes; lane `l` lives at `[l % 5][l / 5]`.
#[derive(Debug, Clone)]
pub struct State([[u64; T]; T]);

/// The number of rounds for the 1600-bit permutation used in Keccak-256 (24).
pub const N_R: usize = T * T - 1;

/// The number of next_inputs that are used inside the `absorb` circuit.
pub const RATE_LANES: usize = 17;

/// The Keccak [round constants](https://github.com/Legrandin/pycryptodome/blob/016252bde04456614b68d4e4e8798bc124d91e7a/src/keccak.c#L257-L282)
pub static ROUND_CONSTANTS: [u64; N_R] = [
    0x0000000000000001,
    0x0000000000008082,
    0x800000000000808A,
    0x8000000080008000,
    0x000000000000808B,
    0x0000000080000001,
    0x8000000080008081,
    0x8000000000008009,
    0x000000000000008A,
    0x0000000000000088,
    0x0000000080008009,
    0x000000008000000A,
    0x000000008000808B,
    0x800000000000008B,
    0x8000000000008089,
    0x8000000000008003,
    0x8000000000008002,
    0x8000000000000080,
    0x000000000000800A,
    0x800000008000000A,
    0x8000000080008081,
    0x8000000000008080,
    0x0000000080000001,
    0x8000000080008008,
];

/// The Keccak [rotation offsets](https://github.com/Legrandin/pycryptodome/blob/016252bde04456614b68d4e4e8798bc124d91e7a/src/keccak.c#L232-L255),
/// indexed as `ROTATION_CONSTANTS[x][y]`.
pub static ROTATION_CONSTANTS: [[u32; 5]; 5] = [
    [0, 36, 3, 41, 18],
    [1, 44, 10, 45, 2],
    [62, 6, 43, 15, 61],
    [28, 55, 25, 21, 56],
    [27, 20, 39, 8, 14],
];

// Implement the standard `Default` trait instead of shadowing it with a
// private inherent `fn default`; all `Self::default()` / `State::default()`
// call sites resolve identically.
impl Default for State {
    fn default() -> Self {
        State([[0u64; T]; T])
    }
}

impl State {
    /// Run the full 24-round Keccak-f[1600] permutation in place.
    pub fn permute(&mut self) {
        for rc in ROUND_CONSTANTS.iter().take(N_R) {
            self.round_b(*rc);
        }
    }

    /// Dump the state lanes to stdout (debugging aid).
    pub fn debug(&self) {
        println!("debug host state");
        for i in 0..5 {
            let c = self.0[i].clone().map(|x| format!("{:02x}", x)).join("-");
            println!("host state ({}): {}", i, c);
        }
    }

    /// One Keccak round: theta, rho, pi, chi (named `xi` here), iota.
    fn round_b(&mut self, rc: u64) {
        self.theta();
        self.rho();
        self.pi();
        self.xi();
        self.iota(rc);
    }

    /// Theta: xor every lane with the parities of the two adjacent columns.
    // Plain nested loops replace `itertools::cartesian_product`, dropping the
    // extra dependency from this hot path with identical iteration order.
    pub fn theta(&mut self) {
        let mut c = [0u64; 5];
        for x in 0..5 {
            c[x] = self.0[x][0] ^ self.0[x][1] ^ self.0[x][2] ^ self.0[x][3] ^ self.0[x][4];
        }
        for x in 0..5 {
            for y in 0..5 {
                self.0[x][y] ^= c[(x + 4) % 5] ^ c[(x + 1) % 5].rotate_left(1);
            }
        }
    }

    /// Rho: rotate every lane left by its fixed offset.
    pub fn rho(&mut self) {
        for x in 0..5 {
            for y in 0..5 {
                self.0[x][y] = self.0[x][y].rotate_left(ROTATION_CONSTANTS[x][y]);
            }
        }
    }

    /// Pi: permute the lane positions.
    pub fn pi(&mut self) {
        let mut out = Self::default();
        for x in 0..5 {
            for y in 0..5 {
                out.0[y][(2 * x + 3 * y) % 5] = self.0[x][y];
            }
        }
        self.0 = out.0;
    }

    /// Chi (called `xi` here to match the circuit code): the non-linear step.
    pub fn xi(&mut self) {
        let mut out = Self::default();
        for x in 0..5 {
            for y in 0..5 {
                out.0[x][y] =
                    self.0[x][y] ^ ((!self.0[(x + 1) % 5][y]) & self.0[(x + 2) % 5][y]);
            }
        }
        self.0 = out.0;
    }

    /// Iota: mix the round constant into lane (0, 0).
    pub fn iota(&mut self, rc: u64) {
        self.0[0][0] ^= rc;
    }
}

impl State {
    /// Xor one rate-sized block into the state, then permute.
    pub fn absorb(&mut self, input: &[u64; RATE]) {
        // Lane i maps to [i % 5][i / 5]: x advances first, then y — same
        // order as the previous manual x/y counters.
        for (i, word) in input.iter().enumerate() {
            self.0[i % 5][i / 5] ^= *word;
        }
        self.permute();
    }

    /// The first four lanes of the state form the 256-bit digest
    /// (each lane is a little-endian u64 of the digest bytes).
    pub fn result(&self) -> [u64; 4] {
        [self.0[0][0], self.0[1][0], self.0[2][0], self.0[3][0]]
    }
}

/// Lane-oriented Keccak-256 sponge: callers feed whole u64 lanes rather
/// than bytes.
#[derive(Debug, Clone)]
pub struct Keccak {
    state: State,
    absorbing: Vec<u64>,
}

impl Keccak {
    /// Fresh sponge with an all-zero state and an empty input buffer.
    pub fn new() -> Self {
        Self {
            state: State::default(),
            absorbing: Vec::new(),
        }
    }

    /// Buffer `input`, absorbing every complete 17-lane block immediately;
    /// the (< RATE) remainder stays buffered for the next call or `squeeze`.
    pub fn update(&mut self, input: &[u64]) {
        self.absorbing.extend_from_slice(input);
        let full = self.absorbing.len() - self.absorbing.len() % RATE;
        // `mem::take` sidesteps the clone-whole-buffer dance the previous
        // version used to satisfy the borrow checker.
        let buffered = std::mem::take(&mut self.absorbing);
        for chunk in buffered[..full].chunks_exact(RATE) {
            self.state.absorb(chunk.try_into().unwrap());
        }
        self.absorbing = buffered[full..].to_vec();
    }

    /// Absorb exactly one rate-sized block (no padding) and return the digest.
    ///
    /// Panics if buffered input from a previous `update` is pending.
    pub fn update_exact(&mut self, inputs: &[u64; RATE]) -> [u64; 4] {
        assert_eq!(self.absorbing.len(), 0);
        self.state.absorb(inputs);
        self.state.result()
    }

    /// Apply Keccak pad10*1 at lane granularity (0x01 byte after the message,
    /// 0x80 byte closing the block), absorb the final block, and return the
    /// digest for the current state.
    pub fn squeeze(&mut self) -> [u64; 4] {
        let len = self.absorbing.len();
        let padding_total = RATE - (len % RATE);

        let starting_one_lane = 1u64; // 0x01 in the first padding byte
        let ending_one_lane = 1u64 << 63; // 0x80 in the last byte of the block

        if padding_total == 1 {
            // Only one lane left: start and end markers share it.
            self.absorbing.push(starting_one_lane + ending_one_lane);
        } else {
            self.absorbing.push(starting_one_lane);
            self.absorbing.resize(len + padding_total - 1, 0u64);
            self.absorbing.push(ending_one_lane);
        }
        let r: Vec<u64> = self.absorbing.clone();
        println!("before absorb is {:?}", &r);
        self.state.absorb(&r.try_into().unwrap());
        println!("after absorb state is {:?}", &self.state);
        self.absorbing.truncate(0);
        self.state.result()
    }
}
lazy_static::lazy_static! {
    /// Prototype hasher: callers clone it to obtain a fresh Keccak-256 state.
    pub static ref KECCAK_HASHER: Keccak = Keccak::new();
}

#[cfg(test)]
mod tests {
    use super::KECCAK_HASHER;
    use crate::host::keccak256::N_R;
    use itertools::Itertools;
    use rand::RngCore;
    use rand_core::OsRng;

    // Hash of the empty input; `exp` is the byte form of the reference
    // Keccak-256 empty digest c5d246...a470. Only prints — the per-u64
    // formatting endianness differs from the byte-wise reference string.
    #[test]
    fn test_keccak() {
        let exp = [
            197, 210, 70, 1, 134, 247, 35, 60, 146, 126, 125, 178, 220, 199, 3, 192, 229, 0, 182,
            83, 202, 130, 39, 59, 123, 250, 216, 4, 93, 133, 164, 112,
        ];
        let expect_str = exp.iter().map(|x| format!("{:02x}", x)).join("");
        let mut hasher = super::KECCAK_HASHER.clone();
        hasher.update(&[]);
        let result = hasher.squeeze();

        let hash = result.iter().map(|x| format!("{:02x}", x)).join("");
        println!("hash result is {:?}", hash); // endian does not match the reference implementation
        println!("expect result is {:?}", expect_str);
        //assert_eq!(result.to_string(), ZERO_HASHER_SQUEEZE);
    }

    // `update` + `squeeze` on (17k - 1) lanes must equal manually absorbing
    // the same lanes plus the combined 0x01/0x80 padding lane.
    #[test]
    fn keccak256_one() {
        let mut keccak = KECCAK_HASHER.clone();
        let number_of_permutation = N_R / 24;
        let number_of_inputs = 17 * number_of_permutation - 1;
        let inputs = (0..number_of_inputs)
            .map(|_| OsRng.next_u64())
            .collect::<Vec<u64>>();

        keccak.update(&inputs[..]);
        let a = keccak.squeeze();

        let mut keccak = KECCAK_HASHER.clone();
        let mut inputs = inputs.clone();
        // 1 + (1 << 63): start and end padding markers share the last lane.
        inputs.push(1u64 + (1u64 << 63));
        assert_eq!(inputs.len() % 17, 0);

        for chunk in inputs.chunks(17) {
            keccak.state.absorb(&chunk.try_into().unwrap());
        }

        let b = keccak.state.result();

        assert_eq!(a, b);
    }

    // Print-only comparison against a hard-coded string; asserts nothing.
    // NOTE(review): the "expect" value does not correspond to the lane output
    // of this implementation (see the trailing question in the original) —
    // confirm its provenance before turning this into an assertion.
    #[test]
    fn keccak256_check_reference() {
        let mut keccak = KECCAK_HASHER.clone();
        keccak.update(&[]);
        let expect = "0x0bbfa9132015329c07b3822630fc263512f39a81d9fc90542cc28fc914d8fa7a";
        let result = keccak.squeeze();
        let g = result.iter().map(|x| format!("{:02x}", x)).join("");
        println!("g is {:?}", g);
        println!("result is {:?}", result);
        println!("expect is {:?}", expect);
        // what is a then?
    }

    // Empty input: squeeze must equal absorbing one explicit full padding block.
    #[test]
    fn keccak256_empty() {
        let mut keccak = KECCAK_HASHER.clone();

        let inputs = (0..0).map(|_| OsRng.next_u64()).collect::<Vec<u64>>();

        keccak.update(&inputs[..]);
        let a = keccak.squeeze();

        let mut keccak = KECCAK_HASHER.clone();
        let mut inputs = inputs.clone();
        let mut extra_padding = vec![0; 17];

        extra_padding[16] = 1u64 << 63;
        extra_padding[0] = 1;
        inputs.extend(extra_padding);
        assert_eq!(inputs.len() % 17, 0);

        for chunk in inputs.chunks(17) {
            keccak.state.absorb(&chunk.try_into().unwrap());
        }

        let b = keccak.state.result();

        assert_eq!(a, b);
    }

    // Rate-aligned input: padding must occupy a whole extra block.
    #[test]
    fn keccak256_extra_permutation() {
        let mut keccak = KECCAK_HASHER.clone();
        let number_of_permutation = N_R / 24;
        let number_of_inputs = 17 * number_of_permutation;
        let inputs = (0..number_of_inputs)
            .map(|_| OsRng.next_u64())
            .collect::<Vec<u64>>();

        keccak.update(&inputs[..]);
        let a = keccak.squeeze();

        let mut keccak = KECCAK_HASHER.clone();
        let mut inputs = inputs.clone();
        let mut extra_padding = vec![0u64; 17];
        extra_padding[16] = 1u64 << 63;
        extra_padding[0] = 1u64;
        inputs.extend(extra_padding);

        for chunk in inputs.chunks(17) {
            keccak.state.absorb(&chunk.try_into().unwrap());
        }

        let b = keccak.state.result();

        assert_eq!(a, b);
    }

    #[test]
    fn keccak_run() {}
}
| Keccak256Push, 63 | Keccak256Finalize, 64 | LogChar, 65 | WitnessInsert, 66 | WitnessPop, 67 | WitnessTraceSize, 68 | WitnessIndexedInsert, 69 | WitnessSetIndex, 70 | WitnessIndexedPush, 71 | WitnessIndexedPop, 72 | } 73 | 74 | pub enum ReduceRule { 75 | Bytes(Vec, usize), 76 | Field(F, usize), // F * shiftbits 77 | U64(u64), 78 | } 79 | 80 | impl ReduceRule { 81 | fn nb_inputs(&self) -> usize { 82 | match self { 83 | ReduceRule::Bytes(_, a) => *a, // a * u64 84 | ReduceRule::Field(_, _) => 4, // 4 * u64 85 | ReduceRule::U64(_) => 1, // 1 * u64 86 | } 87 | } 88 | fn reduce(&mut self, v: u64, offset: usize) { 89 | match self { 90 | ReduceRule::Bytes(ref mut x, _) => { 91 | let mut bytes: Vec = v.to_le_bytes().to_vec(); 92 | x.append(&mut bytes); 93 | } // a * u64 94 | ReduceRule::Field(ref mut x, shift) => { 95 | let mut acc = F::from_u128(v as u128); 96 | for _ in 0..offset { 97 | acc = acc * F::from_u128(1u128 << *shift) 98 | } 99 | *x = *x + acc 100 | } // 4 * u64 101 | ReduceRule::U64(ref mut x) => { 102 | *x = v; 103 | } // 1 * u64 104 | } 105 | } 106 | 107 | fn reset(&mut self) { 108 | match self { 109 | ReduceRule::Bytes(ref mut x, _) => x.clear(), // a * u64 110 | ReduceRule::Field(ref mut x, _shift) => *x = F::zero(), // 4 * u64 111 | ReduceRule::U64(ref mut x) => { 112 | *x = 0; 113 | } // 1 * u64 114 | } 115 | } 116 | 117 | pub fn field_value(&self) -> Option { 118 | match self { 119 | ReduceRule::Bytes(_, _) => None, 120 | ReduceRule::Field(f, _) => Some(*f), // 4 * u64 121 | ReduceRule::U64(_) => None, // 4 * u64 122 | } 123 | } 124 | pub fn bytes_value(&self) -> Option> { 125 | match self { 126 | ReduceRule::Bytes(b, _) => Some(b.clone()), 127 | ReduceRule::Field(_, _) => None, // 4 * u64 128 | ReduceRule::U64(_) => None, // 4 * u64 129 | } 130 | } 131 | pub fn u64_value(&self) -> Option { 132 | match self { 133 | ReduceRule::Bytes(_, _) => None, 134 | ReduceRule::Field(_, _) => None, // 4 * u64 135 | ReduceRule::U64(v) => Some(*v), // 4 * u64 136 | 
} 137 | } 138 | } 139 | 140 | pub struct Reduce { 141 | pub cursor: usize, 142 | pub rules: Vec>, 143 | } 144 | 145 | impl Reduce { 146 | pub fn new(rules: Vec>) -> Self { 147 | Reduce { cursor: 0, rules } 148 | } 149 | pub fn total_len(&self) -> usize { 150 | self.rules.iter().fold(0, |acc, x| acc + x.nb_inputs()) 151 | } 152 | } 153 | 154 | impl Reduce { 155 | /// take in a u64 value and update all the reduce rule accordingly 156 | pub fn reduce(&mut self, v: u64) { 157 | let mut cursor = self.cursor; 158 | let total = self.total_len(); 159 | if cursor == 0 { 160 | for rule in self.rules.iter_mut() { 161 | rule.reset() 162 | } 163 | } 164 | for index in 0..self.rules.len() { 165 | if cursor >= self.rules[index].nb_inputs() { 166 | cursor = cursor - self.rules[index].nb_inputs(); 167 | } else { 168 | self.rules[index].reduce(v, cursor); 169 | break; 170 | } 171 | } 172 | self.cursor += 1; 173 | if self.cursor == total { 174 | self.cursor = 0; 175 | } 176 | } 177 | } 178 | 179 | #[cfg(test)] 180 | mod tests { 181 | use super::Reduce; 182 | use super::ReduceRule; 183 | use halo2_proofs::arithmetic::FieldExt; 184 | use halo2_proofs::pairing::bn256::Fr; 185 | fn new_reduce(rules: Vec>) -> Reduce { 186 | Reduce { cursor: 0, rules } 187 | } 188 | 189 | #[test] 190 | fn test_reduce_bytes() { 191 | let reducerule = ReduceRule::::Bytes(vec![], 4); 192 | let mut reduce = Reduce { 193 | cursor: 0, 194 | rules: vec![reducerule], 195 | }; 196 | reduce.reduce(1); 197 | } 198 | 199 | #[test] 200 | fn test_reduce_bytes_twice() { 201 | let reducerule = ReduceRule::::Bytes(vec![], 1); 202 | let mut reduce = Reduce { 203 | cursor: 0, 204 | rules: vec![reducerule], 205 | }; 206 | reduce.reduce(1); 207 | reduce.reduce(2); 208 | assert_eq!( 209 | reduce.rules[0].bytes_value().unwrap(), 210 | vec![2, 0, 0, 0, 0, 0, 0, 0] 211 | ) 212 | } 213 | 214 | #[test] 215 | fn test_reduce_u64() { 216 | let mut get = new_reduce(vec![ 217 | ReduceRule::U64(0), 218 | ReduceRule::U64(0), 219 | 
#[cfg(test)]
mod tests {
    use super::Reduce;
    use super::ReduceRule;
    use halo2_proofs::arithmetic::FieldExt;
    use halo2_proofs::pairing::bn256::Fr;

    // Test-local shorthand mirroring `Reduce::new`.
    fn new_reduce(rules: Vec<ReduceRule<Fr>>) -> Reduce<Fr> {
        Reduce { cursor: 0, rules }
    }

    // Smoke test: feeding one limb into a 4-limb Bytes rule must not panic.
    #[test]
    fn test_reduce_bytes() {
        let reducerule = ReduceRule::<Fr>::Bytes(vec![], 4);
        let mut reduce = Reduce {
            cursor: 0,
            rules: vec![reducerule],
        };
        reduce.reduce(1);
    }

    // A 1-limb Bytes rule wraps around: the second limb starts a new record,
    // so only the bytes of `2` remain.
    #[test]
    fn test_reduce_bytes_twice() {
        let reducerule = ReduceRule::<Fr>::Bytes(vec![], 1);
        let mut reduce = Reduce {
            cursor: 0,
            rules: vec![reducerule],
        };
        reduce.reduce(1);
        reduce.reduce(2);
        assert_eq!(
            reduce.rules[0].bytes_value().unwrap(),
            vec![2, 0, 0, 0, 0, 0, 0, 0]
        )
    }

    // The first limb lands in the first U64 rule; the cursor advances by one.
    #[test]
    fn test_reduce_u64() {
        let mut get = new_reduce(vec![
            ReduceRule::U64(0),
            ReduceRule::U64(0),
            ReduceRule::U64(0),
            ReduceRule::U64(0),
        ]);
        get.reduce(12);
        assert_eq!(get.cursor, 1);
        assert_eq!(get.rules[0].u64_value().unwrap(), 12);
    }

    // Four limbs with shift 64 assemble (1 << 64) + 1 in the field.
    #[test]
    fn test_reduce_fr() {
        let mut get = new_reduce(vec![ReduceRule::Field(Fr::zero(), 64)]);
        get.reduce(1);
        get.reduce(1);
        get.reduce(0);
        get.reduce(0);
        assert_eq!(
            get.rules[0].field_value().unwrap(),
            Fr::from_u128((1u128 << 64) + 1)
        );
    }
}

//! Shared Poseidon hasher instances (src/host/poseidon.rs).

use halo2_proofs::pairing::bn256::Fr;
use poseidon::Poseidon;
use poseidon::Spec;

pub const PREFIX_CHALLENGE: u64 = 0u64;
pub const PREFIX_POINT: u64 = 1u64;
pub const PREFIX_SCALAR: u64 = 2u64;

// We have two hashers here:
// 1. MERKLE_HASHER is used for the non-sponge hash of two merkle siblings.
// 2. POSEIDON_HASHER is used for the sponge poseidon hash of data.
// NOTE(review): the generic width/rate parameters below are reconstructed
// (the dump stripped angle brackets) — confirm against the original file.
lazy_static::lazy_static! {
    pub static ref POSEIDON_HASHER: poseidon::Poseidon<Fr, 9, 8> = Poseidon::<Fr, 9, 8>::new(8, 63);
    pub static ref MERKLE_HASHER: poseidon::Poseidon<Fr, 3, 2> = Poseidon::<Fr, 3, 2>::new(8, 57);
    pub static ref MERKLE_LEAF_HASHER: poseidon::Poseidon<Fr, 3, 2> = Poseidon::<Fr, 3, 2>::new(8, 57);
    pub static ref POSEIDON_HASHER_SPEC: poseidon::Spec<Fr, 9, 8> = Spec::new(8, 63);
    pub static ref MERKLE_HASHER_SPEC: poseidon::Spec<Fr, 3, 2> = Spec::new(8, 57);
    pub static ref MERKLE_LEAF_HASHER_SPEC: poseidon::Spec<Fr, 3, 2> = Spec::new(8, 57);
}

#[cfg(test)]
mod tests {
    use halo2_proofs::pairing::bn256::Fr;

    // Pins the sponge hash of a single zero field element.
    #[test]
    fn test_poseidon() {
        const ZERO_HASHER_SQUEEZE: &str =
            "0x03f943aabd67cd7b72a539f3de686c3280c36c572be09f2b9193f5ef78761c6b"; //force the hasher is for fr field result.
        let mut hasher = super::POSEIDON_HASHER.clone();
        hasher.update(&[Fr::zero()]);
        let result = hasher.squeeze();
        println!("hash result is {:?}", result);
        assert_eq!(result.to_string(), ZERO_HASHER_SQUEEZE);
    }
}

//! Crate root (src/lib.rs).

#![feature(slice_flatten)]

pub mod adaptor;
pub mod circuits;
pub mod host;
pub mod proof;
pub mod utils;

pub extern crate anyhow;

//! Prover CLI entry point (src/main.rs).

#![feature(slice_flatten)]
//#![deny(warnings)]
mod adaptor;
pub mod circuits;
pub mod host;
pub mod proof;
pub mod utils;

use crate::proof::{exec_create_host_proof, read_host_call_table, OpType};
use clap::{arg, value_parser, App, Arg, ArgMatches};
use std::path::PathBuf;

const DEFAULT_CIRCUITS_K: u32 = 22;

#[derive(clap::Parser)]
struct ArgOpName {
    #[clap(arg_enum)]
    t: OpType,
}

// Optional `-k` flag controlling the circuit size (defaults to 22).
// NOTE(review): the value-name placeholder in arg!() was stripped by the
// dump; `<VALUE>` is a reconstruction — confirm against the original file.
fn circuits_k<'a>() -> Arg<'a> {
    arg!(-k --K <VALUE> "Circuit size K")
        .required(false)
        .value_parser(value_parser!(u32))
}

fn parse_circuits_k(matches: &ArgMatches) -> u32 {
    matches
        .get_one::<u32>("K")
        .unwrap_or(&DEFAULT_CIRCUITS_K)
        .clone()
}

// `-o` output folder that receives all proof artifacts (required).
fn output_folder<'a>() -> Arg<'a> {
    arg!(-o --output... "output file folder that contains all proof results")
        .max_values(1)
        .value_parser(value_parser!(PathBuf))
}

fn parse_output_folder(matches: &ArgMatches) -> PathBuf {
    matches
        .get_one::<PathBuf>("output")
        .expect("output folder is required")
        .clone()
}

// `-p` folder holding the trusted-setup parameters (required).
fn param_folder<'a>() -> Arg<'a> {
    arg!(-p --param... "param file folder that contains all setup results")
        .max_values(1)
        .value_parser(value_parser!(PathBuf))
}

fn parse_param_folder(matches: &ArgMatches) -> PathBuf {
    matches
        .get_one::<PathBuf>("param")
        .expect("param folder is required")
        .clone()
}

// `-i` input trace of host calls to prove (required).
fn input_file<'a>() -> Arg<'a> {
    arg!(-i --input... "Input file that contains all host function call")
        .max_values(1)
        .value_parser(value_parser!(PathBuf))
}

fn parse_input_file(matches: &ArgMatches) -> PathBuf {
    matches
        .get_one::<PathBuf>("input")
        .expect("input file is required")
        .clone()
}

// `-n` which host-op circuit to build.
fn opname<'a>() -> Arg<'a> {
    arg!(-n --opname... "Operation name")
        .max_values(1)
        .value_parser(value_parser!(OpType))
}

fn parse_opname(matches: &ArgMatches) -> OpType {
    matches
        .get_one::<OpType>("opname")
        .expect("opname is required")
        .clone()
}

// Parse the CLI and hand everything to the proof driver.
#[allow(clippy::many_single_char_names)]
fn main() {
    let clap_app = App::new("hostcircuit")
        .arg(input_file())
        .arg(output_folder())
        .arg(param_folder())
        .arg(opname())
        .arg(circuits_k());

    let matches = clap_app.get_matches();
    let input_file = parse_input_file(&matches);
    let cache_folder = parse_output_folder(&matches);
    let param_folder = parse_param_folder(&matches);
    let opname = parse_opname(&matches);
    let k = parse_circuits_k(&matches);

    exec_create_host_proof(
        "host",
        k as usize,
        &read_host_call_table(input_file),
        opname,
        &cache_folder,
        &param_folder,
    );
}
RocksDB, TreeDB}; 5 | 6 | pub fn main() -> Result<()> { 7 | let matches = App::new("MongoDB to RocksDB Migration Tool") 8 | .version("1.0") 9 | .author("cuiweixe") 10 | .about("Migrates data from MongoDB to RocksDB") 11 | .arg( 12 | Arg::with_name("mongo-uri") 13 | .long("mongo-uri") 14 | .value_name("URI") 15 | .help("MongoDB connection URI") 16 | .default_value("mongodb://localhost:27017") 17 | .takes_value(true), 18 | ) 19 | .arg( 20 | Arg::with_name("rocks-db-path") 21 | .long("rocks-db-path") 22 | .value_name("PATH") 23 | .help("Path to RocksDB database") 24 | .default_value("./test_db") 25 | .takes_value(true), 26 | ) 27 | .arg( 28 | Arg::with_name("collection-id") 29 | .long("collection-id") 30 | .value_name("HEX") 31 | .help("Collection ID as 32-byte hex string (default: 32 bytes of 0x01)") 32 | .default_value(&"01".repeat(32)) 33 | .takes_value(true), 34 | ) 35 | .get_matches(); 36 | 37 | let mongo_uri = matches.value_of("mongo-uri").unwrap(); 38 | let rocks_db_path = matches.value_of("rocks-db-path").unwrap(); 39 | let collection_id_hex = matches.value_of("collection-id").unwrap(); 40 | 41 | // Parse collection ID from hex string 42 | let collection_id = parse_collection_id(collection_id_hex)?; 43 | 44 | migrate_from_mongo_to_rocksdb(mongo_uri, rocks_db_path, collection_id)?; 45 | 46 | Ok(()) 47 | } 48 | 49 | fn parse_collection_id(hex_str: &str) -> Result<[u8; 32]> { 50 | // Remove "0x" prefix if present 51 | let hex_str = if hex_str.starts_with("0x") { 52 | &hex_str[2..] 
53 | } else { 54 | hex_str 55 | }; 56 | 57 | // Check if the string has the correct length 58 | if hex_str.len() != 64 { 59 | return Err(anyhow::anyhow!("Collection ID must be 32 bytes (64 hex characters)")); 60 | } 61 | 62 | // Parse hex string to bytes 63 | let bytes = hex::decode(hex_str) 64 | .map_err(|e| anyhow::anyhow!("Failed to parse collection ID: {}", e))?; 65 | 66 | // Convert to fixed-size array 67 | let mut result = [0u8; 32]; 68 | result.copy_from_slice(&bytes); 69 | 70 | Ok(result) 71 | } 72 | pub fn migrate_from_mongo_to_rocksdb(mongo_uri: &str, rocks_db_path: &str, cname_id: [u8; 32]) -> Result<()> { 73 | println!("Starting migration from MongoDB to RocksDB"); 74 | println!("MongoDB URI: {}", mongo_uri); 75 | println!("RocksDB path: {}", rocks_db_path); 76 | 77 | // Initialize MongoDB client 78 | let mongo_db = MongoDB::new(cname_id, Some(mongo_uri.to_string())); 79 | 80 | // Initialize RocksDB 81 | let mut rocks_db = RocksDB::new(rocks_db_path)?; 82 | 83 | // Clear RocksDB to ensure clean migration 84 | rocks_db.clear()?; 85 | 86 | // Migrate Merkle records 87 | let merkle_collection = mongo_db.merkel_collection()?; 88 | let merkle_cursor = merkle_collection.find(doc! {}, None)?; 89 | 90 | let mut merkle_count = 0; 91 | for result in merkle_cursor { 92 | match result { 93 | Ok(record) => { 94 | rocks_db.set_merkle_record(record)?; 95 | merkle_count += 1; 96 | if merkle_count % 1000 == 0 { 97 | println!("Migrated {} Merkle records", merkle_count); 98 | } 99 | }, 100 | Err(e) => println!("Error reading Merkle record: {}", e), 101 | } 102 | } 103 | 104 | // Migrate Data records 105 | let data_collection = mongo_db.data_collection()?; 106 | let data_cursor = data_collection.find(doc! 
{}, None)?; 107 | 108 | let mut data_count = 0; 109 | for result in data_cursor { 110 | match result { 111 | Ok(record) => { 112 | rocks_db.set_data_record(record)?; 113 | data_count += 1; 114 | if data_count % 1000 == 0 { 115 | println!("Migrated {} Data records", data_count); 116 | } 117 | }, 118 | Err(e) => println!("Error reading Data record: {}", e), 119 | } 120 | } 121 | 122 | println!("Migration complete!"); 123 | println!("Total Merkle records migrated: {}", merkle_count); 124 | println!("Total Data records migrated: {}", data_count); 125 | 126 | Ok(()) 127 | } 128 | 129 | #[cfg(test)] 130 | mod tests { 131 | use super::*; 132 | use zkwasm_host_circuits::host::datahash::DataHashRecord; 133 | use zkwasm_host_circuits::host::mongomerkle::MerkleRecord; 134 | use zkwasm_host_circuits::host::db::{MongoDB, RocksDB, TreeDB, MONGODB_DATABASE, MONGODB_MERKLE_NAME_PREFIX, MONGODB_DATA_NAME_PREFIX, get_collection_name}; 135 | use tempfile::tempdir; 136 | 137 | #[test] 138 | fn test_migration() -> Result<()> { 139 | // Create a temporary directory for RocksDB 140 | let temp_dir = tempdir()?; 141 | let rocks_path = temp_dir.path().to_str().unwrap(); 142 | 143 | // Use a test collection ID 144 | let test_id = [1u8; 32]; 145 | 146 | // Setup MongoDB with test data 147 | let mongo_uri = "mongodb://localhost:27017"; 148 | let mut mongo_db = MongoDB::new(test_id, Some(mongo_uri.to_string())); 149 | 150 | // Create test Merkle records 151 | let merkle_record1 = MerkleRecord { 152 | index: 0, 153 | hash: [1u8; 32], 154 | left: Some([2u8; 32]), 155 | right: Some([3u8; 32]), 156 | data: None, 157 | }; 158 | 159 | let merkle_record2 = MerkleRecord { 160 | index: 0, 161 | hash: [4u8; 32], 162 | left: Some([5u8; 32]), 163 | right: Some([6u8; 32]), 164 | data: None, 165 | }; 166 | 167 | // Create test Data records 168 | let data_record1 = DataHashRecord { 169 | hash: [7u8; 32], 170 | data: vec![1, 2, 3, 4], 171 | }; 172 | 173 | let data_record2 = DataHashRecord { 174 | hash: [8u8; 
32], 175 | data: vec![5, 6, 7, 8], 176 | }; 177 | 178 | // Insert test data into MongoDB 179 | mongo_db.set_merkle_record(merkle_record1.clone())?; 180 | mongo_db.set_merkle_record(merkle_record2.clone())?; 181 | mongo_db.set_data_record(data_record1.clone())?; 182 | mongo_db.set_data_record(data_record2.clone())?; 183 | 184 | // Run migration 185 | migrate_from_mongo_to_rocksdb(mongo_uri, rocks_path, test_id)?; 186 | 187 | // Verify data in RocksDB 188 | let rocks_db = RocksDB::new(rocks_path)?; 189 | 190 | // Check Merkle records 191 | let retrieved_merkle1 = rocks_db.get_merkle_record(&merkle_record1.hash)?; 192 | let retrieved_merkle2 = rocks_db.get_merkle_record(&merkle_record2.hash)?; 193 | 194 | assert!(retrieved_merkle1.is_some()); 195 | assert!(retrieved_merkle2.is_some()); 196 | 197 | let retrieved_merkle1 = retrieved_merkle1.unwrap(); 198 | let retrieved_merkle2 = retrieved_merkle2.unwrap(); 199 | 200 | assert_eq!(retrieved_merkle1.hash, merkle_record1.hash); 201 | assert_eq!(retrieved_merkle1.left, merkle_record1.left); 202 | assert_eq!(retrieved_merkle1.right, merkle_record1.right); 203 | 204 | assert_eq!(retrieved_merkle2.hash, merkle_record2.hash); 205 | assert_eq!(retrieved_merkle2.left, merkle_record2.left); 206 | assert_eq!(retrieved_merkle2.right, merkle_record2.right); 207 | 208 | // Check Data records 209 | let retrieved_data1 = rocks_db.get_data_record(&data_record1.hash)?; 210 | let retrieved_data2 = rocks_db.get_data_record(&data_record2.hash)?; 211 | 212 | assert!(retrieved_data1.is_some()); 213 | assert!(retrieved_data2.is_some()); 214 | 215 | let retrieved_data1 = retrieved_data1.unwrap(); 216 | let retrieved_data2 = retrieved_data2.unwrap(); 217 | 218 | assert_eq!(retrieved_data1.hash, data_record1.hash); 219 | assert_eq!(retrieved_data1.data, data_record1.data); 220 | 221 | assert_eq!(retrieved_data2.hash, data_record2.hash); 222 | assert_eq!(retrieved_data2.data, data_record2.data); 223 | 224 | // Clean up MongoDB collections 225 | let 
merkle_cname = get_collection_name(MONGODB_MERKLE_NAME_PREFIX.to_string(), test_id); 226 | let data_cname = get_collection_name(MONGODB_DATA_NAME_PREFIX.to_string(), test_id); 227 | 228 | mongo_db.drop_collection::(MONGODB_DATABASE.to_string(), merkle_cname)?; 229 | mongo_db.drop_collection::(MONGODB_DATABASE.to_string(), data_cname)?; 230 | 231 | Ok(()) 232 | } 233 | } 234 | -------------------------------------------------------------------------------- /src/proof.rs: -------------------------------------------------------------------------------- 1 | use crate::circuits::babyjub::AltJubChip; 2 | use crate::circuits::{ 3 | // bls::Bls381PairChip, 4 | // bls::Bls381SumChip, 5 | bn256::Bn256PairChip, 6 | bn256::Bn256SumChip, 7 | host::{HostOpChip, HostOpConfig, HostOpSelector}, 8 | keccak256::KeccakChip, 9 | merkle::MerkleChip, 10 | poseidon::PoseidonChip, 11 | }; 12 | use halo2_proofs::circuit::floor_planner::FlatFloorPlanner; 13 | use halo2_proofs::pairing::bn256::Bn256; 14 | use halo2_proofs::{ 15 | arithmetic::FieldExt, 16 | circuit::Layouter, 17 | pairing::bn256::Fr, 18 | plonk::{Circuit, ConstraintSystem, Error}, 19 | }; 20 | use std::{fs::File, io::BufReader, marker::PhantomData, path::PathBuf}; 21 | 22 | use circuits_batcher::args::HashType::Poseidon; 23 | use circuits_batcher::args::OpenSchema; 24 | use circuits_batcher::proof::{ParamsCache, ProofGenerationInfo, ProofPieceInfo, ProvingKeyCache}; 25 | 26 | use crate::host::ExternalHostCallEntryTable; 27 | use serde::{Deserialize, Serialize}; 28 | 29 | pub const MERKLE_DEPTH: usize = 32; 30 | 31 | #[derive(clap::Parser)] 32 | struct ArgOpName { 33 | #[clap(arg_enum)] 34 | t: OpType, 35 | } 36 | #[derive(clap::ArgEnum, Clone, Debug, Serialize, Deserialize)] 37 | pub enum OpType { 38 | // BLS381PAIR, 39 | // BLS381SUM, 40 | BN256PAIR, 41 | BN256SUM, 42 | POSEIDONHASH, 43 | KECCAKHASH, 44 | MERKLE, 45 | JUBJUBSUM, 46 | } 47 | 48 | #[derive(Clone)] 49 | pub struct HostOpCircuit { 50 | shared_operands: Vec, 51 | 
shared_opcodes: Vec, 52 | helper: S::Helper, 53 | k: usize, 54 | _marker: PhantomData<(F, S)>, 55 | } 56 | 57 | impl Default for HostOpCircuit { 58 | fn default() -> Self { 59 | HostOpCircuit { 60 | shared_operands: Vec::::default(), 61 | shared_opcodes: Vec::::default(), 62 | k: 22, 63 | helper: S::Helper::default(), 64 | _marker: PhantomData, 65 | } 66 | } 67 | } 68 | 69 | #[derive(Clone)] 70 | pub struct HostCircuitConfig { 71 | hostconfig: HostOpConfig, 72 | selectconfig: C, 73 | } 74 | 75 | impl Circuit for HostOpCircuit { 76 | // Since we are using a single chip for everything, we can just reuse its config. 77 | type Config = HostCircuitConfig; 78 | type FloorPlanner = FlatFloorPlanner; 79 | 80 | fn without_witnesses(&self) -> Self { 81 | Self::default() 82 | } 83 | 84 | fn configure(meta: &mut ConstraintSystem) -> Self::Config { 85 | let shared_advices = vec![ 86 | meta.advice_column(), 87 | meta.advice_column(), 88 | meta.advice_column(), 89 | meta.advice_column(), 90 | meta.advice_column(), 91 | meta.advice_column(), 92 | ]; 93 | // We create the two advice columns that FieldChip uses for I/O. 
94 | HostCircuitConfig { 95 | hostconfig: HostOpChip::::configure(meta, &shared_advices), 96 | selectconfig: S::configure(meta, &shared_advices), 97 | } 98 | } 99 | 100 | fn synthesize(&self, config: Self::Config, layouter: impl Layouter) -> Result<(), Error> { 101 | let host_op_chip = 102 | HostOpChip::::construct(config.hostconfig.clone(), config.selectconfig.clone()); 103 | let (all_arg_cells, mut selector_chip) = layouter.assign_region( 104 | || "filter operands and opcodes", 105 | |region| { 106 | let mut offset = 0; 107 | let all_arg_cells = host_op_chip.assign( 108 | ®ion, 109 | self.k, 110 | &mut offset, 111 | &self.shared_operands, 112 | &self.shared_opcodes, 113 | )?; 114 | let mut selector_chip = S::construct(config.selectconfig.clone()); 115 | 116 | println!("total arg cells: {:?}", all_arg_cells.len()); 117 | println!("selector offset start at: {:?}", offset); 118 | selector_chip.synthesize(&mut offset, &all_arg_cells, ®ion, &self.helper)?; 119 | Ok((all_arg_cells, selector_chip)) 120 | }, 121 | )?; 122 | selector_chip.synthesize_separate(&all_arg_cells, &layouter)?; 123 | Ok(()) 124 | } 125 | } 126 | 127 | pub fn read_host_call_table(input_file: PathBuf) -> ExternalHostCallEntryTable { 128 | let file = File::open(input_file).expect("File does not exist"); 129 | let v: ExternalHostCallEntryTable = match serde_json::from_reader(BufReader::new(file)) { 130 | Err(e) => { 131 | println!("load json error {:?}", e); 132 | unreachable!(); 133 | } 134 | Ok(o) => o, 135 | }; 136 | v 137 | } 138 | 139 | pub fn build_host_circuit( 140 | v: &ExternalHostCallEntryTable, 141 | k: usize, 142 | helper: S::Helper, 143 | ) -> HostOpCircuit { 144 | // Prepare the private and public inputs to the circuit! 
145 | let shared_operands = v.0.iter().map(|x| Fr::from(x.value as u64)).collect(); 146 | let shared_opcodes = v.0.iter().map(|x| Fr::from(x.op as u64)).collect(); 147 | 148 | HostOpCircuit:: { 149 | shared_operands, 150 | shared_opcodes, 151 | k, 152 | helper, 153 | _marker: PhantomData, 154 | } 155 | } 156 | 157 | pub fn exec_create_host_proof( 158 | name: &str, 159 | k: usize, 160 | v: &ExternalHostCallEntryTable, 161 | opname: OpType, 162 | cache_folder: &PathBuf, 163 | param_folder: &PathBuf, 164 | ) { 165 | // Instantiate the circuit with the private inputs. 166 | // Given the correct public input, our circuit will verify. 167 | 168 | let mut params_cache = ParamsCache::::new(5, param_folder.clone()); 169 | let mut pkey_cache = ProvingKeyCache::new(5, param_folder.clone()); 170 | macro_rules! gen_proof { 171 | ($circuit: expr) => { 172 | let prover: ProofPieceInfo = 173 | ProofPieceInfo::new(format!("{}.{:?}", name, opname), 0, 0, None); 174 | let mut proof_gen_info = 175 | ProofGenerationInfo::new(format!("{}.{:?}", name, opname).as_str(), k, Poseidon); 176 | let proof = prover.exec_create_proof( 177 | &$circuit, 178 | &vec![], 179 | k, 180 | &mut pkey_cache, 181 | &mut params_cache, 182 | Poseidon, 183 | OpenSchema::Shplonk, 184 | ); 185 | prover.save_proof_data::(&vec![], &proof, cache_folder); 186 | //prover.mock_proof(k as u32); 187 | proof_gen_info.append_single_proof(prover); 188 | proof_gen_info.save(cache_folder); 189 | }; 190 | } 191 | 192 | match opname { 193 | // OpType::BLS381PAIR => { 194 | // let circuit = build_host_circuit::>(&v, k, ()); 195 | // gen_proof!(circuit); 196 | // } 197 | // OpType::BLS381SUM => { 198 | // let circuit = build_host_circuit::>(&v, k, ()); 199 | // gen_proof!(circuit); 200 | // } 201 | OpType::BN256PAIR => { 202 | let circuit = build_host_circuit::>(&v, k, ()); 203 | gen_proof!(circuit); 204 | } 205 | OpType::BN256SUM => { 206 | let circuit = build_host_circuit::>(&v, k, ()); 207 | gen_proof!(circuit); 208 | } 209 | 
OpType::POSEIDONHASH => { 210 | let circuit = build_host_circuit::>(&v, k, ()); 211 | gen_proof!(circuit); 212 | } 213 | OpType::MERKLE => { 214 | let circuit = build_host_circuit::>(&v, k, None); 215 | gen_proof!(circuit); 216 | } 217 | OpType::JUBJUBSUM => { 218 | let circuit = build_host_circuit::>(&v, k, ()); 219 | gen_proof!(circuit); 220 | } 221 | OpType::KECCAKHASH => { 222 | let circuit = build_host_circuit::>(&v, k, ()); 223 | gen_proof!(circuit); 224 | } 225 | }; 226 | 227 | println!("Proof generated."); 228 | } 229 | -------------------------------------------------------------------------------- /src/scripts/kvpair_db_upgrade/README: -------------------------------------------------------------------------------- 1 | # kvpair mongodb upgrade scripts 2 | ## How to run upgrade 3 | ``` 4 | npm install 5 | npm run upgrade 6 | ``` -------------------------------------------------------------------------------- /src/scripts/kvpair_db_upgrade/db_upgrade.ts: -------------------------------------------------------------------------------- 1 | import * as mongoDB from "mongodb"; 2 | 3 | const DB_VERSION = 1; 4 | const DB_CONFIG_COLLECTION_NAME = "DBConfig"; 5 | const DB_NAME = "zkwasmkvpair"; 6 | const DB_VERSION_NAME = "DBVersion"; 7 | 8 | /* 9 | * Current dbversion store in the DBConfig collection: {name: DBVersion, value: 1.0} 10 | */ 11 | async function getDBVersion(db: mongoDB.Db) : Promise { 12 | let db_config = db.collection(DB_CONFIG_COLLECTION_NAME); 13 | let query = { "name": DB_VERSION_NAME }; 14 | let verRecord = await db_config.findOne(query); 15 | if (verRecord) { 16 | let version = verRecord.value; 17 | return version; 18 | } 19 | else { 20 | return 0; 21 | } 22 | } 23 | 24 | async function updateDBVersion(db: mongoDB.Db) { 25 | let db_config = db.collection(DB_CONFIG_COLLECTION_NAME); 26 | let filter = { "name": DB_VERSION_NAME }; 27 | let replacement = { $set: { "value": DB_VERSION } } 28 | await db_config.updateOne(filter, replacement, { upsert: 
true }); 29 | } 30 | 31 | /* 32 | * Detail db upgrade script start here. 33 | */ 34 | 35 | //Add index hash as a unique index 36 | async function addIndexHashIndex(collection: mongoDB.Collection) { 37 | const nameRegExp = /^MERKLEDATA_/; 38 | const INDEX_NAME = "indexHashIndex"; 39 | if (collection.collectionName.match(nameRegExp)) { 40 | let indexExisted: boolean = await collection.indexExists(INDEX_NAME); 41 | if(indexExisted) { 42 | console.log("Delete pre-existing %s for %s", INDEX_NAME, collection.collectionName); 43 | collection.dropIndex(INDEX_NAME); 44 | } 45 | 46 | console.log("Begin add %s for %s", INDEX_NAME, collection.collectionName); 47 | await collection.createIndex( 48 | { 49 | "index": 1, 50 | "hash": -1 51 | }, 52 | { 53 | unique: true, 54 | name: INDEX_NAME 55 | } 56 | ); 57 | console.log("Finish add %s for %s", INDEX_NAME, collection.collectionName); 58 | } 59 | } 60 | 61 | (async () => { 62 | const client: mongoDB.MongoClient = new mongoDB.MongoClient('mongodb://localhost/'); 63 | await client.connect(); 64 | const db: mongoDB.Db = client.db(DB_NAME); 65 | const dbVersion = await getDBVersion(db); 66 | let collections: mongoDB.Collection[] = await db.collections(); 67 | for (const collection of collections) { 68 | if (dbVersion < 1) { 69 | await addIndexHashIndex(collection); 70 | } 71 | } 72 | 73 | //This must be run at the end of upgrade script. 
74 | await updateDBVersion(db); 75 | console.log("Upgrade finished"); 76 | client.close(); 77 | })(); -------------------------------------------------------------------------------- /src/scripts/kvpair_db_upgrade/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kvpair_db_upgrade", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "kvpair_db_upgrade", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "mongodb": "5.7.0" 12 | } 13 | }, 14 | "node_modules/@types/node": { 15 | "version": "20.4.2", 16 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.2.tgz", 17 | "integrity": "sha512-Dd0BYtWgnWJKwO1jkmTrzofjK2QXXcai0dmtzvIBhcA+RsG5h8R3xlyta0kGOZRNfL9GuRtb1knmPEhQrePCEw==" 18 | }, 19 | "node_modules/@types/webidl-conversions": { 20 | "version": "7.0.0", 21 | "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.0.tgz", 22 | "integrity": "sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog==" 23 | }, 24 | "node_modules/@types/whatwg-url": { 25 | "version": "8.2.2", 26 | "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz", 27 | "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==", 28 | "dependencies": { 29 | "@types/node": "*", 30 | "@types/webidl-conversions": "*" 31 | } 32 | }, 33 | "node_modules/bson": { 34 | "version": "5.4.0", 35 | "resolved": "https://registry.npmjs.org/bson/-/bson-5.4.0.tgz", 36 | "integrity": "sha512-WRZ5SQI5GfUuKnPTNmAYPiKIof3ORXAF4IRU5UcgmivNIon01rWQlw5RUH954dpu8yGL8T59YShVddIPaU/gFA==", 37 | "engines": { 38 | "node": ">=14.20.1" 39 | } 40 | }, 41 | "node_modules/ip": { 42 | "version": "2.0.0", 43 | "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", 44 | "integrity": 
"sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" 45 | }, 46 | "node_modules/memory-pager": { 47 | "version": "1.5.0", 48 | "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", 49 | "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", 50 | "optional": true 51 | }, 52 | "node_modules/mongodb": { 53 | "version": "5.7.0", 54 | "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-5.7.0.tgz", 55 | "integrity": "sha512-zm82Bq33QbqtxDf58fLWBwTjARK3NSvKYjyz997KSy6hpat0prjeX/kxjbPVyZY60XYPDNETaHkHJI2UCzSLuw==", 56 | "dependencies": { 57 | "bson": "^5.4.0", 58 | "mongodb-connection-string-url": "^2.6.0", 59 | "socks": "^2.7.1" 60 | }, 61 | "engines": { 62 | "node": ">=14.20.1" 63 | }, 64 | "optionalDependencies": { 65 | "saslprep": "^1.0.3" 66 | }, 67 | "peerDependencies": { 68 | "@aws-sdk/credential-providers": "^3.201.0", 69 | "@mongodb-js/zstd": "^1.1.0", 70 | "kerberos": "^2.0.1", 71 | "mongodb-client-encryption": ">=2.3.0 <3", 72 | "snappy": "^7.2.2" 73 | }, 74 | "peerDependenciesMeta": { 75 | "@aws-sdk/credential-providers": { 76 | "optional": true 77 | }, 78 | "@mongodb-js/zstd": { 79 | "optional": true 80 | }, 81 | "kerberos": { 82 | "optional": true 83 | }, 84 | "mongodb-client-encryption": { 85 | "optional": true 86 | }, 87 | "snappy": { 88 | "optional": true 89 | } 90 | } 91 | }, 92 | "node_modules/mongodb-connection-string-url": { 93 | "version": "2.6.0", 94 | "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz", 95 | "integrity": "sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==", 96 | "dependencies": { 97 | "@types/whatwg-url": "^8.2.1", 98 | "whatwg-url": "^11.0.0" 99 | } 100 | }, 101 | "node_modules/punycode": { 102 | "version": "2.3.0", 103 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", 104 
| "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==", 105 | "engines": { 106 | "node": ">=6" 107 | } 108 | }, 109 | "node_modules/saslprep": { 110 | "version": "1.0.3", 111 | "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", 112 | "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", 113 | "optional": true, 114 | "dependencies": { 115 | "sparse-bitfield": "^3.0.3" 116 | }, 117 | "engines": { 118 | "node": ">=6" 119 | } 120 | }, 121 | "node_modules/smart-buffer": { 122 | "version": "4.2.0", 123 | "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", 124 | "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", 125 | "engines": { 126 | "node": ">= 6.0.0", 127 | "npm": ">= 3.0.0" 128 | } 129 | }, 130 | "node_modules/socks": { 131 | "version": "2.7.1", 132 | "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", 133 | "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", 134 | "dependencies": { 135 | "ip": "^2.0.0", 136 | "smart-buffer": "^4.2.0" 137 | }, 138 | "engines": { 139 | "node": ">= 10.13.0", 140 | "npm": ">= 3.0.0" 141 | } 142 | }, 143 | "node_modules/sparse-bitfield": { 144 | "version": "3.0.3", 145 | "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", 146 | "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", 147 | "optional": true, 148 | "dependencies": { 149 | "memory-pager": "^1.0.2" 150 | } 151 | }, 152 | "node_modules/tr46": { 153 | "version": "3.0.0", 154 | "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", 155 | "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", 156 | "dependencies": { 157 | 
"punycode": "^2.1.1" 158 | }, 159 | "engines": { 160 | "node": ">=12" 161 | } 162 | }, 163 | "node_modules/webidl-conversions": { 164 | "version": "7.0.0", 165 | "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", 166 | "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==", 167 | "engines": { 168 | "node": ">=12" 169 | } 170 | }, 171 | "node_modules/whatwg-url": { 172 | "version": "11.0.0", 173 | "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", 174 | "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", 175 | "dependencies": { 176 | "tr46": "^3.0.0", 177 | "webidl-conversions": "^7.0.0" 178 | }, 179 | "engines": { 180 | "node": ">=12" 181 | } 182 | } 183 | }, 184 | "dependencies": { 185 | "@types/node": { 186 | "version": "20.4.2", 187 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.4.2.tgz", 188 | "integrity": "sha512-Dd0BYtWgnWJKwO1jkmTrzofjK2QXXcai0dmtzvIBhcA+RsG5h8R3xlyta0kGOZRNfL9GuRtb1knmPEhQrePCEw==" 189 | }, 190 | "@types/webidl-conversions": { 191 | "version": "7.0.0", 192 | "resolved": "https://registry.npmjs.org/@types/webidl-conversions/-/webidl-conversions-7.0.0.tgz", 193 | "integrity": "sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog==" 194 | }, 195 | "@types/whatwg-url": { 196 | "version": "8.2.2", 197 | "resolved": "https://registry.npmjs.org/@types/whatwg-url/-/whatwg-url-8.2.2.tgz", 198 | "integrity": "sha512-FtQu10RWgn3D9U4aazdwIE2yzphmTJREDqNdODHrbrZmmMqI0vMheC/6NE/J1Yveaj8H+ela+YwWTjq5PGmuhA==", 199 | "requires": { 200 | "@types/node": "*", 201 | "@types/webidl-conversions": "*" 202 | } 203 | }, 204 | "bson": { 205 | "version": "5.4.0", 206 | "resolved": "https://registry.npmjs.org/bson/-/bson-5.4.0.tgz", 207 | "integrity": 
"sha512-WRZ5SQI5GfUuKnPTNmAYPiKIof3ORXAF4IRU5UcgmivNIon01rWQlw5RUH954dpu8yGL8T59YShVddIPaU/gFA==" 208 | }, 209 | "ip": { 210 | "version": "2.0.0", 211 | "resolved": "https://registry.npmjs.org/ip/-/ip-2.0.0.tgz", 212 | "integrity": "sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ==" 213 | }, 214 | "memory-pager": { 215 | "version": "1.5.0", 216 | "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", 217 | "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==", 218 | "optional": true 219 | }, 220 | "mongodb": { 221 | "version": "5.7.0", 222 | "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-5.7.0.tgz", 223 | "integrity": "sha512-zm82Bq33QbqtxDf58fLWBwTjARK3NSvKYjyz997KSy6hpat0prjeX/kxjbPVyZY60XYPDNETaHkHJI2UCzSLuw==", 224 | "requires": { 225 | "bson": "^5.4.0", 226 | "mongodb-connection-string-url": "^2.6.0", 227 | "saslprep": "^1.0.3", 228 | "socks": "^2.7.1" 229 | } 230 | }, 231 | "mongodb-connection-string-url": { 232 | "version": "2.6.0", 233 | "resolved": "https://registry.npmjs.org/mongodb-connection-string-url/-/mongodb-connection-string-url-2.6.0.tgz", 234 | "integrity": "sha512-WvTZlI9ab0QYtTYnuMLgobULWhokRjtC7db9LtcVfJ+Hsnyr5eo6ZtNAt3Ly24XZScGMelOcGtm7lSn0332tPQ==", 235 | "requires": { 236 | "@types/whatwg-url": "^8.2.1", 237 | "whatwg-url": "^11.0.0" 238 | } 239 | }, 240 | "punycode": { 241 | "version": "2.3.0", 242 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.0.tgz", 243 | "integrity": "sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==" 244 | }, 245 | "saslprep": { 246 | "version": "1.0.3", 247 | "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", 248 | "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", 249 | "optional": true, 250 | "requires": { 251 | "sparse-bitfield": 
"^3.0.3" 252 | } 253 | }, 254 | "smart-buffer": { 255 | "version": "4.2.0", 256 | "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", 257 | "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==" 258 | }, 259 | "socks": { 260 | "version": "2.7.1", 261 | "resolved": "https://registry.npmjs.org/socks/-/socks-2.7.1.tgz", 262 | "integrity": "sha512-7maUZy1N7uo6+WVEX6psASxtNlKaNVMlGQKkG/63nEDdLOWNbiUMoLK7X4uYoLhQstau72mLgfEWcXcwsaHbYQ==", 263 | "requires": { 264 | "ip": "^2.0.0", 265 | "smart-buffer": "^4.2.0" 266 | } 267 | }, 268 | "sparse-bitfield": { 269 | "version": "3.0.3", 270 | "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", 271 | "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", 272 | "optional": true, 273 | "requires": { 274 | "memory-pager": "^1.0.2" 275 | } 276 | }, 277 | "tr46": { 278 | "version": "3.0.0", 279 | "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", 280 | "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", 281 | "requires": { 282 | "punycode": "^2.1.1" 283 | } 284 | }, 285 | "webidl-conversions": { 286 | "version": "7.0.0", 287 | "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", 288 | "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" 289 | }, 290 | "whatwg-url": { 291 | "version": "11.0.0", 292 | "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", 293 | "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", 294 | "requires": { 295 | "tr46": "^3.0.0", 296 | "webidl-conversions": "^7.0.0" 297 | } 298 | } 299 | } 300 | } 301 | -------------------------------------------------------------------------------- 
/src/scripts/kvpair_db_upgrade/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kvpair_db_upgrade", 3 | "version": "1.0.0", 4 | "private": true, 5 | "description": "upgrade kvpair mongodb", 6 | "dependencies": { 7 | "mongodb": "5.7.0" 8 | }, 9 | "scripts": { 10 | "upgrade": "node db_upgrade.js", 11 | "prepare": "npm run build", 12 | "build": "tsc" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/scripts/kvpair_db_upgrade/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | /* Basic Options */ 6 | // "incremental": true, /* Enable incremental compilation */ 7 | "target": "ES2015", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */ 8 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ 9 | // "lib": [], /* Specify library files to be included in the compilation. */ 10 | // "allowJs": true, /* Allow javascript files to be compiled. */ 11 | // "checkJs": true, /* Report errors in .js files. */ 12 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ 13 | // "declaration": true, /* Generates corresponding '.d.ts' file. */ 14 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ 15 | "sourceMap": true, /* Generates corresponding '.map' file. */ 16 | // "outFile": "./", /* Concatenate and emit output to single file. */ 17 | "rootDir": ".", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. 
*/ 18 | // "composite": true, /* Enable project compilation */ 19 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 20 | // "removeComments": true, /* Do not emit comments to output. */ 21 | // "noEmit": true, /* Do not emit outputs. */ 22 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 23 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 24 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 25 | 26 | /* Strict Type-Checking Options */ 27 | "strict": true, /* Enable all strict type-checking options. */ 28 | // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 29 | // "strictNullChecks": true, /* Enable strict null checks. */ 30 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 31 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 32 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 33 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 34 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 35 | 36 | /* Additional Checks */ 37 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 38 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 39 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 40 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. 
*/ 41 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 42 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */ 43 | // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ 44 | 45 | /* Module Resolution Options */ 46 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 47 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 48 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 49 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 50 | // "typeRoots": [], /* List of folders to include type definitions from. */ 51 | // "types": [], /* Type declaration files to be included in compilation. */ 52 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 53 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 54 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 55 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 56 | 57 | /* Source Map Options */ 58 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 59 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 60 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. 
*/ 61 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 62 | 63 | /* Experimental Options */ 64 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 65 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 66 | 67 | /* Advanced Options */ 68 | "skipLibCheck": true, /* Skip type checking of declaration files. */ 69 | "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/utils/macros.rs: -------------------------------------------------------------------------------- 1 | #[macro_export] 2 | macro_rules! constant_from { 3 | ($x: expr) => { 4 | halo2_proofs::plonk::Expression::Constant(F::from($x as u64)) 5 | }; 6 | } 7 | 8 | #[macro_export] 9 | macro_rules! constant_from_bn { 10 | ($x: expr) => { 11 | halo2_proofs::plonk::Expression::Constant(bn_to_field($x)) 12 | }; 13 | } 14 | 15 | #[macro_export] 16 | macro_rules! constant { 17 | ($x: expr) => { 18 | halo2_proofs::plonk::Expression::Constant($x) 19 | }; 20 | } 21 | 22 | #[macro_export] 23 | macro_rules! value_for_assign { 24 | ($x: expr) => { 25 | Ok($x) 26 | }; 27 | } 28 | 29 | #[macro_export] 30 | macro_rules! item_count { 31 | () => {0usize}; 32 | ($cut:tt nil $($tail:tt)*) => {1usize + item_count!($($tail)*)}; 33 | ($cut:tt $name:tt $($tail:tt)*) => {1usize + item_count!($($tail)*)}; 34 | } 35 | 36 | #[macro_export] 37 | macro_rules! 
table_item { 38 | ($row:expr, $col:expr, ) => {}; 39 | ($row:expr, $col:expr, | nil $($tail:tt)*) => { 40 | table_item!($row, $col, $($tail)*); 41 | }; 42 | ($row:expr, $col:expr, | $name:tt $($tail:tt)*) => { 43 | pub fn $name() -> GateCell { 44 | let index = $row * $col - 1usize - (item_count!($($tail)*)); 45 | GateCell { 46 | cell: [Self::typ(index), Self::col(index), Self::row(index)], 47 | name: String::from(stringify!($name)), 48 | } 49 | } 50 | table_item!($row, $col, $($tail)*); 51 | }; 52 | } 53 | 54 | #[macro_export] 55 | macro_rules! customized_circuits_expand { 56 | ($name:ident, $row:expr, $col:expr, $adv:expr, $fix:expr, $sel:expr, $($item:tt)* ) => { 57 | #[allow(dead_code)] 58 | #[derive(Clone, Debug)] 59 | pub struct $name { 60 | witness: [Column; $adv], 61 | selector: [Selector; $sel], 62 | fixed: [Column; $fix], 63 | } 64 | 65 | impl $name { 66 | pub fn new(witness: [Column; $adv], fixed: [Column; $fix], selector: [Selector; $sel]) -> Self { 67 | $name { 68 | witness, 69 | fixed, 70 | selector, 71 | } 72 | } 73 | 74 | pub fn get_expr(&self, meta: &mut VirtualCells, gate_cell: GateCell) -> Expression { 75 | let cell = gate_cell.cell; 76 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 77 | if cell[0] == 0 { // advice 78 | meta.query_advice(self.witness[cell[1]], Rotation(cell[2] as i32)) 79 | } else if cell[0] == 1 { // fix 80 | meta.query_fixed(self.fixed[cell[1]], Rotation(cell[2] as i32)) 81 | } else { // selector 82 | meta.query_selector(self.selector[cell[1]]) 83 | } 84 | } 85 | 86 | pub fn get_expr_with_offset(&self, meta: &mut VirtualCells, gate_cell: GateCell, offset: usize) -> Expression { 87 | let cell = gate_cell.cell; 88 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 89 | if cell[0] == 0 { // advice 90 | meta.query_advice(self.witness[cell[1]], Rotation((cell[2] + offset) as i32)) 91 | } else if cell[0] == 1 { // fix 92 | meta.query_fixed(self.fixed[cell[1]], 
Rotation((cell[2] + offset) as i32)) 93 | } else { // selector 94 | meta.query_selector(self.selector[cell[1]]) 95 | } 96 | } 97 | 98 | pub fn get_advice_column(&self, gate_cell: GateCell) -> Column { 99 | let cell = gate_cell.cell; 100 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 101 | if cell[0] == 0 { // advice 102 | self.witness[cell[1]] 103 | } else { 104 | unreachable!(); 105 | } 106 | } 107 | 108 | pub fn get_fixed_column(&self, gate_cell: GateCell) -> Column { 109 | let cell = gate_cell.cell; 110 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 111 | if cell[0] == 1 { // advice 112 | self.fixed[cell[1]] 113 | } else { 114 | unreachable!(); 115 | } 116 | } 117 | 118 | pub fn get_selector_column(&self, gate_cell: GateCell) -> Selector { 119 | let cell = gate_cell.cell; 120 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 121 | if cell[0] == 2 { // advice 122 | self.selector[cell[1]] 123 | } else { 124 | unreachable!(); 125 | } 126 | } 127 | 128 | pub fn assign_cell( 129 | &self, 130 | region: &Region, 131 | start_offset: usize, 132 | gate_cell: &GateCell, 133 | value: F, 134 | ) -> Result, Error> { 135 | let cell = gate_cell.cell; 136 | //println!("Assign Cell at {} {} {:?}", start_offset, gate_cell.name, value); 137 | if cell[0] == 0 { // advice 138 | let c = region.assign_advice( 139 | || gate_cell.name.clone(), 140 | self.witness[cell[1]], 141 | start_offset + cell[2], 142 | || value_for_assign!(value) 143 | )?; 144 | Ok(Limb::new(Some(c), value)) 145 | } else if cell[0] == 1 { // fix 146 | let c = region.assign_fixed( 147 | || format!("assign cell"), 148 | self.fixed[cell[1]], 149 | start_offset + cell[2], 150 | || value_for_assign!(value) 151 | )?; 152 | Ok(Limb::new(Some(c), value)) 153 | } else { // selector 154 | unreachable!() 155 | } 156 | } 157 | 158 | pub fn bind_cell( 159 | &self, 160 | region: &Region, 161 | start_offset: usize, 162 | cell: 
&GateCell, 163 | value: &Limb, 164 | ) -> Result, Error> { 165 | let limb = self.assign_cell(region, start_offset, cell, value.value.clone())?; 166 | value.cell.as_ref().map(|value| region.constrain_equal(limb.get_the_cell().cell(), value.cell())); 167 | Ok(limb) 168 | } 169 | 170 | 171 | pub fn enable_selector( 172 | &self, 173 | region: &Region, 174 | start_offset: usize, 175 | gate_cell: &GateCell, 176 | ) -> Result<(), Error> { 177 | assert!(gate_cell.cell[0] == 2); 178 | self.selector[gate_cell.cell[1]].enable(region, start_offset + gate_cell.cell[2]) 179 | } 180 | } 181 | 182 | impl $name { 183 | fn typ(index: usize) -> usize { 184 | let x = index % $col; 185 | #[allow(unused_comparisons)] 186 | if x < $adv { 187 | 0 188 | } else if x < $adv + $fix { 189 | 1 190 | } else { 191 | 2 192 | } 193 | } 194 | 195 | fn col(index: usize) -> usize { 196 | let x = index % $col; 197 | #[allow(unused_comparisons)] 198 | if x < $adv { 199 | x 200 | } else if x < $adv + $fix { 201 | x - $adv 202 | } else { 203 | x - $adv - $fix 204 | } 205 | } 206 | 207 | fn row(index: usize) -> usize { 208 | index / $col 209 | } 210 | 211 | table_item!($row, $col, $($item)*); 212 | } 213 | }; 214 | } 215 | 216 | #[macro_export] 217 | /// Define customize circuits with (nb_row, nb_adv, nb_fix, nb_expr) 218 | /// | adv | fix | sel | 219 | /// | a | b | c | 220 | /// | a_next | b_next | c_next | 221 | macro_rules! 
customized_circuits { 222 | ($name:ident, $row:expr, $adv:expr, $fix:expr, $sel:expr, $($item:tt)* ) => { 223 | customized_circuits_expand!($name, $row, ($fix + $sel + $adv), $adv, $fix, $sel, $($item)*); 224 | }; 225 | } 226 | 227 | #[cfg(test)] 228 | mod tests { 229 | /* 230 | use crate::customized_circuits; 231 | use crate::customized_circuits_expand; 232 | use crate::table_item; 233 | use crate::item_count; 234 | use crate::utils::GateCell; 235 | use crate::utils::Limb; 236 | use halo2_proofs::arithmetic::FieldExt; 237 | use halo2_proofs::plonk::{ 238 | Fixed, Column, Advice, 239 | Selector, Expression, VirtualCells, 240 | Error, 241 | }; 242 | use halo2_proofs::poly::Rotation; 243 | 244 | #[rustfmt::skip] 245 | customized_circuits!(TestConfig, 2, 2, 1, 1, 246 | | wc | b2 | c2 | d2 247 | | w1 | b3 | c3 | d3 248 | ); 249 | #[test] 250 | fn test_gate_macro() { 251 | //let config = TestConfig {}; 252 | //assert_eq!(r.to_vec(), r1); 253 | } 254 | */ 255 | } 256 | -------------------------------------------------------------------------------- /src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | use halo2_proofs::arithmetic::BaseExt; 2 | use halo2_proofs::arithmetic::FieldExt; 3 | use halo2_proofs::circuit::AssignedCell; 4 | use num_bigint::BigUint; 5 | 6 | #[derive(Clone, Debug)] 7 | pub struct Limb { 8 | pub cell: Option>, 9 | pub value: F, 10 | } 11 | 12 | impl Limb { 13 | pub fn new(cell: Option>, value: F) -> Self { 14 | Limb { cell, value } 15 | } 16 | pub fn get_the_cell(&self) -> AssignedCell { 17 | self.cell.as_ref().unwrap().clone() 18 | } 19 | } 20 | 21 | pub fn data_to_bytes(fs: Vec) -> Vec { 22 | let mut bytes = vec![]; 23 | for f in fs.iter() { 24 | let mut b = vec![]; 25 | f.write(&mut b).unwrap(); 26 | b.resize(16, 0); 27 | bytes.append(&mut b); 28 | } 29 | bytes 30 | } 31 | 32 | pub fn bytes_to_u64(bytes: &[u8; 32]) -> [u64; 4] { 33 | let r = bytes 34 | .to_vec() 35 | .chunks_exact(8) 36 | .map(|x| 
u64::from_le_bytes(x.try_into().unwrap())) 37 | .collect::>(); 38 | r.try_into().unwrap() 39 | } 40 | 41 | pub fn field_to_bytes(f: &F) -> [u8; 32] { 42 | let mut bytes: Vec = Vec::new(); 43 | f.write(&mut bytes).unwrap(); 44 | bytes.try_into().unwrap() 45 | } 46 | 47 | pub fn field_to_bn(f: &F) -> BigUint { 48 | let mut bytes: Vec = Vec::new(); 49 | f.write(&mut bytes).unwrap(); 50 | BigUint::from_bytes_le(&bytes[..]) 51 | } 52 | 53 | pub fn bn_to_field(bn: &BigUint) -> F { 54 | let mut bytes = bn.to_bytes_le(); 55 | bytes.resize(48, 0); 56 | let mut bytes = &bytes[..]; 57 | F::read(&mut bytes).unwrap() 58 | } 59 | 60 | pub fn bytes_to_field(bytes: &[u8; 32]) -> F { 61 | F::read(&mut &bytes.clone().to_vec()[..]).unwrap() 62 | } 63 | 64 | pub fn field_to_u32(f: &F) -> u32 { 65 | let mut bytes: Vec = Vec::new(); 66 | f.write(&mut bytes).unwrap(); 67 | u32::from_le_bytes(bytes[0..4].try_into().unwrap()) 68 | } 69 | 70 | pub fn field_to_u64(f: &F) -> u64 { 71 | let mut bytes: Vec = Vec::new(); 72 | f.write(&mut bytes).unwrap(); 73 | u64::from_le_bytes(bytes[0..8].try_into().unwrap()) 74 | } 75 | 76 | /* 77 | fn u8_to_bits(num: u8) -> Vec { 78 | let mut result = Vec::with_capacity(8); 79 | let mut n = num; 80 | for _ in 0..8 { 81 | result.push(n & 1 == 1); 82 | n >>= 1; 83 | } 84 | result 85 | } 86 | */ 87 | 88 | #[derive(Debug, Clone)] 89 | pub struct GateCell { 90 | pub cell: [usize; 3], 91 | pub name: String, 92 | } 93 | 94 | impl GateCell { 95 | pub fn next(&self, u: usize) -> Self { 96 | let mut r = self.clone(); 97 | r.cell[2] = r.cell[2] + u; 98 | r 99 | } 100 | } 101 | 102 | pub mod macros; 103 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_keccak_input_multi 2 | RUST_BACKTRACE=1 cargo run --release -- --input keccak256_test.json --opname keccakhash --output output --param params 3 | RUST_BACKTRACE=1 cargo run 
--release -- --input keccak256_test_multi.json --opname keccakhash --output output --param params 4 | -------------------------------------------------------------------------------- /test_bn254.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_bn256_sum_input 2 | cargo run --release --features cuda -- --input bn256sumtest.json --opname bn256sum --output output/ --param params/ 3 | #cargo test generate_bn256_pair_input 4 | #cargo run --release --features cuda -- --input bn256pairtest.json --opname bn256pair --output output/ --param params/ 5 | -------------------------------------------------------------------------------- /test_jubjub.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_jubjub_msm 2 | cargo run --release --features cuda -- --input jubjub.json --opname jubjubsum --output output/ --param params 3 | cargo run --release --features cuda -- --input jubjub_multi.json --opname jubjubsum --output output/ --param params 4 | 5 | 6 | 7 | -------------------------------------------------------------------------------- /test_keccak.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_keccak 2 | RUST_BACKTRACE=1 cargo run --release --features cuda -- -k 22 --input keccak256_test.json --opname keccakhash --output output --param params 3 | RUST_BACKTRACE=1 cargo run --release --features cuda -- -k 22 --input keccak256_test_multi.json --opname keccakhash --output output --param params 4 | -------------------------------------------------------------------------------- /test_merkle.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_kvpair_input 2 | cargo run --release --features cuda -- --input kvpair_test1.json --opname merkle --output output/ --param params/ 3 | cargo run --release --features cuda -- --input kvpair_test2.json --opname 
merkle --output output/ --param params/ 4 | -------------------------------------------------------------------------------- /test_poseidon.sh: -------------------------------------------------------------------------------- 1 | cargo test generate_poseidon 2 | cargo run --release --features cuda -- --input poseidontest.json --opname poseidonhash --output output/ --param params/ 3 | cargo run --release --features cuda -- --input poseidontest_multi.json --opname poseidonhash --output output/ --param params/ 4 | --------------------------------------------------------------------------------