├── README.md ├── solidity ├── .solhintignore ├── remappings.txt ├── slither.config.json ├── scripts │ ├── install_deps.sh │ ├── pre_forge.sh │ ├── doc_gen.sh │ ├── lint-and-test.sh │ └── check_coverage.sh ├── soldeer.lock ├── foundry.toml ├── .solhint.json ├── test │ ├── base │ │ ├── SwitchUtil.t.pre.sol │ │ ├── FieldUtil.sol │ │ ├── Queue.t.pre.sol │ │ └── MathUtil.t.sol │ └── proof_exprs │ │ ├── NotExpr.t.pre.sol │ │ ├── LiteralExpr.t.pre.sol │ │ ├── AddExpr.t.pre.sol │ │ └── SubtractExpr.t.pre.sol ├── README.md └── src │ └── base │ ├── SwitchUtil.pre.sol │ ├── Queue.pre.sol │ └── MathUtil.sol ├── rust-toolchain.toml ├── crates ├── proof-of-sql │ ├── examples │ │ ├── posql_db │ │ │ ├── hello_world.csv │ │ │ ├── run_example.sh │ │ │ ├── README.md │ │ │ └── commit_accessor.rs │ │ ├── vehicles │ │ │ └── vehicles.csv │ │ ├── space │ │ │ └── planets.csv │ │ ├── tech_gadget_prices │ │ │ └── tech_gadget_prices.csv │ │ ├── wood_types │ │ │ └── wood_types.csv │ │ ├── avocado-prices │ │ │ └── avocado-prices.csv │ │ ├── dinosaurs │ │ │ └── dinosaurs.csv │ │ ├── sushi │ │ │ └── fish.csv │ │ ├── programming_books │ │ │ └── programming_books.csv │ │ ├── plastics │ │ │ └── plastics.csv │ │ ├── brands │ │ │ └── brands.csv │ │ ├── rockets │ │ │ └── launch_vehicles.csv │ │ ├── dog_breeds │ │ │ └── dog_breeds.csv │ │ ├── hello_world │ │ │ └── README.md │ │ ├── countries │ │ │ └── countries_gdp.csv │ │ ├── books │ │ │ └── books.csv │ │ ├── stocks │ │ │ └── stocks.csv │ │ └── census │ │ │ └── census-income.csv │ ├── src │ │ ├── base │ │ │ ├── byte │ │ │ │ └── mod.rs │ │ │ ├── slice_ops │ │ │ │ ├── add_const_test.rs │ │ │ │ ├── add_const.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── mul_add_assign.rs │ │ │ │ ├── inner_product.rs │ │ │ │ ├── slice_cast.rs │ │ │ │ └── mul_add_assign_test.rs │ │ │ ├── bit │ │ │ │ ├── mod.rs │ │ │ │ ├── bit_mask_utils.rs │ │ │ │ ├── bit_mask_utils_test.rs │ │ │ │ └── bit_matrix.rs │ │ │ ├── rayon_cfg.rs │ │ │ ├── encode │ │ │ │ ├── mod.rs │ │ │ │ └── u256.rs │ │ │ ├── posql_time │ │ │ │ ├── mod.rs │ │ │ │ └── error.rs │ │ │ ├── scalar │ │ │ │ ├── error.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── test_scalar.rs │ │ │ │ └── mont_scalar_test.rs │ │ │ ├── database │ │ │ │ ├── error.rs │ │ │ │ ├── table_evaluation.rs │ │ │ │ ├── test_accessor.rs │ │ │ │ ├── expression_evaluation_error.rs │ │ │ │ ├── arrow_schema_utility.rs │ │ │ │ └── owned_column_error.rs │ │ │ ├── math │ │ │ │ ├── mod.rs │ │ │ │ └── decimal_tests.rs │ │ │ ├── proof │ │ │ │ ├── mod.rs │ │ │ │ ├── merlin_transcript_core.rs │ │ │ │ └── keccak256_transcript.rs │ │ │ ├── arrow │ │ │ │ ├── mod.rs │ │ │ │ └── record_batch_errors.rs │ │ │ ├── ref_into.rs │ │ │ ├── polynomial │ │ │ │ ├── mod.rs │ │ │ │ └── evaluation_vector.rs │ │ │ ├── map.rs │ │ │ ├── mod.rs │ │ │ ├── standard_binary_serde.rs │ │ │ └── serialize.rs │ │ ├── proof_primitive │ │ │ ├── hyperkzg │ │ │ │ ├── test_ppot_0080_02.bin │ │ │ │ ├── mod.rs │ │ │ │ ├── scalar.rs │ │ │ │ └── nova_engine.rs │ │ │ ├── dory │ │ │ │ ├── test_table_commitmet_do_not_modify.bin │ │ │ │ ├── state_test.rs │ │ │ │ ├── build_vmv_state.rs │ │ │ │ ├── fold_scalars_test.rs │ │ │ │ ├── dynamic_build_vmv_state.rs │ │ │ │ ├── offset_to_bytes.rs │ │ │ │ ├── dory_inner_product.rs │ │ │ │ ├── dory_public_setup.rs │ │ │ │ ├── extended_state_test.rs │ │ │ │ └── extended_dory_inner_product.rs │ │ │ ├── dynamic_matrix_utils │ │ │ │ └── mod.rs │ │ │ ├── inner_product │ │ │ │ ├── mod.rs │ │ │ │ ├── ristretto_point.rs │ │ │ │ └── curve_25519_scalar.rs │ │ │ ├── sumcheck │ │ │ │ ├── mod.rs │ │ │ │ └── prover_state.rs │ 
│ │ └── mod.rs │ │ ├── utils │ │ │ ├── mod.rs │ │ │ └── log.rs │ │ ├── lib.rs │ │ └── sql │ │ │ ├── proof_exprs │ │ │ ├── table_expr.rs │ │ │ ├── aliased_dyn_proof_expr.rs │ │ │ ├── column_expr_test.rs │ │ │ ├── proof_expr_test.rs │ │ │ └── mod.rs │ │ │ ├── evm_proof_plan │ │ │ ├── mod.rs │ │ │ └── error.rs │ │ │ ├── postprocessing │ │ │ ├── postprocessing_step.rs │ │ │ ├── mod.rs │ │ │ ├── select_postprocessing.rs │ │ │ ├── test_utility.rs │ │ │ └── select_postprocessing_test.rs │ │ │ ├── mod.rs │ │ │ ├── parse │ │ │ ├── mod.rs │ │ │ └── where_expr_builder.rs │ │ │ ├── proof │ │ │ ├── sumcheck_random_scalars.rs │ │ │ └── sumcheck_mle_evaluations_test.rs │ │ │ ├── proof_gadgets │ │ │ └── mod.rs │ │ │ ├── proof_plans │ │ │ ├── fold_util.rs │ │ │ └── mod.rs │ │ │ └── error.rs │ └── test_assets │ │ └── ppot_0080_10.bin ├── proof-of-sql-parser │ ├── build.rs │ ├── src │ │ ├── posql_time │ │ │ └── mod.rs │ │ ├── error.rs │ │ └── lib.rs │ ├── README.md │ └── Cargo.toml ├── proof-of-sql-benches │ ├── src │ │ ├── utils │ │ │ ├── mod.rs │ │ │ ├── jaeger_setup.rs │ │ │ └── results_io.rs │ │ └── README.md │ ├── Cargo.toml │ └── scripts │ │ └── run_benchmarks.sh ├── powers-of-tau-util │ └── Cargo.toml └── proof-of-sql-planner │ ├── src │ ├── postprocessing │ │ ├── postprocessing_step.rs │ │ ├── mod.rs │ │ ├── error.rs │ │ ├── expression_evaluation_error.rs │ │ └── select_postprocessing.rs │ ├── df_util.rs │ ├── lib.rs │ └── uppercase_column_visitor.rs │ └── Cargo.toml ├── clippy.toml ├── docs ├── ProofOfSQLBanner.png ├── QueryRequestDiagram.png ├── DataIngestionDiagram.png ├── HyperKZG_A100_2025.05.12.png ├── HyperKZG_A100_200k_2025.05.12.png ├── HyperKZG_multi_A100_2025.05.12.png ├── HyperKZG_multi_A100_200k_2025.05.12.png ├── protocols │ └── permutation.tex └── SQLSyntaxSpecification.md ├── .github ├── ISSUE_TEMPLATE │ ├── generic-issue.md │ ├── bug_report.md │ └── feature_request.md ├── workflows │ ├── bountyissuestojira.yaml │ ├── check-approver.yaml │ ├── stale.yml │ ├── release.yaml │ └── code-coverage.yml └── pull_request_template.md ├── .cargo └── config.toml ├── scripts ├── check_commits.sh └── run_ci_checks.sh ├── third_party └── license │ ├── vervolg.LICENSE │ └── integer-encoding.LICENSE ├── ci └── publish.sh ├── .gitignore └── package.json /README.md: -------------------------------------------------------------------------------- 1 | crates/proof-of-sql/README.md -------------------------------------------------------------------------------- /solidity/.solhintignore: -------------------------------------------------------------------------------- 1 | dependencies/ 2 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.85.0" 3 | -------------------------------------------------------------------------------- /solidity/remappings.txt: -------------------------------------------------------------------------------- 1 | forge-std/=dependencies/forge-std-1.9.5/src 2 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/posql_db/hello_world.csv: -------------------------------------------------------------------------------- 1 | a,b 2 | 1,hi 3 | 2,hello 4 | 3,there 5 | 2,world -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | 
missing-docs-in-crate-items = true 2 | check-private-items = true 3 | doc-valid-idents = ["DeFi"] 4 | -------------------------------------------------------------------------------- /docs/ProofOfSQLBanner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/ProofOfSQLBanner.png -------------------------------------------------------------------------------- /docs/QueryRequestDiagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/QueryRequestDiagram.png -------------------------------------------------------------------------------- /docs/DataIngestionDiagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/DataIngestionDiagram.png -------------------------------------------------------------------------------- /docs/HyperKZG_A100_2025.05.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/HyperKZG_A100_2025.05.12.png -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/byte/mod.rs: -------------------------------------------------------------------------------- 1 | mod byte_distribution; 2 | pub mod byte_matrix_utils; 3 | pub use byte_distribution::*; 4 | -------------------------------------------------------------------------------- /solidity/slither.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "detectors_to_exclude": "assembly,naming-conventions", 3 | "fail_on": "pedantic" 4 | } 5 | -------------------------------------------------------------------------------- /docs/HyperKZG_A100_200k_2025.05.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/HyperKZG_A100_200k_2025.05.12.png -------------------------------------------------------------------------------- /docs/HyperKZG_multi_A100_2025.05.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/HyperKZG_multi_A100_2025.05.12.png -------------------------------------------------------------------------------- /docs/HyperKZG_multi_A100_200k_2025.05.12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/docs/HyperKZG_multi_A100_200k_2025.05.12.png -------------------------------------------------------------------------------- /crates/proof-of-sql/test_assets/ppot_0080_10.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/crates/proof-of-sql/test_assets/ppot_0080_10.bin -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/build.rs: -------------------------------------------------------------------------------- 1 | //! This file is used to generate the parser from the grammar file. 
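// Editorial note: `lalrpop::process_root()` in `main` below scans `src/` for
// `*.lalrpop` grammar files and writes the generated parsers into `OUT_DIR`.
// A consuming module then pulls a generated parser in roughly like this --
// the `sql` grammar name and `Expr` nonterminal are illustrative, not taken
// from this crate:
//
//     use lalrpop_util::lalrpop_mod;
//     lalrpop_mod!(pub sql); // parser generated from src/sql.lalrpop
//     let ast = sql::ExprParser::new().parse("a + b");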
2 | extern crate lalrpop; 3 | 4 | fn main() { 5 | lalrpop::process_root().unwrap(); 6 | } 7 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/hyperkzg/test_ppot_0080_02.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/crates/proof-of-sql/src/proof_primitive/hyperkzg/test_ppot_0080_02.bin -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod benchmark_accessor; 2 | pub mod jaeger_setup; 3 | pub mod queries; 4 | pub mod random_util; 5 | pub mod results_io; 6 | use random_util::OptionalRandBound; 7 | -------------------------------------------------------------------------------- /solidity/scripts/install_deps.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | cd $SCRIPT_DIR/.. 5 | forge soldeer install 6 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/test_table_commitmet_do_not_modify.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tmahmood/sxt-proof-of-sql/main/crates/proof-of-sql/src/proof_primitive/dory/test_table_commitmet_do_not_modify.bin -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/generic-issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Generic Issue 3 | about: This is a template for a generic issue. 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | # Background and Motivation 11 | # Changes Required 12 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dynamic_matrix_utils/mod.rs: -------------------------------------------------------------------------------- 1 | /// Code for working with the structure of the dynamic matrix. 2 | pub mod matrix_structure; 3 | /// Code for working with vectors that interface with the dynamic matrix. 4 | pub mod standard_basis_helper; 5 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/add_const_test.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | #[test] 4 | fn test_add_const() { 5 | let mut a = vec![1, 2, 3, 4]; 6 | add_const(&mut a, 10); 7 | let b = vec![1 + 10, 2 + 10, 3 + 10, 4 + 10]; 8 | assert_eq!(a, b); 9 | } 10 | -------------------------------------------------------------------------------- /solidity/scripts/pre_forge.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | cd $SCRIPT_DIR/.. 
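# Editorial note: the `SCRIPT_DIR` idiom above resolves the directory that
# contains this script itself: `dirname -- "${BASH_SOURCE[0]}"` yields the
# script's path relative to the caller, and the `cd ... && pwd` subshell
# canonicalizes it to an absolute path, so `cd $SCRIPT_DIR/..` always lands
# on the solidity/ project root no matter where the script is invoked from.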
5 | scripts/preprocess_yul_imports.sh src 6 | scripts/preprocess_yul_imports.sh test 7 | forge "$@" 8 | -------------------------------------------------------------------------------- /solidity/scripts/doc_gen.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | cd $SCRIPT_DIR/.. 5 | scripts/pre_forge.sh doc 6 | cd docs 7 | sed -i '/\[output\.html\]/a mathjax-support = true' book.toml 8 | mdbook "$@" 9 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/inner_product/mod.rs: -------------------------------------------------------------------------------- 1 | /// TODO: add doc 2 | pub mod curve_25519_scalar; 3 | #[cfg(test)] 4 | mod curve_25519_tests; 5 | /// TODO: add doc 6 | #[cfg(feature = "blitzar")] 7 | pub mod inner_product_proof; 8 | /// TODO: add doc 9 | pub mod ristretto_point; 10 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains utilities for working with the library 2 | /// Parse DDLs and find bigdecimal columns 3 | pub mod parse; 4 | 5 | /// This module provides logging utilities for the library, including functions to log system memory usage. 6 | pub mod log; 7 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.x86_64-unknown-linux-gnu] 2 | linker = "/usr/bin/clang" # used to decrease build time 3 | rustflags = ["-Clink-arg=-fuse-ld=lld"] # used to decrease link time 4 | 5 | [alias] 6 | f = "fmt --all -- --config imports_granularity=Crate,group_imports=One" 7 | cl = "clippy --all-targets --all-features" 8 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/sumcheck/mod.rs: -------------------------------------------------------------------------------- 1 | mod proof; 2 | #[cfg(test)] 3 | mod proof_test; 4 | pub use proof::SumcheckProof; 5 | 6 | mod prover_state; 7 | pub(crate) use prover_state::ProverState; 8 | 9 | mod prover_round; 10 | use prover_round::prove_round; 11 | 12 | #[cfg(test)] 13 | mod test_cases; 14 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/bit/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod bit_mask_utils; 2 | #[cfg(test)] 3 | mod bit_mask_utils_test; 4 | 5 | mod bit_distribution; 6 | pub use bit_distribution::*; 7 | #[cfg(test)] 8 | mod bit_distribution_test; 9 | 10 | mod bit_matrix; 11 | pub use bit_matrix::*; 12 | #[cfg(test)] 13 | mod bit_matrix_test; 14 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(test, expect(clippy::missing_panics_doc))] 2 | #![doc = include_str!("../README.md")] 3 | #![cfg_attr(not(feature = "std"), no_std)] 4 | 5 | extern crate alloc; 6 | 7 | pub mod base; 8 | pub mod proof_primitive; 9 | pub mod sql; 10 | /// Utilities for working with the library 11 | pub mod utils; 12 | 
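// Editorial note: the crate is `no_std`-capable -- the `cfg_attr` line above
// disables the standard library unless the `std` feature is enabled, and
// `extern crate alloc` keeps heap types (`Vec`, `String`, ...) available in
// that mode, which is why modules in this crate import from `alloc::` rather
// than `std::`.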
-------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_exprs/table_expr.rs: -------------------------------------------------------------------------------- 1 | use crate::base::database::TableRef; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | /// Expression for an SQL table 5 | #[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone)] 6 | pub struct TableExpr { 7 | /// The `TableRef` for the table 8 | pub table_ref: TableRef, 9 | } 10 | -------------------------------------------------------------------------------- /solidity/soldeer.lock: -------------------------------------------------------------------------------- 1 | [[dependencies]] 2 | name = "forge-std" 3 | version = "1.9.5" 4 | url = "https://soldeer-revisions.s3.amazonaws.com/forge-std/1_9_5_21-12-2024_15:04:05_forge-std-1.9.zip" 5 | checksum = "57ada736f383289db77fac4472d48f820e7c98172cf9b01681b0c37065ce043f" 6 | integrity = "4753ffdfa0dde40878372b6a4d8e8fd1648b190b33996896c8b92f6f1680850f" 7 | -------------------------------------------------------------------------------- /solidity/foundry.toml: -------------------------------------------------------------------------------- 1 | [profile.default] 2 | src = "src" 3 | out = "out" 4 | solc = "0.8.28" 5 | libs = ["dependencies"] 6 | deny-warnings = true 7 | no-match-path = "**/*.pre.sol" 8 | no-match-coverage = "pre.sol" 9 | 10 | [doc] 11 | ignore = ["**/*.pre.sol"] 12 | 13 | [dependencies] 14 | forge-std = "1.9.5" 15 | 16 | [soldeer] 17 | remappings_generate = true 18 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/rayon_cfg.rs: -------------------------------------------------------------------------------- 1 | macro_rules! if_rayon { 2 | ($rayon_value: expr, $else_value: expr) => {{ 3 | #[cfg(feature = "rayon")] 4 | { 5 | ($rayon_value) 6 | } 7 | #[cfg(not(feature = "rayon"))] 8 | { 9 | ($else_value) 10 | } 11 | }}; 12 | } 13 | pub(crate) use if_rayon; 14 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/encode/mod.rs: -------------------------------------------------------------------------------- 1 | mod u256; 2 | pub(crate) use u256::U256; 3 | 4 | mod zigzag; 5 | pub(crate) use zigzag::ZigZag; 6 | 7 | #[cfg(test)] 8 | mod zigzag_test; 9 | 10 | mod scalar_varint; 11 | 12 | #[cfg(test)] 13 | mod scalar_varint_test; 14 | 15 | mod varint_trait; 16 | pub use varint_trait::VarInt; 17 | #[cfg(test)] 18 | mod varint_trait_test; 19 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/evm_proof_plan/mod.rs: -------------------------------------------------------------------------------- 1 | mod error; 2 | pub(crate) use error::{EVMProofPlanError, EVMProofPlanResult}; 3 | mod exprs; 4 | pub(crate) use exprs::EVMDynProofExpr; 5 | mod plans; 6 | mod proof_plan; 7 | #[cfg(test)] 8 | mod tests; 9 | 10 | pub use proof_plan::EVMProofPlan; 11 | 12 | #[cfg(all(test, feature = "hyperkzg_proof"))] 13 | mod evm_tests; 14 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/mod.rs: -------------------------------------------------------------------------------- 1 | //! TODO: add docs 2 | pub mod dory; 3 | /// Central location for any code that requires the use of a dynamic matrix (for now, hyrax and dynamic dory). 
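// Editorial sketch: how the `if_rayon!` macro from `rayon_cfg.rs` above is
// used at call sites -- this function is illustrative, not from the crate:
//
//     #[cfg(feature = "rayon")]
//     use rayon::prelude::*;
//
//     fn sum_squares(values: &[u64]) -> u64 {
//         // Expands to the parallel iterator when the `rayon` feature is
//         // enabled and to the plain serial iterator otherwise, so call
//         // sites stay feature-agnostic.
//         if_rayon!(values.par_iter(), values.iter()).map(|v| v * v).sum()
//     }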
4 | pub(super) mod dynamic_matrix_utils; 5 | /// TODO: add docs 6 | pub(crate) mod sumcheck; 7 | 8 | pub mod hyperkzg; 9 | 10 | /// TODO: Add docs 11 | pub mod inner_product; 12 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/vehicles/vehicles.csv: -------------------------------------------------------------------------------- 1 | id,make,model,year,price 2 | 1,Tesla,Model S,2020,79999 3 | 2,Ford,Mustang,2019,55999 4 | 3,Chevrolet,Camaro,2018,42999 5 | 4,BMW,3 Series,2021,41300 6 | 5,Audi,A4,2021,39900 7 | 6,Ford,Maverick,2024,27990 8 | 7,Hyundai,Santa Cruz,2024,29895 9 | 8,Toyota,Tacoma,2024,32995 10 | 9,Ram,1500 TRX,2024,98335 11 | 10,Ford,F-150,2025,39345 12 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/posql_time/mod.rs: -------------------------------------------------------------------------------- 1 | mod error; 2 | /// Errors related to time operations, including timezone and timestamp conversions. 3 | pub use error::PoSQLTimestampError; 4 | mod timezone; 5 | /// Defines a timezone as count of seconds offset from UTC 6 | pub use timezone::PoSQLTimeZone; 7 | mod unit; 8 | /// Defines the precision of the timestamp 9 | pub use unit::PoSQLTimeUnit; 10 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/space/planets.csv: -------------------------------------------------------------------------------- 1 | name,distance,dwarf,density 2 | Mercury,36,FALSE,5400 3 | Venus,67,FALSE,5200 4 | Earth,93,FALSE,5500 5 | Mars,140,FALSE,3900 6 | Ceres,260,TRUE,2200 7 | Jupiter,480,FALSE,1300 8 | Saturn,890,FALSE,700 9 | Uranus,1800,FALSE,1300 10 | Neptune,2800,FALSE,1600 11 | Pluto,3700,TRUE,1900 12 | Haumea,4000,TRUE,2000 13 | Makemake,4300,TRUE,2100 14 | Eris,6300,TRUE,1500 15 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/tech_gadget_prices/tech_gadget_prices.csv: -------------------------------------------------------------------------------- 1 | name,brand,category,releaseyear,price 2 | iPhone 13,Apple,Smartphone,2021,799 3 | Galaxy S21,Samsung,Smartphone,2021,799 4 | PlayStation 5,Sony,Game Console,2020,499 5 | Xbox Series X,Microsoft,Game Console,2020,499 6 | iPad Pro,Apple,Tablet,2021,799 7 | Surface Pro 7,Microsoft,Tablet,2019,749 8 | MacBook Air,Apple,Laptop,2020,999 9 | Pixel 5,Google,Smartphone,2020,699 -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/scalar/error.rs: -------------------------------------------------------------------------------- 1 | use alloc::string::String; 2 | use snafu::Snafu; 3 | 4 | #[derive(Snafu, Debug)] 5 | /// These errors occur when a scalar conversion fails. 6 | pub enum ScalarConversionError { 7 | #[snafu(display("Overflow error: {error}"))] 8 | /// This error occurs when a scalar is too large to be converted. 9 | Overflow { 10 | /// The underlying error 11 | error: String, 12 | }, 13 | } 14 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_exprs/aliased_dyn_proof_expr.rs: -------------------------------------------------------------------------------- 1 | use super::DynProofExpr; 2 | use serde::{Deserialize, Serialize}; 3 | use sqlparser::ast::Ident; 4 | 5 | /// A `DynProofExpr` with an alias. 
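// Editorial sketch: the snafu-derived errors shown earlier render their
// `display` attribute directly; an illustrative construction for
// `ScalarConversionError` (the message text is made up):
//
//     let err = ScalarConversionError::Overflow { error: "value exceeds the field modulus".to_string() };
//     assert_eq!(err.to_string(), "Overflow error: value exceeds the field modulus");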
6 | #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] 7 | pub struct AliasedDynProofExpr { 8 | /// The `DynProofExpr` to alias. 9 | pub expr: DynProofExpr, 10 | /// The alias for the expression. 11 | pub alias: Ident, 12 | } 13 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/error.rs: -------------------------------------------------------------------------------- 1 | use alloc::string::String; 2 | use snafu::Snafu; 3 | 4 | /// Errors encountered during the parsing process 5 | #[derive(Debug, Snafu, Eq, PartialEq)] 6 | pub enum ParseError { 7 | #[snafu(display("Invalid table reference: {}", table_reference))] 8 | /// Cannot parse the `TableRef` 9 | InvalidTableReference { 10 | /// The underlying error 11 | table_reference: String, 12 | }, 13 | } 14 | -------------------------------------------------------------------------------- /crates/powers-of-tau-util/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "powers-of-tau-util" 3 | edition.workspace = true 4 | exclude.workspace = true 5 | repository.workspace = true 6 | version.workspace = true 7 | license-file.workspace = true 8 | 9 | [dependencies] 10 | ark-bn254 = { version = "0.5.0" } 11 | ark-serialize = { version = "0.5.0" } 12 | blitzar = { version = "4.3.0" } 13 | nova-snark = { git = "https://github.com/microsoft/Nova.git", rev = "4386a91" } 14 | 15 | [lints] 16 | workspace = true 17 | -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/src/posql_time/mod.rs: -------------------------------------------------------------------------------- 1 | mod error; 2 | /// Errors related to time operations, including timezone and timestamp conversions. 
3 | pub use error::PoSQLTimestampError; 4 | mod timestamp; 5 | /// Defines an RFC3339-formatted timestamp 6 | pub use timestamp::PoSQLTimestamp; 7 | mod timezone; 8 | /// Defines a timezone as count of seconds offset from UTC 9 | pub use timezone::PoSQLTimeZone; 10 | mod unit; 11 | /// Defines the precision of the timestamp 12 | pub use unit::PoSQLTimeUnit; 13 | -------------------------------------------------------------------------------- /solidity/.solhint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "solhint:all", 3 | "rules": { 4 | "comprehensive-interface": "off", 5 | "foundry-test-functions": "off", 6 | "function-max-lines": "off", 7 | "gas-calldata-parameters": "off", 8 | "import-path-check": "off", 9 | "max-line-length": "off", 10 | "no-global-import": "off", 11 | "no-inline-assembly": "off", 12 | "one-contract-per-file": "off", 13 | "ordering": "off" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/wood_types/wood_types.csv: -------------------------------------------------------------------------------- 1 | id,name,hardness,density,color,common_use 2 | 1,Oak,7.0,0.75,Light brown,Furniture 3 | 2,Pine,3.5,0.55,Pale yellow,Construction 4 | 3,Maple,6.5,0.70,Cream,Flooring 5 | 4,Cherry,5.5,0.65,Reddish-brown,Cabinetry 6 | 5,Walnut,6.0,0.68,Dark brown,Gunstocks 7 | 6,Mahogany,4.5,0.60,Reddish,Musical instruments 8 | 7,Birch,5.0,0.62,White,Plywood 9 | 8,Cedar,2.5,0.45,Reddish,Outdoor furniture 10 | 9,Teak,7.5,0.80,Golden brown,Boat building 11 | 10,Ebony,9.0,1.10,Black,Piano keys -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/math/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module defines math utilities used in Proof of SQL. 2 | /// Handles parsing of decimal tokens received from the lexer into the native `Decimal75` Proof of SQL type. 3 | pub mod decimal; 4 | #[cfg(test)] 5 | mod decimal_tests; 6 | /// Module containing [I256] type. 7 | pub mod i256; 8 | mod log; 9 | pub(crate) use log::log2_up; 10 | /// TODO: add docs 11 | pub(crate) mod permutation; 12 | 13 | mod big_decimal_ext; 14 | pub(crate) use big_decimal_ext::BigDecimalExt; 15 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/postprocessing/postprocessing_step.rs: -------------------------------------------------------------------------------- 1 | use super::PostprocessingResult; 2 | use crate::base::{database::OwnedTable, scalar::Scalar}; 3 | use core::fmt::Debug; 4 | 5 | /// A trait for postprocessing steps that can be applied to an `OwnedTable`. 6 | pub trait PostprocessingStep<S: Scalar>: Debug + Send + Sync { 7 | /// Apply the postprocessing step to the `OwnedTable` and return the result. 8 | fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>>; 9 | } 10 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/postprocessing/postprocessing_step.rs: -------------------------------------------------------------------------------- 1 | use super::PostprocessingResult; 2 | use core::fmt::Debug; 3 | use proof_of_sql::base::{database::OwnedTable, scalar::Scalar}; 4 | 5 | /// A trait for postprocessing steps that can be applied to an `OwnedTable`. 
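// Editorial sketch: a minimal implementer of the `PostprocessingStep` trait
// shown above -- a pass-through step; the type is illustrative, not from the
// crate:
//
//     #[derive(Debug)]
//     struct IdentityPostprocessing;
//
//     impl<S: Scalar> PostprocessingStep<S> for IdentityPostprocessing {
//         fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>> {
//             Ok(owned_table) // return the table unchanged
//         }
//     }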
6 | pub trait PostprocessingStep<S: Scalar>: Debug + Send + Sync { 7 | /// Apply the postprocessing step to the `OwnedTable` and return the result. 8 | fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>>; 9 | } 10 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/posql_db/run_example.sh: -------------------------------------------------------------------------------- 1 | cd crates/proof-of-sql/examples/posql_db 2 | cargo run --features="arrow,utils" "$@" --example posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR 3 | cargo run --features="arrow,utils" "$@" --example posql_db append -t sxt.table -f hello_world.csv 4 | cargo run --features="arrow,utils" "$@" --example posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof 5 | cargo run --features="arrow,utils" "$@" --example posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof 6 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/avocado-prices/avocado-prices.csv: -------------------------------------------------------------------------------- 1 | year,price 2 | 1990,96 3 | 1991,100 4 | 1992,269 5 | 1993,149 6 | 1994,127 7 | 1995,153 8 | 1996,232 9 | 1997,127 10 | 1998,249 11 | 1999,240 12 | 2000,241 13 | 2001,90 14 | 2002,91 15 | 2003,169 16 | 2004,167 17 | 2005,56 18 | 2006,230 19 | 2007,174 20 | 2008,124 21 | 2009,92 22 | 2010,201 23 | 2011,167 24 | 2012,125 25 | 2013,147 26 | 2014,285 27 | 2015,154 28 | 2016,106 29 | 2017,223 30 | 2018,85 31 | 2019,145 32 | 2020,147 33 | 2021,68 34 | 2022,142 35 | 2023,281 36 | 2024,164 37 | 38 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/dinosaurs/dinosaurs.csv: -------------------------------------------------------------------------------- 1 | id,name,period,diet,length_meters,weight_tons 2 | 1,Tyrannosaurus Rex,Cretaceous,Carnivore,12.3,7.0 3 | 2,Stegosaurus,Jurassic,Herbivore,9.0,5.5 4 | 3,Triceratops,Cretaceous,Herbivore,8.5,10.0 5 | 4,Velociraptor,Cretaceous,Carnivore,1.8,0.015 6 | 5,Brachiosaurus,Jurassic,Herbivore,26.0,50.0 7 | 6,Ankylosaurus,Cretaceous,Herbivore,6.5,6.0 8 | 7,Spinosaurus,Cretaceous,Carnivore,15.0,7.5 9 | 8,Diplodocus,Jurassic,Herbivore,27.0,25.0 10 | 9,Allosaurus,Jurassic,Carnivore,9.7,2.3 11 | 10,Parasaurolophus,Cretaceous,Herbivore,10.0,3.5 -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/sushi/fish.csv: -------------------------------------------------------------------------------- 1 | name_en,name_ja,kind_en,kind_ja,price_per_pound 2 | Tuna,Maguro,Lean Red Meat,Akami,25 3 | Tuna,Maguro,Medium Fat Red Meat,Toro,65 4 | Tuna,Maguro,Fatty Red Meat,Otoro,115 5 | Bonito,Katsuo,Red Meat,Akami,20 6 | Yellowtail,Hamachi,Red Meat,Akami,27 7 | Salmon,Salmon,White Fish,Shiromi,17 8 | Sea Bream,Tai,White Fish,Shiromi,32 9 | Sea Bass,Suzuki,White Fish,Shiromi,28 10 | Mackerel,Aji,Silver Skinned,Hikarimono,14 11 | Sardine,Iwashi,Silver Skinned,Hikarimono,11 12 | Scallops,Hotate,Shellfish,Kai,26 13 | Ark-shell clams,Akagai,Shellfish,Kai,29 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: JayWhite2357 7 | 8 | --- 9 | 10 | 
**Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Please provide a minimal example of what causes the bug. This allows us to identify the root cause and fix it more quickly. 15 | 16 | **Expected behavior** 17 | A clear and concise description of what you expected to happen. 18 | 19 | **Additional context** 20 | Add any other context about the problem here. 21 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains the main logic for Proof of SQL. 2 | 3 | mod error; 4 | /// This module holds the [`EVMProofPlan`] struct and its implementation, which allows for EVM compatible serialization. 5 | pub mod evm_proof_plan; 6 | pub mod parse; 7 | /// [`AnalyzeError`] temporarily exists until we switch to using Datafusion Analyzer to handle type checking. 8 | pub use error::{AnalyzeError, AnalyzeResult}; 9 | pub mod postprocessing; 10 | pub mod proof; 11 | pub mod proof_exprs; 12 | pub mod proof_gadgets; 13 | pub mod proof_plans; 14 | mod scale; 15 | pub use scale::scale_cast_binary_op; 16 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/scalar/mod.rs: -------------------------------------------------------------------------------- 1 | /// This module contains the definition of the `Scalar` trait, which is used to represent the scalar field used in Proof of SQL. 2 | mod scalar; 3 | pub use scalar::Scalar; 4 | mod error; 5 | pub use error::ScalarConversionError; 6 | /// TODO add doc 7 | mod mont_scalar; 8 | #[cfg(test)] 9 | mod mont_scalar_test; 10 | pub use mont_scalar::MontScalar; 11 | /// Module for a test Scalar 12 | #[cfg(test)] 13 | pub mod test_scalar; 14 | #[cfg(test)] 15 | mod test_scalar_test; 16 | 17 | mod scalar_ext; 18 | #[cfg(test)] 19 | pub(crate) use scalar_ext::test_scalar_constants; 20 | pub use scalar_ext::ScalarExt; 21 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/bit/bit_mask_utils.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::ScalarExt; 2 | use bnum::types::U256; 3 | use core::ops::Shl; 4 | 5 | pub fn make_bit_mask<S: ScalarExt>(x: S) -> U256 { 6 | let x_as_u256 = x.into_u256_wrapping(); 7 | if x > S::MAX_SIGNED { 8 | x_as_u256 - S::into_u256_wrapping(S::MAX_SIGNED) + (U256::ONE.shl(255)) 9 | - S::into_u256_wrapping(S::MAX_SIGNED) 10 | - U256::ONE 11 | } else { 12 | x_as_u256 + (U256::ONE.shl(255)) 13 | } 14 | } 15 | 16 | pub fn is_bit_mask_negative_representation(bit_mask: U256) -> bool { 17 | bit_mask & (U256::ONE.shl(255)) == U256::ZERO 18 | } 19 | -------------------------------------------------------------------------------- /solidity/scripts/lint-and-test.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This script is used to lint and test the Solidity codebase. 3 | # It is not used in the CI pipeline, but should be identical to the CI pipeline's test job. 4 | # The CI pipeline is explicitly written out to make the pipeline more readable. 5 | set -euo pipefail 6 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 7 | cd $SCRIPT_DIR/.. 8 | find . 
-type f -name "*.post.sol" -delete 9 | scripts/install_deps.sh 10 | scripts/pre_forge.sh clean 11 | scripts/pre_forge.sh test --summary 12 | scripts/check_coverage.sh 13 | scripts/pre_forge.sh fmt --check 14 | solhint '**/*.sol' -w 0 15 | slither . 16 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/add_const.rs: -------------------------------------------------------------------------------- 1 | use crate::base::if_rayon; 2 | use core::ops::AddAssign; 3 | #[cfg(feature = "rayon")] 4 | use rayon::iter::{IndexedParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; 5 | 6 | /// This operation does `result[i] += to_add` for `i` in `0..result.len()`. 7 | pub fn add_const<T, S>(result: &mut [T], to_add: S) 8 | where 9 | T: Send + Sync + AddAssign + Copy, 10 | S: Into<T> + Sync + Copy, 11 | { 12 | if_rayon!( 13 | result.par_iter_mut().with_min_len(super::MIN_RAYON_LEN), 14 | result.iter_mut() 15 | ) 16 | .for_each(|res_i| { 17 | *res_i += to_add.into(); 18 | }); 19 | } 20 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/proof/mod.rs: -------------------------------------------------------------------------------- 1 | //! Contains the transcript protocol used to construct a proof, 2 | //! as well as an error type which can occur when verification fails. 3 | mod error; 4 | pub use error::{PlaceholderError, PlaceholderResult, ProofError, ProofSizeMismatch}; 5 | 6 | /// Contains an extension trait for `merlin::Transcript`, which is used to construct a proof. 7 | #[cfg(any(test, feature = "blitzar"))] 8 | mod merlin_transcript_core; 9 | 10 | mod transcript; 11 | pub use transcript::Transcript; 12 | 13 | mod transcript_core; 14 | #[cfg(test)] 15 | mod transcript_core_test; 16 | 17 | mod keccak256_transcript; 18 | pub use keccak256_transcript::Keccak256Transcript; 19 | -------------------------------------------------------------------------------- /solidity/test/base/SwitchUtil.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import "../../src/base/Errors.sol"; 7 | import {SwitchUtil} from "../../src/base/SwitchUtil.pre.sol"; 8 | 9 | contract SwitchUtilTest is Test { 10 | /// forge-config: default.allow_internal_expect_revert = true 11 | function testFuzzCaseConst(uint256 lhs, uint256 rhs) public { 12 | if (lhs != rhs) { 13 | vm.expectRevert(Errors.IncorrectCaseConst.selector); 14 | } 15 | SwitchUtil.__caseConst(lhs, rhs); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/programming_books/programming_books.csv: -------------------------------------------------------------------------------- 1 | title,author,publication_year,genre,rating 2 | The Pragmatic Programmer,Andrew Hunt,1999,Programming,4.5 3 | Clean Code,Robert C. Martin,2008,Programming,4.7 4 | The Clean Coder,Robert C. Martin,2011,Programming,4.6 5 | Design Patterns,Erich Gamma,1994,Software Engineering,4.8 6 | Refactoring,Martin Fowler,1999,Programming,4.5 7 | Effective Java,Joshua Bloch,2008,Programming,4.7 8 | Introduction to Algorithms,Thomas H. 
Cormen,2009,Computer Science,4.8 9 | Code Complete,Steve McConnell,2004,Programming,4.6 10 | The Mythical Man-Month,Fred Brooks,1975,Software Engineering,4.3 11 | Algorithms,Robert Sedgewick,1983,Computer Science,4.5 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context about the feature request here. 21 | -------------------------------------------------------------------------------- /solidity/scripts/check_coverage.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) 4 | cd $SCRIPT_DIR/.. 5 | scripts/pre_forge.sh coverage -q --report lcov 6 | gawk -i inplace ' 7 | BEGIN { e=0; p="" } 8 | $0 ~ /exclude_coverage_start/ { e=1; p=""; next } 9 | $0 ~ /exclude_coverage_stop/ { e=0; next } 10 | e==0 { if(p) print p; p=$0 } 11 | END { if(p) print p } 12 | ' lcov.info 13 | percentage=$(genhtml lcov.info -o coverage-report --branch-coverage | grep -o "[0-9\.]*%" | uniq | tr -d '\n') 14 | if [ $percentage != "100.0%" ]; then 15 | >&2 echo "missing test coverage!" 16 | exit 1 17 | fi 18 | echo "100% test coverage!" 19 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/postprocessing/mod.rs: -------------------------------------------------------------------------------- 1 | /// Proof of SQL Postprocessing. Used when the last step of the logical plan is an unprovable projection. 2 | mod error; 3 | pub use error::{PostprocessingError, PostprocessingResult}; 4 | mod expression_evaluation; 5 | pub(crate) use expression_evaluation::evaluate_expr; 6 | mod expression_evaluation_error; 7 | pub use expression_evaluation_error::{ExpressionEvaluationError, ExpressionEvaluationResult}; 8 | #[cfg(test)] 9 | mod expression_evaluation_test; 10 | mod postprocessing_step; 11 | pub use postprocessing_step::PostprocessingStep; 12 | mod select_postprocessing; 13 | pub use select_postprocessing::SelectPostprocessing; 14 | #[cfg(test)] 15 | mod select_postprocessing_test; 16 | -------------------------------------------------------------------------------- /solidity/README.md: -------------------------------------------------------------------------------- 1 | # Development Dependencies Installation 2 | 1. `forge` 3 | ```bash 4 | curl -L https://foundry.paradigm.xyz | bash 5 | foundryup 6 | ``` 7 | 2. `lcov`/`genhtml` 8 | ```bash 9 | sudo apt install lcov 10 | ``` 11 | 3. `solhint` 12 | ```bash 13 | npm install -g solhint 14 | ``` 15 | 4. `slither` 16 | ```bash 17 | pipx install slither-analyzer 18 | ``` 19 | 5. 
`aderyn` (Recommended) 20 | ```bash 21 | npm install -g @cyfrin/aderyn 22 | ``` 23 | 24 | # Build and Test 25 | To generate the solidity library artifact, 26 | 27 | ```bash 28 | ./scripts/preprocess_yul_imports.sh src 29 | ``` 30 | 31 | The final artifact is `./src/verifier/Verifier.t.post.sol`. 32 | 33 | 34 | To run all tests and lints: 35 | 36 | ```bash 37 | ./scripts/lint-and-test.sh 38 | ``` -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/plastics/plastics.csv: -------------------------------------------------------------------------------- 1 | name,code,density,biodegradable 2 | Polyethylene Terephthalate (PET),1,1.38,FALSE 3 | High-Density Polyethylene (HDPE),2,0.97,FALSE 4 | Polyvinyl Chloride (PVC),3,1.40,FALSE 5 | Low-Density Polyethylene (LDPE),4,0.92,FALSE 6 | Polypropylene (PP),5,0.90,FALSE 7 | Polystyrene (PS),6,1.05,FALSE 8 | Polylactic Acid (PLA),7,1.25,TRUE 9 | Polybutylene Adipate Terephthalate (PBAT),7,1.26,TRUE 10 | Polyhydroxyalkanoates (PHA),7,1.24,TRUE 11 | Polybutylene Succinate (PBS),7,1.26,TRUE 12 | Acrylic (PMMA),7,1.18,FALSE 13 | Polycarbonate (PC),7,1.20,FALSE 14 | Polyurethane (PU),7,1.05,FALSE 15 | Acrylonitrile Butadiene Styrene (ABS),7,1.04,FALSE 16 | Polyamide (Nylon),7,1.15,FALSE 17 | Polyethylene Furanoate (PEF),7,1.43,TRUE 18 | Thermoplastic Starch (TPS),7,1.35,TRUE 19 | Cellulose Acetate,7,1.30,TRUE -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/posql_db/README.md: -------------------------------------------------------------------------------- 1 | # `posql_db` 2 | 3 | Example demonstrating an implementation of a simple csv-backed database with Proof of SQL capabilities. 4 | 5 | ## Install 6 | Run `cargo install --example posql_db --path crates/proof-of-sql` to install the example. 7 | 8 | > [!NOTE] 9 | > To run this example without the `blitzar` (i.e., CPU only) feature 10 | > ```bash 11 | > cargo install --example posql_db --path crates/proof-of-sql --no-default-features --features="cpu-perf" 12 | > ``` 13 | 14 | ## Quick Start Example 15 | Run the following 16 | ```bash 17 | posql_db create -t sxt.table -c a,b -d BIGINT,VARCHAR 18 | posql_db append -t sxt.table -f hello_world.csv 19 | posql_db prove -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof 20 | posql_db verify -q "SELECT b FROM sxt.table WHERE a = 2" -f hello.proof 21 | ``` -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/arrow/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module provides conversions and utilities for working with Arrow data structures. 2 | 3 | /// Module for handling conversion from Arrow arrays to columns. 4 | pub mod arrow_array_to_column_conversion; 5 | 6 | /// Module for converting between owned and Arrow data structures. 7 | pub mod owned_and_arrow_conversions; 8 | 9 | #[cfg(test)] 10 | /// Tests for owned and Arrow conversions. 11 | mod owned_and_arrow_conversions_test; 12 | 13 | /// Module for converting record batches. 14 | pub mod record_batch_conversion; 15 | 16 | /// Module for record batch error definitions. 17 | pub mod record_batch_errors; 18 | 19 | /// Module for scalar and i256 conversions. 20 | pub mod scalar_and_i256_conversions; 21 | 22 | /// Module for handling conversions between columns and Arrow arrays. 
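// Editorial sketch: the conversion modules above bridge Proof of SQL's
// `OwnedTable` and Arrow's `RecordBatch`; assuming the `TryFrom` impls
// provided by `owned_and_arrow_conversions`, the round trip is roughly:
//
//     let batch = RecordBatch::try_from(owned_table)?; // owned -> Arrow
//     let owned = OwnedTable::try_from(batch)?;        // Arrow -> owned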
23 | pub mod column_arrow_conversions; 24 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/brands/brands.csv: -------------------------------------------------------------------------------- 1 | name,country,founded,revenue 2 | Apple,United States,1976,365.82 3 | Samsung,South Korea,1938,200.73 4 | Microsoft,United States,1975,198.27 5 | Amazon,United States,1994,513.98 6 | Google,United States,1998,282.84 7 | Toyota,Japan,1937,278.52 8 | Coca-Cola,United States,1886,38.66 9 | Mercedes-Benz,Germany,1926,154.31 10 | McDonald's,United States,1955,19.2 11 | Nike,United States,1964,44.54 12 | Louis Vuitton,France,1854,75.98 13 | BMW,Germany,1916,121.87 14 | Disney,United States,1923,67.42 15 | Honda,Japan,1948,129.21 16 | Pepsi,United States,1893,79.47 17 | Adidas,Germany,1949,21.23 18 | Nestle,Switzerland,1866,94.42 19 | Unilever,Netherlands,1929,58.26 20 | Sony,Japan,1946,84.89 21 | Volkswagen,Germany,1937,250.2 22 | IKEA,Sweden,1943,44.6 23 | Starbucks,United States,1971,23.52 24 | Zara,Spain,1974,27.72 25 | H&M,Sweden,1947,21.73 26 | Gucci,Italy,1921,10.34 -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/rockets/launch_vehicles.csv: -------------------------------------------------------------------------------- 1 | name,country,year,mtow 2 | Saturn V,USA,1967,2976000 3 | Falcon Heavy,USA,2018,1420788 4 | Space Shuttle,USA,1981,2041167 5 | Energia,USSR,1987,2400000 6 | Ariane 5,Europe,1996,780000 7 | Delta IV Heavy,USA,2004,733400 8 | Long March 5,China,2016,869000 9 | Proton,USSR/Russia,1965,705000 10 | Atlas V,USA,2002,546700 11 | H-IIA,Japan,2001,445000 12 | Soyuz,USSR/Russia,1966,308000 13 | Falcon 9,USA,2010,549054 14 | Vega,Europe,2012,137000 15 | PSLV,India,1993,320000 16 | GSLV Mk III,India,2017,640000 17 | Titan II,USA,1962,153800 18 | Angara A5,Russia,2014,1335000 19 | Delta II,USA,1989,231870 20 | Electron,New Zealand,2017,12500 21 | Antares,USA,2013,240000 22 | Zenit,USSR/Ukraine,1985,462000 23 | N1,USSR,1969,2735000 24 | New Glenn,USA,2024,1300000 25 | Redstone,USA,1953,29500 26 | Black Arrow,UK,1971,18800 27 | Diamant,France,1965,18000 28 | Pegasus,USA,1990,23300 29 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/ref_into.rs: -------------------------------------------------------------------------------- 1 | //! Module holding the `RefInto` trait. 2 | 3 | /// A reference-to-value conversion that does not consume the input value. 4 | /// 5 | /// This is automatically implemented for all `S` where `&S` implements `Into<T>`. 6 | /// 7 | /// This is primarily useful when defining subtraits. For example, here is a trait that requires 8 | /// the implementation of conversions to and from `usize` for both values and references: 9 | /// ```ignore 10 | /// pub trait SubTrait: From<usize> + Into<usize> + for<'a> From<&'a usize> + RefInto<usize> { 11 | /// ... 12 | /// } 13 | /// ``` 14 | pub trait RefInto<T> { 15 | /// Converts a reference to this type into the (usually inferred) input type. 
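// Editorial sketch: any type whose reference already converts via `Into`
// gets `RefInto` for free from the blanket impl below; an illustrative use
// with a made-up newtype:
//
//     struct Meters(f64);
//     impl From<&Meters> for f64 {
//         fn from(m: &Meters) -> f64 { m.0 }
//     }
//     let d = Meters(3.0);
//     let raw: f64 = d.ref_into(); // reads through the reference; `d` is not consumed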
16 | fn ref_into(&self) -> T; 17 | } 18 | impl<T, S> RefInto<T> for S 19 | where 20 | for<'a> &'a S: Into<T>, 21 | { 22 | fn ref_into(&self) -> T { 23 | self.into() 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/polynomial/mod.rs: -------------------------------------------------------------------------------- 1 | mod composite_polynomial; 2 | pub use composite_polynomial::CompositePolynomial; 3 | #[cfg(test)] 4 | mod composite_polynomial_test; 5 | 6 | mod interpolate; 7 | #[cfg(test)] 8 | mod interpolate_test; 9 | #[expect(unused_imports)] 10 | pub use interpolate::{interpolate_evaluations_to_reverse_coefficients, interpolate_uni_poly}; 11 | 12 | mod evaluation_vector; 13 | pub use evaluation_vector::compute_evaluation_vector; 14 | #[cfg(test)] 15 | mod evaluation_vector_test; 16 | 17 | mod lagrange_basis_evaluation; 18 | pub use lagrange_basis_evaluation::{ 19 | compute_rho_eval, compute_truncated_lagrange_basis_inner_product, 20 | compute_truncated_lagrange_basis_sum, 21 | }; 22 | #[cfg(test)] 23 | mod lagrange_basis_evaluation_test; 24 | 25 | mod multilinear_extension; 26 | pub use multilinear_extension::MultilinearExtension; 27 | #[cfg(test)] 28 | mod multilinear_extension_test; 29 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/dog_breeds/dog_breeds.csv: -------------------------------------------------------------------------------- 1 | name,origin,size,lifespan 2 | Labrador Retriever,Canada,Large,12 3 | German Shepherd,Germany,Large,11 4 | Chihuahua,Mexico,Small,14 5 | Siberian Husky,Russia,Medium,12 6 | Poodle,France,Medium,14 7 | Shiba Inu,Japan,Small,13 8 | Australian Shepherd,United States,Medium,13 9 | Bernese Mountain Dog,Switzerland,Large,8 10 | Beagle,United Kingdom,Small,12 11 | Rottweiler,Germany,Large,9 12 | Dachshund,Germany,Small,12 13 | Golden Retriever,United Kingdom,Large,11 14 | Bulldog,United Kingdom,Medium,8 15 | Pug,China,Small,12 16 | Great Dane,Germany,Large,8 17 | Border Collie,United Kingdom,Medium,12 18 | Akita,Japan,Large,10 19 | Corgi,United Kingdom,Small,12 20 | Doberman Pinscher,Germany,Large,10 21 | Boxer,Germany,Medium,10 22 | Shih Tzu,China,Small,13 23 | Irish Setter,Ireland,Large,12 24 | Alaskan Malamute,United States,Large,11 25 | Cocker Spaniel,United Kingdom,Medium,12 26 | Vizsla,Hungary,Medium,12 -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/map.rs: -------------------------------------------------------------------------------- 1 | pub(crate) type IndexMap<K, V> = 2 | indexmap::IndexMap<K, V, core::hash::BuildHasherDefault<ahash::AHasher>>; 3 | pub(crate) type IndexSet<T> = indexmap::IndexSet<T, core::hash::BuildHasherDefault<ahash::AHasher>>; 4 | 5 | /// Create an [`IndexMap`][self::IndexMap] from a list of key-value pairs 6 | macro_rules! indexmap { 7 | ($($key:expr => $value:expr,)+) => { indexmap::indexmap_with_default!{ahash::AHasher; $($key => $value),+} }; 8 | ($($key:expr => $value:expr),*) => { indexmap::indexmap_with_default!{ahash::AHasher; $($key => $value),*} }; 9 | } 10 | 11 | /// Create an [`IndexSet`][self::IndexSet] from a list of values 12 | macro_rules! 
indexset { 13 | ($($value:expr,)+) => { indexmap::indexset_with_default!{ahash::AHasher; $($value),+} }; 14 | ($($value:expr),*) => { indexmap::indexset_with_default!{ahash::AHasher; $($value),*} }; 15 | } 16 | 17 | pub(crate) use indexmap; 18 | pub(crate) use indexset; 19 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/parse/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains conversion of intermediate AST to provable AST and a non-provable component if necessary. 2 | mod error; 3 | 4 | pub use error::ConversionError; 5 | pub(crate) use error::ConversionResult; 6 | 7 | mod enriched_expr; 8 | pub(crate) use enriched_expr::EnrichedExpr; 9 | 10 | #[cfg(all(test, feature = "blitzar"))] 11 | mod query_expr_tests; 12 | 13 | mod query_expr; 14 | pub use query_expr::QueryExpr; 15 | 16 | mod filter_exec_builder; 17 | pub(crate) use filter_exec_builder::FilterExecBuilder; 18 | 19 | /// TODO: add docs 20 | pub(crate) mod query_context; 21 | pub(crate) use query_context::QueryContext; 22 | 23 | mod query_context_builder; 24 | pub(crate) use query_context_builder::QueryContextBuilder; 25 | 26 | mod dyn_proof_expr_builder; 27 | pub(crate) use dyn_proof_expr_builder::DynProofExprBuilder; 28 | 29 | mod where_expr_builder; 30 | pub(crate) use where_expr_builder::WhereExprBuilder; 31 | #[cfg(test)] 32 | mod where_expr_builder_tests; 33 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | publish = true 3 | name = "proof-of-sql-planner" 4 | version = { workspace = true } 5 | edition = { workspace = true } 6 | repository = { workspace = true } 7 | description = "SQL query planner for proof-of-sql." 
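# Editorial note: every `{ workspace = true }` entry in this manifest inherits
# its value (package metadata above, dependency versions below) from the
# workspace-level Cargo.toml, keeping all crates in the repository pinned to
# one consistent set of versions.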
8 | exclude = { workspace = true } 9 | license-file = { workspace = true } 10 | 11 | [lib] 12 | crate-type = ["cdylib", "rlib"] 13 | 14 | [dependencies] 15 | ahash = { workspace = true } 16 | arrow = { workspace = true } 17 | datafusion = { workspace = true } 18 | # getrandom and uuid must be compiled with js feature 19 | getrandom = { workspace = true, features = ["js"] } 20 | indexmap = { workspace = true } 21 | proof-of-sql = { workspace = true, features = ["arrow"] } 22 | serde = { workspace = true } 23 | snafu = { workspace = true } 24 | sqlparser = { workspace = true } 25 | uuid = { workspace = true, features = ["js"] } 26 | 27 | [dev-dependencies] 28 | ark-std = { workspace = true } 29 | bumpalo = { workspace = true } 30 | 31 | [lints] 32 | workspace = true 33 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/df_util.rs: -------------------------------------------------------------------------------- 1 | use arrow::datatypes::{DataType, Field, Schema}; 2 | use datafusion::{ 3 | catalog::TableReference, 4 | common::{Column, DFSchema}, 5 | logical_expr::Expr, 6 | }; 7 | 8 | /// Create an `Expr::Column` from full table name and column 9 | pub(crate) fn df_column(table_name: &str, column: &str) -> Expr { 10 | Expr::Column(Column::new( 11 | Some(TableReference::from(table_name)), 12 | column.to_string(), 13 | )) 14 | } 15 | 16 | /// Create a `DFSchema` from table name, column name and data type pairs 17 | /// 18 | /// Note that nulls are not allowed in the schema 19 | pub(crate) fn df_schema(table_name: &str, pairs: Vec<(&str, DataType)>) -> DFSchema { 20 | let arrow_schema = Schema::new( 21 | pairs 22 | .into_iter() 23 | .map(|(name, data_type)| Field::new(name, data_type, false)) 24 | .collect::<Vec<_>>(), 25 | ); 26 | DFSchema::try_from_qualified_schema(table_name, &arrow_schema).unwrap() 27 | } 28 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/hello_world/README.md: -------------------------------------------------------------------------------- 1 | # Proof of SQL "Hello World" 2 | 3 | This example demonstrates generating and verifying a proof of the query `SELECT b FROM table WHERE a = 2` for the table: 4 | 5 | | a | b | 6 | |------------|-------------| 7 | | 1 | hi | 8 | | 2 | hello | 9 | | 3 | there | 10 | | 2 | world | 11 | 12 | #### Run 13 | 14 | ```bash 15 | cargo run --example hello_world 16 | ``` 17 | 18 | > [!NOTE] 19 | > To run this example without the `blitzar` (i.e., CPU only) feature: 20 | > ```bash 21 | > cargo run --example hello_world --no-default-features --features="test cpu-perf" 22 | > ``` 23 | 24 | #### Output 25 | 26 | ``` 27 | Warming up GPU... 520.959485ms 28 | Loading data... 3.229767ms 29 | Parsing Query... 1.870256ms 30 | Generating Proof... 467.45371ms 31 | Verifying Proof... 7.106864ms 32 | Valid proof! 33 | Query result: OwnedTable { table: {Ident { value: "b", quote_style: None }: VarChar(["hello", "world"])} } 34 | ``` 35 | -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/README.md: -------------------------------------------------------------------------------- 1 |

2 | # Proof of SQL 3 | 4 |

5 | 6 | This crate is a library for SQL parsing for the Proof of SQL execution engine. See the [proof-of-sql](https://docs.rs/proof-of-sql) crate. 7 | 8 | Proof of SQL is a high-performance zero-knowledge (ZK) prover developed by the [Space and Time](https://www.spaceandtime.io/) team, which cryptographically guarantees that SQL queries were computed accurately against untampered data. It targets online latencies while proving computations over entire chain histories, an order of magnitude faster than state-of-the-art zkVMs and coprocessors. -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/postprocessing/error.rs: -------------------------------------------------------------------------------- 1 | use datafusion::common::DataFusionError; 2 | use proof_of_sql::base::database::OwnedTableError; 3 | use snafu::Snafu; 4 | 5 | /// Errors in postprocessing 6 | #[derive(Snafu, Debug)] 7 | pub enum PostprocessingError { 8 | /// Errors in evaluation of `Expression`s 9 | #[snafu(transparent)] 10 | ExpressionEvaluationError { 11 | /// The underlying source error 12 | source: crate::postprocessing::ExpressionEvaluationError, 13 | }, 14 | /// Returned when a datafusion error occurs 15 | #[snafu(transparent)] 16 | DataFusionError { 17 | /// Underlying datafusion error 18 | source: DataFusionError, 19 | }, 20 | /// Returned when an `OwnedTableError` occurs 21 | #[snafu(transparent)] 22 | OwnedTableError { 23 | /// Underlying `OwnedTableError` 24 | source: OwnedTableError, 25 | }, 26 | } 27 | 28 | /// Result type for postprocessing 29 | pub type PostprocessingResult<T> = core::result::Result<T, PostprocessingError>; 30 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/state_test.rs: -------------------------------------------------------------------------------- 1 | use super::{rand_G_vecs, test_rng, ProverState, PublicParameters}; 2 | use ark_ec::pairing::Pairing; 3 | 4 | #[test] 5 | pub fn we_can_create_a_verifier_state_from_a_prover_state() { 6 | let mut rng = test_rng(); 7 | let max_nu = 5; 8 | let pp = PublicParameters::test_rand(max_nu, &mut rng); 9 | let prover_setup = (&pp).into(); 10 | for nu in 0..max_nu { 11 | let (v1, v2) = rand_G_vecs(nu, &mut rng); 12 | let prover_state = ProverState::new(v1.clone(), v2.clone(), nu); 13 | let verifier_state = prover_state.calculate_verifier_state(&prover_setup); 14 | 15 | let C = Pairing::multi_pairing(&v1, &v2); 16 | let D_1 = Pairing::multi_pairing(&v1, prover_setup.Gamma_2[nu]); 17 | let D_2 = Pairing::multi_pairing(prover_setup.Gamma_1[nu], &v2); 18 | 19 | assert_eq!(verifier_state.C, C); 20 | assert_eq!(verifier_state.D_1, D_1); 21 | assert_eq!(verifier_state.D_2, D_2); 22 | assert_eq!(verifier_state.nu, nu); 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module provides operations for working on slices. Each operation is done as generically as possible to be interoperable. 2 | //! When relevant, slices are assumed to extend indefinitely and be filled with zeros. 3 | //! For example, the inner product will not panic when the two input slices have different lengths. 4 | //! Instead, it will simply truncate the longer one, which is equivalent to multiplying each extra element by zero before summing.
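//!
//! A minimal illustrative sketch (not part of the original docs) of the zero-extension behavior described above, using the crate-internal `inner_product` defined in this module:
//!
//! ```ignore
//! // `b` is implicitly treated as [4, 5, 0], so the result is 1*4 + 2*5 + 3*0 = 14.
//! let a = [1, 2, 3];
//! let b = [4, 5];
//! assert_eq!(crate::base::slice_ops::inner_product(&a, &b), 14);
//! ```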
5 | 6 | #[cfg(any(feature = "rayon", test))] 7 | pub const MIN_RAYON_LEN: usize = 1 << 8; 8 | 9 | mod add_const; 10 | #[cfg(test)] 11 | mod add_const_test; 12 | mod inner_product; 13 | #[cfg(test)] 14 | mod inner_product_test; 15 | mod mul_add_assign; 16 | #[cfg(test)] 17 | mod mul_add_assign_test; 18 | mod slice_cast; 19 | #[cfg(test)] 20 | mod slice_cast_test; 21 | 22 | pub use add_const::*; 23 | pub use inner_product::*; 24 | pub use mul_add_assign::*; 25 | pub use slice_cast::*; 26 | 27 | mod batch_inverse; 28 | pub use batch_inverse::*; 29 | 30 | #[cfg(test)] 31 | mod batch_inverse_test; 32 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/mul_add_assign.rs: -------------------------------------------------------------------------------- 1 | use crate::base::if_rayon; 2 | use core::ops::{AddAssign, Mul}; 3 | #[cfg(feature = "rayon")] 4 | use rayon::iter::{IndexedParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; 5 | 6 | /// This operation does `result[i] += multiplier * to_mul_add[i]` for `i` in `0..to_mul_add.len()`. 7 | /// 8 | /// # Panics 9 | /// Panics if the length of `result` is less than the length of `to_mul_add`. 10 | pub fn mul_add_assign<T, S>(result: &mut [T], multiplier: T, to_mul_add: &[S]) 11 | where 12 | T: Send + Sync + Mul<Output = T> + AddAssign + Copy, 13 | S: Into<T> + Sync + Copy, 14 | { 15 | assert!(result.len() >= to_mul_add.len(), "The length of result must be greater than or equal to the length of the vector of values to be multiplied and added"); 16 | if_rayon!( 17 | result.par_iter_mut().with_min_len(super::MIN_RAYON_LEN), 18 | result.iter_mut() 19 | ) 20 | .zip(to_mul_add) 21 | .for_each(|(res_i, &data_i)| { 22 | *res_i += multiplier * data_i.into(); 23 | }); 24 | } 25 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/table_evaluation.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::Scalar; 2 | use alloc::vec::Vec; 3 | 4 | /// The result of evaluating a table 5 | #[derive(Debug, Eq, PartialEq, Clone)] 6 | pub struct TableEvaluation<S: Scalar> { 7 | /// Evaluation of each column in the table 8 | column_evals: Vec<S>, 9 | /// Evaluation of an all-one column with the same length as the table 10 | chi_eval: S, 11 | } 12 | 13 | impl<S: Scalar> TableEvaluation<S> { 14 | /// Creates a new [`TableEvaluation`]. 15 | #[must_use] 16 | pub fn new(column_evals: Vec<S>, chi_eval: S) -> Self { 17 | Self { 18 | column_evals, 19 | chi_eval, 20 | } 21 | } 22 | 23 | /// Returns the evaluation of each column in the table. 24 | #[must_use] 25 | pub fn column_evals(&self) -> &[S] { 26 | &self.column_evals 27 | } 28 | 29 | /// Returns the evaluation of an all-one column with the same length as the table. 30 | #[must_use] 31 | pub fn chi_eval(&self) -> S { 32 | self.chi_eval 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /scripts/check_commits.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | CONVENTIONAL_REGEX="^(feat|fix|chore|docs|style|refactor|perf|test|build|ci|revert)(\(.+\))?: .+$" 4 | COMMITS=$(git log origin/main..HEAD --pretty=format:"%s") 5 | 6 | count=0 7 | failed=0 8 | 9 | while IFS= read -r COMMIT_MSG; do 10 | count=$((count + 1)) 11 | echo "[$count] Checking commit message: $COMMIT_MSG" 12 | if [[ !
$COMMIT_MSG =~ $CONVENTIONAL_REGEX ]]; then 13 | echo " -> Does NOT match conventional commit format" 14 | failed=$((failed + 1)) 15 | else 16 | echo " -> Matches conventional commit format" 17 | fi 18 | echo 19 | done <<< "$COMMITS" 20 | 21 | echo "Summary of the conventional commit check:" 22 | echo " Total commits: $count" 23 | echo " Failed commits: $failed" 24 | 25 | if [ "$failed" -gt 0 ]; then 26 | echo "Some commits failed the check. Make sure your commit messages match the conventional commit format. 27 | Check https://www.conventionalcommits.org/en/v1.0.0/#summary for more details." 28 | else 29 | echo "All commits match the conventional commit format!" 30 | fi 31 | -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/src/error.rs: -------------------------------------------------------------------------------- 1 | use alloc::string::String; 2 | use snafu::Snafu; 3 | 4 | /// Errors encountered during the parsing process 5 | #[expect(clippy::module_name_repetitions)] 6 | #[derive(Debug, Snafu, Eq, PartialEq)] 7 | pub enum ParseError { 8 | #[snafu(display("Unable to parse query"))] 9 | /// Cannot parse the query 10 | QueryParseError { 11 | /// The underlying error 12 | error: String, 13 | }, 14 | #[snafu(display("Unable to parse identifier"))] 15 | /// Cannot parse the identifier 16 | IdentifierParseError { 17 | /// The underlying error 18 | error: String, 19 | }, 20 | #[snafu(display("Unable to parse resource_id"))] 21 | /// Cannot parse the `resource_id` 22 | ResourceIdParseError { 23 | /// The underlying error 24 | error: String, 25 | }, 26 | } 27 | 28 | /// Result type for general parsing errors that may occur, for example if the provided `schema`/`object_name` strings 29 | /// aren't valid postgres-style identifiers (excluding dollar signs). 30 | pub type ParseResult<T> = Result<T, ParseError>; 31 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains basic shared functionalities of the library. 2 | /// Module providing interoperability with Apache Arrow types. 3 | #[cfg(feature = "arrow")] 4 | pub mod arrow; 5 | 6 | pub(crate) mod bit; 7 | pub(crate) mod byte; 8 | pub mod commitment; 9 | pub mod database; 10 | /// Module providing encoding utilities. 11 | pub(crate) mod encode; 12 | pub mod math; 13 | /// Module providing polynomial utilities. 14 | pub(crate) mod polynomial; 15 | /// Module for Proof of SQL datetime types. 16 | pub mod posql_time; 17 | pub(crate) mod proof; 18 | mod standard_binary_serde; 19 | pub use proof::{PlaceholderError, PlaceholderResult}; 20 | pub use standard_binary_serde::{ 21 | try_standard_binary_deserialization, try_standard_binary_serialization, 22 | }; 23 | pub(crate) mod ref_into; 24 | /// This module contains the `Scalar` trait as well as the main, generic implementations of it.
25 | pub mod scalar; 26 | mod serialize; 27 | pub(crate) use serialize::{impl_serde_for_ark_serde_checked, impl_serde_for_ark_serde_unchecked}; 28 | pub(crate) mod map; 29 | pub(crate) mod slice_ops; 30 | 31 | mod rayon_cfg; 32 | pub(crate) use rayon_cfg::if_rayon; 33 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/countries/countries_gdp.csv: -------------------------------------------------------------------------------- 1 | country,continent,gdp,gdpp 2 | UnitedStates,NorthAmerica,21137,63543 3 | China,Asia,14342,10261 4 | Japan,Asia,5081,40293 5 | Germany,Europe,3846,46329 6 | India,Asia,2875,2099 7 | UnitedKingdom,Europe,2825,42330 8 | France,Europe,2716,41463 9 | Italy,Europe,2001,33279 10 | Brazil,SouthAmerica,1839,8718 11 | Canada,NorthAmerica,1643,43119 12 | Russia,EuropeAsia,1637,11229 13 | SouthKorea,Asia,1622,31489 14 | Australia,Oceania,1382,53799 15 | Spain,Europe,1316,28152 16 | Mexico,NorthAmerica,1265,9958 17 | Indonesia,Asia,1119,4152 18 | Netherlands,Europe,902,52477 19 | SaudiArabia,Asia,793,23206 20 | Turkey,EuropeAsia,761,9005 21 | Switzerland,Europe,703,81392 22 | Argentina,SouthAmerica,449,9921 23 | Sweden,Europe,528,52073 24 | Nigeria,Africa,448,2190 25 | Poland,Europe,594,15673 26 | Thailand,Asia,509,7306 27 | SouthAfrica,Africa,350,5883 28 | Philippines,Asia,402,3685 29 | Colombia,SouthAmerica,323,6458 30 | Egypt,Africa,302,3012 31 | Pakistan,Asia,278,1450 32 | Bangladesh,Asia,302,1855 33 | Vietnam,Asia,283,2900 34 | Chile,SouthAmerica,252,13120 35 | Finland,Europe,268,48888 -------------------------------------------------------------------------------- /.github/workflows/bountyissuestojira.yaml: -------------------------------------------------------------------------------- 1 | name: Origin Repository Workflow 2 | 3 | on: 4 | issues: 5 | types: [opened, labeled] 6 | 7 | jobs: 8 | cross-repo-pipeline: 9 | if: contains(github.event.issue.labels.*.name, 'bug') 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Trigger Target Repository 13 | run: | 14 | curl -X POST \ 15 | -H "Authorization: token ${{ secrets.PAT_TOKEN }}" \ 16 | -H "Accept: application/vnd.github.everest-preview+json" \ 17 | https://api.github.com/repos/spaceandtimelabs/jira-github-integration/dispatches \ 18 | -d '{ 19 | "event_type": "repository_dispatch", 20 | "client_payload": { 21 | "issue_title": "${{ github.event.issue.title }}", 22 | "issue_body": "${{ github.event.issue.body }}", 23 | "issue_url": "${{ github.event.issue.html_url }}", 24 | "issue_number": "${{ github.event.issue.number }}", 25 | "issue_labels": ${{ toJson(github.event.issue.labels.*.name) }} 26 | } 27 | }' 28 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/postprocessing/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains new lightweight postprocessing for non-provable components. 2 | mod error; 3 | pub use error::{PostprocessingError, PostprocessingResult}; 4 | 5 | mod owned_table_postprocessing; 6 | 7 | mod postprocessing_step; 8 | pub use owned_table_postprocessing::{apply_postprocessing_steps, OwnedTablePostprocessing}; 9 | pub use postprocessing_step::PostprocessingStep; 10 | #[cfg(test)] 11 | /// Utility functions for testing postprocessing steps. 
12 | pub mod test_utility; 13 | 14 | mod group_by_postprocessing; 15 | pub use group_by_postprocessing::GroupByPostprocessing; 16 | #[cfg(test)] 17 | mod group_by_postprocessing_test; 18 | 19 | mod order_by_postprocessing; 20 | pub use order_by_postprocessing::OrderByPostprocessing; 21 | #[cfg(test)] 22 | mod order_by_postprocessing_test; 23 | 24 | mod select_postprocessing; 25 | pub use select_postprocessing::SelectPostprocessing; 26 | #[cfg(test)] 27 | mod select_postprocessing_test; 28 | 29 | mod slice_postprocessing; 30 | pub use slice_postprocessing::SlicePostprocessing; 31 | #[cfg(test)] 32 | mod slice_postprocessing_test; 33 | -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | publish = true 3 | name = "proof-of-sql-parser" 4 | version = { workspace = true } 5 | edition = { workspace = true } 6 | repository = { workspace = true } 7 | build = "build.rs" 8 | description = "Library for SQL parsing for the Proof of SQL execution engine." 9 | exclude = { workspace = true } 10 | license-file = { workspace = true } 11 | 12 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 13 | [lib] 14 | doctest = true 15 | test = true 16 | 17 | [dependencies] 18 | arrayvec = { workspace = true, features = ["serde"] } 19 | bigdecimal = { workspace = true, default-features = false } 20 | chrono = { workspace = true, features = ["serde"] } 21 | lalrpop-util = { workspace = true, features = ["lexer", "unicode"] } 22 | serde = { workspace = true, features = ["serde_derive", "alloc"] } 23 | snafu = { workspace = true } 24 | sqlparser = { workspace = true, default-features = false } 25 | 26 | [build-dependencies] 27 | lalrpop = { workspace = true } 28 | 29 | [dev-dependencies] 30 | serde_json = { workspace = true } 31 | 32 | [lints] 33 | workspace = true -------------------------------------------------------------------------------- /.github/workflows/check-approver.yaml: -------------------------------------------------------------------------------- 1 | name: Check Approver 2 | 3 | on: 4 | pull_request_review: 5 | types: [submitted] 6 | 7 | permissions: read-all 8 | 9 | jobs: 10 | check_approver: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Check Approver 15 | uses: actions/github-script@v7 16 | # Only run for external pull requests 17 | if: github.event.pull_request.head.repo.fork 18 | with: 19 | script: | 20 | const reviews = await github.rest.pulls.listReviews({ 21 | owner: context.repo.owner, 22 | repo: context.repo.repo, 23 | pull_number: context.payload.pull_request.number, 24 | }); 25 | 26 | const requiredReviewer = 'JayWhite2357'; 27 | const approved = reviews.data.some(review => review.user.login === requiredReviewer && review.state === 'APPROVED'); 28 | 29 | if (!approved) { 30 | core.setFailed(`External pull request needs to be approved by ${requiredReviewer}.`); 31 | } else { 32 | console.log(`External pull request has been approved by ${requiredReviewer}.`); 33 | } 34 | -------------------------------------------------------------------------------- /third_party/license/vervolg.LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018-2021 Hans-Martin Will 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files
(the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof/sumcheck_random_scalars.rs: -------------------------------------------------------------------------------- 1 | use crate::base::{polynomial::compute_evaluation_vector, scalar::Scalar}; 2 | use alloc::{vec, vec::Vec}; 3 | 4 | /// Accessor for the random scalars used to form the sumcheck polynomial of a query proof 5 | pub struct SumcheckRandomScalars<'a, S: Scalar> { 6 | pub entrywise_point: &'a [S], 7 | pub subpolynomial_multipliers: &'a [S], 8 | pub table_length: usize, 9 | } 10 | 11 | impl<'a, S: Scalar> SumcheckRandomScalars<'a, S> { 12 | pub fn new(scalars: &'a [S], table_length: usize, num_sumcheck_variables: usize) -> Self { 13 | let num_subpolynomial_multipliers = scalars.len() - num_sumcheck_variables; 14 | let (subpolynomial_multipliers, entrywise_point) = 15 | scalars.split_at(num_subpolynomial_multipliers); 16 | Self { 17 | entrywise_point, 18 | subpolynomial_multipliers, 19 | table_length, 20 | } 21 | } 22 | 23 | pub fn compute_entrywise_multipliers(&self) -> Vec { 24 | let mut v = vec![Default::default(); self.table_length]; 25 | compute_evaluation_vector(&mut v, self.entrywise_point); 26 | v 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/test_accessor.rs: -------------------------------------------------------------------------------- 1 | use super::{CommitmentAccessor, DataAccessor, MetadataAccessor, SchemaAccessor, TableRef}; 2 | use crate::base::commitment::Commitment; 3 | use alloc::vec::Vec; 4 | 5 | /// A trait that defines the interface for a combined metadata, schema, commitment, and data accessor for unit testing or example purposes. 6 | pub trait TestAccessor: 7 | Clone 8 | + Default 9 | + MetadataAccessor 10 | + SchemaAccessor 11 | + CommitmentAccessor 12 | + DataAccessor 13 | { 14 | /// The table type that the accessor will accept in the `add_table` method, and likely the inner table type. 
15 | type Table; 16 | 17 | /// Create an empty test accessor 18 | fn new_empty() -> Self; 19 | 20 | /// Add a new table to the current test accessor 21 | fn add_table(&mut self, table_ref: TableRef, data: Self::Table, table_offset: usize); 22 | 23 | /// Get the column names for a given table 24 | fn get_column_names(&self, table_ref: &TableRef) -> Vec<&str>; 25 | 26 | /// Update the table offset alongside its column commitments 27 | fn update_offset(&mut self, table_ref: &TableRef, new_offset: usize); 28 | } 29 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/scalar/test_scalar.rs: -------------------------------------------------------------------------------- 1 | use super::MontScalar; 2 | use ark_ff::{Fp, MontBackend, MontConfig}; 3 | 4 | /// An implementation of `Scalar` intended for use in testing when a concrete implementation is required. 5 | /// 6 | /// Ultimately, a wrapper type around the field element `ark_curve25519::Fr` that should be used in place of `ark_curve25519::Fr`. 7 | pub type TestScalar = MontScalar<TestMontConfig>; 8 | 9 | /// An implementation of `MontConfig` intended for use in testing when a concrete implementation is required. 10 | /// 11 | /// Ultimately, a wrapper type around `ark_curve25519::FrConfig` that should be used in place of `ark_curve25519::FrConfig`. 12 | pub struct TestMontConfig(pub ark_curve25519::FrConfig); 13 | 14 | impl MontConfig<4> for TestMontConfig { 15 | const MODULUS: ark_ff::BigInt<4> = <ark_curve25519::FrConfig as MontConfig<4>>::MODULUS; 16 | 17 | const GENERATOR: Fp<MontBackend<Self, 4>, 4> = 18 | Fp::new(<ark_curve25519::FrConfig as MontConfig<4>>::GENERATOR.0); 19 | 20 | const TWO_ADIC_ROOT_OF_UNITY: ark_ff::Fp<MontBackend<Self, 4>, 4> = 21 | Fp::new(<ark_curve25519::FrConfig as MontConfig<4>>::TWO_ADIC_ROOT_OF_UNITY.0); 22 | } 23 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/books/books.csv: -------------------------------------------------------------------------------- 1 | id,title,author,publication_year,genre,rating 2 | 1,To Kill a Mockingbird,Harper Lee,1960,Fiction,4.5 3 | 2,1984,George Orwell,1949,Science Fiction,4.7 4 | 3,Pride and Prejudice,Jane Austen,1813,Romance,4.3 5 | 4,The Great Gatsby,F. Scott Fitzgerald,1925,Fiction,4.2 6 | 5,The Catcher in the Rye,J.D. Salinger,1951,Fiction,4.0 7 | 6,Moby-Dick,Herman Melville,1851,Adventure,4.1 8 | 7,The Lord of the Rings,J.R.R. Tolkien,1954,Fantasy,4.9 9 | 8,The Hobbit,J.R.R. Tolkien,1937,Fantasy,4.6 10 | 9,Brave New World,Aldous Huxley,1932,Science Fiction,4.4 11 | 10,The Hunger Games,Suzanne Collins,2008,Young Adult,4.3 12 | 11,Harry Potter and the Philosopher's Stone,J.K. Rowling,1997,Fantasy,4.8 13 | 12,The Da Vinci Code,Dan Brown,2003,Thriller,3.9 14 | 13,The Alchemist,Paulo Coelho,1988,Fiction,4.2 15 | 14,The Girl with the Dragon Tattoo,Stieg Larsson,2005,Mystery,4.1 16 | 15,The Hitchhiker's Guide to the Galaxy,Douglas Adams,1979,Science Fiction,4.5 17 | 16,The Shining,Stephen King,1977,Horror,4.3 18 | 17,The Catch-22,Joseph Heller,1961,Satire,4.0 19 | 18,The Chronicles of Narnia,C.S.
Lewis,1950,Fantasy,4.7 20 | 19,The Fault in Our Stars,John Green,2012,Young Adult,4.2 21 | 20,The Old Man and the Sea,Ernest Hemingway,1952,Fiction,4.1 -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/proof/merlin_transcript_core.rs: -------------------------------------------------------------------------------- 1 | impl super::transcript_core::TranscriptCore for merlin::Transcript { 2 | fn new() -> Self { 3 | merlin::Transcript::new(b"TranscriptCore::new") 4 | } 5 | fn raw_append(&mut self, message: &[u8]) { 6 | self.append_message(b"TranscriptCore::raw_append", message); 7 | } 8 | fn raw_challenge(&mut self) -> [u8; 32] { 9 | let mut result = [0u8; 32]; 10 | self.challenge_bytes(b"TranscriptCore::raw_challenge", &mut result); 11 | result 12 | } 13 | } 14 | 15 | #[cfg(test)] 16 | mod tests { 17 | use super::super::transcript_core::test_util::*; 18 | #[test] 19 | fn we_get_equivalent_challenges_with_equivalent_merlin_transcripts() { 20 | we_get_equivalent_challenges_with_equivalent_transcripts::<merlin::Transcript>(); 21 | } 22 | #[test] 23 | fn we_get_different_challenges_with_different_merlin_transcripts() { 24 | we_get_different_challenges_with_different_transcripts::<merlin::Transcript>(); 25 | } 26 | #[test] 27 | fn we_get_different_nontrivial_consecutive_challenges_from_merlin_transcript() { 28 | we_get_different_nontrivial_consecutive_challenges_from_transcript::<merlin::Transcript>(); 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /ci/publish.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -Eeuxo pipefail 3 | 4 | # number to be used to tag the compressed files 5 | NEW_VERSION=$1 6 | 7 | if ! [[ ${NEW_VERSION} =~ ^[0-9]+[.][0-9]+[.][0-9]+$ ]] 8 | then 9 | echo "Incorrect version format: " $NEW_VERSION 10 | exit 1 11 | fi 12 | 13 | # configure rust lib to release 14 | sed -i 's/version = "*.*.*" # DO NOT CHANGE THIS LINE! This will be automatically updated/version = "'${NEW_VERSION}'"/' Cargo.toml 15 | sed -i 's/path = "[^"]*"/version = "'${NEW_VERSION}'"/g' Cargo.toml 16 | 17 | CRATES=("proof-of-sql-parser" "proof-of-sql" "proof-of-sql-planner") 18 | 19 | for crate in "${CRATES[@]}"; do 20 | echo "Attempting to see if ${crate}@${NEW_VERSION} is published already..." 21 | # Make sure to use the correct index URL for crates.io since local crates are otherwise considered 22 | # which will always succeed and nothing will be published 23 | if cargo info --index https://github.com/rust-lang/crates.io-index \ 24 | "${crate}@${NEW_VERSION}" >/dev/null 2>&1 25 | then 26 | echo "The version ${NEW_VERSION} for ${crate} is already on crates.io. Skipping publish." 27 | else 28 | echo "${crate}@${NEW_VERSION} not found, publishing..." 29 | cargo publish -p "${crate}" --token "${CRATES_TOKEN}" 30 | fi 31 | done 32 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/arrow/record_batch_errors.rs: -------------------------------------------------------------------------------- 1 | use super::arrow_array_to_column_conversion::ArrowArrayToColumnConversionError; 2 | use crate::base::commitment::ColumnCommitmentsMismatch; 3 | use snafu::Snafu; 4 | 5 | /// Errors that can occur when trying to convert a record batch into columns.
6 | #[derive(Debug, Snafu)] 7 | pub enum RecordBatchToColumnsError { 8 | /// Error converting from arrow array 9 | #[snafu(transparent)] 10 | ArrowArrayToColumnConversionError { 11 | /// The underlying source error 12 | source: ArrowArrayToColumnConversionError, 13 | }, 14 | } 15 | 16 | /// Errors that can occur when attempting to append a record batch to a [`TableCommitment`]. 17 | #[derive(Debug, Snafu)] 18 | pub enum AppendRecordBatchTableCommitmentError { 19 | /// During a commitment operation, the metadata indicates that the operand tables do not match. 20 | #[snafu(transparent)] 21 | ColumnCommitmentsMismatch { 22 | /// The underlying source error 23 | source: ColumnCommitmentsMismatch, 24 | }, 25 | /// Error converting from arrow array 26 | #[snafu(transparent)] 27 | ArrowBatchToColumnError { 28 | /// The underlying source error 29 | source: RecordBatchToColumnsError, 30 | }, 31 | } 32 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/postprocessing/expression_evaluation_error.rs: -------------------------------------------------------------------------------- 1 | use alloc::string::String; 2 | use core::result::Result; 3 | use proof_of_sql::base::{database::ColumnOperationError, math::decimal::DecimalError}; 4 | use snafu::Snafu; 5 | 6 | /// Errors from evaluation of `Expression`s. 7 | #[derive(Snafu, Debug)] 8 | pub enum ExpressionEvaluationError { 9 | /// Column not found 10 | #[snafu(display("Column not found: {error}"))] 11 | ColumnNotFound { 12 | /// The underlying error 13 | error: String, 14 | }, 15 | /// Error in column operation 16 | #[snafu(transparent)] 17 | ColumnOperationError { 18 | /// The underlying source error 19 | source: ColumnOperationError, 20 | }, 21 | /// Expression not yet supported 22 | #[snafu(display("Expression {expression} is not supported yet"))] 23 | Unsupported { 24 | /// The unsupported expression 25 | expression: String, 26 | }, 27 | /// Error in decimal conversion 28 | #[snafu(transparent)] 29 | DecimalConversionError { 30 | /// The underlying source error 31 | source: DecimalError, 32 | }, 33 | } 34 | 35 | /// Result type for expression evaluation 36 | pub type ExpressionEvaluationResult<T> = Result<T, ExpressionEvaluationError>; 37 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/expression_evaluation_error.rs: -------------------------------------------------------------------------------- 1 | use crate::base::{database::ColumnOperationError, math::decimal::DecimalError}; 2 | use alloc::string::String; 3 | use core::result::Result; 4 | use snafu::Snafu; 5 | 6 | /// Errors from evaluation of `Expression`s.
7 | #[derive(Snafu, Debug, PartialEq, Eq)] 8 | pub enum ExpressionEvaluationError { 9 | /// Column not found 10 | #[snafu(display("Column not found: {error}"))] 11 | ColumnNotFound { 12 | /// The underlying error 13 | error: String, 14 | }, 15 | /// Error in column operation 16 | #[snafu(transparent)] 17 | ColumnOperationError { 18 | /// The underlying source error 19 | source: ColumnOperationError, 20 | }, 21 | /// Expression not yet supported 22 | #[snafu(display("Expression {expression} is not supported yet"))] 23 | Unsupported { 24 | /// The unsupported expression 25 | expression: String, 26 | }, 27 | /// Error in decimal conversion 28 | #[snafu(transparent)] 29 | DecimalConversionError { 30 | /// The underlying source error 31 | source: DecimalError, 32 | }, 33 | } 34 | 35 | /// Result type for expression evaluation 36 | pub type ExpressionEvaluationResult<T> = Result<T, ExpressionEvaluationError>; 37 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/evm_proof_plan/error.rs: -------------------------------------------------------------------------------- 1 | use crate::sql::AnalyzeError; 2 | use snafu::Snafu; 3 | 4 | /// Represents errors that can occur in the EVM proof plan module. 5 | #[derive(Snafu, Debug, PartialEq)] 6 | pub(crate) enum EVMProofPlanError { 7 | /// Error indicating that the plan is not supported. 8 | #[snafu(display("plan not yet supported"))] 9 | NotSupported, 10 | /// Error indicating that the column was not found. 11 | #[snafu(display("column not found"))] 12 | ColumnNotFound, 13 | /// Error indicating that the table was not found. 14 | #[snafu(display("table not found"))] 15 | TableNotFound, 16 | /// Error indicating that the table name cannot be parsed into a `TableRef`. 17 | #[snafu(display("table name cannot be parsed into TableRef"))] 18 | InvalidTableName, 19 | /// Error indicating that the output column name is invalid or missing. 20 | #[snafu(display("invalid or missing output column name"))] 21 | InvalidOutputColumnName, 22 | /// Analyze error 23 | #[snafu(transparent)] 24 | AnalyzeError { 25 | /// The underlying source error 26 | source: AnalyzeError, 27 | }, 28 | } 29 | 30 | /// Result type for EVM proof plan operations. 31 | pub(crate) type EVMProofPlanResult<T> = core::result::Result<T, EVMProofPlanError>; 32 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/utils/log.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "std")] 2 | use sysinfo::System; 3 | use tracing::{trace, Level}; 4 | 5 | /// Logs the memory usage of the system at the TRACE level. 6 | /// 7 | /// This function logs the available memory, used memory, and the percentage of memory used. 8 | /// It only logs this information if the TRACE level is enabled in the tracing configuration. 9 | /// 10 | /// # Arguments 11 | /// 12 | /// * `name` - A string slice that holds the name to be included in the log message.
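///
/// # Example
///
/// A hypothetical call site (illustrative only; the call requires the `std` feature and an active TRACE-level tracing subscriber):
///
/// ```ignore
/// // Emits one TRACE event tagged with "Proving" that reports memory statistics.
/// log_memory_usage("Proving");
/// ```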
13 | #[expect(clippy::cast_precision_loss)] 14 | pub fn log_memory_usage(name: &str) { 15 | #[cfg(feature = "std")] 16 | if tracing::level_enabled!(Level::TRACE) { 17 | let mut system = System::new_all(); 18 | system.refresh_memory(); 19 | 20 | let available_memory = system.available_memory() as f64 / (1024.0 * 1024.0); 21 | let used_memory = system.used_memory() as f64 / (1024.0 * 1024.0); 22 | let percentage_memory_used = (used_memory / (used_memory + available_memory)) * 100.0; 23 | 24 | trace!( 25 | "{} Available memory: {:.2} MB, Used memory: {:.2} MB, Percentage memory used: {:.2}%", 26 | name, 27 | available_memory, 28 | used_memory, 29 | percentage_memory_used 30 | ); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /third_party/license/integer-encoding.LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Google Inc. (lewinb@google.com) -- though not an official 4 | Google product or in any way related! 5 | Copyright (c) 2018-2020 Lewin Bormann (lbo@spheniscida.de) 6 | 7 | Permission is hereby granted, free of charge, to any person obtaining a copy 8 | of this software and associated documentation files (the "Software"), to 9 | deal in the Software without restriction, including without limitation the 10 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or 11 | sell copies of the Software, and to permit persons to whom the Software is 12 | furnished to do so, subject to the following conditions: 13 | 14 | The above copyright notice and this permission notice shall be included in 15 | all copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 20 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 21 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 22 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 23 | IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/encode/u256.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::MontScalar; 2 | use ark_ff::MontConfig; 3 | 4 | /// U256 represents an unsigned 256-bits integer number 5 | /// 6 | /// low is the lower bytes of the u256 number (from 0 to 127 bits) 7 | /// high is the upper bytes of the u256 number (from 128 to 255 bits) 8 | #[derive(PartialEq, Eq, Copy, Clone)] 9 | pub struct U256 { 10 | pub low: u128, 11 | pub high: u128, 12 | } 13 | 14 | impl U256 { 15 | #[inline] 16 | pub const fn from_words(low: u128, high: u128) -> Self { 17 | U256 { low, high } 18 | } 19 | } 20 | 21 | /// This trait converts a dalek scalar into a U256 integer 22 | impl> From<&MontScalar> for U256 { 23 | fn from(val: &MontScalar) -> Self { 24 | let buf: [u64; 4] = val.into(); 25 | let low: u128 = u128::from(buf[0]) | (u128::from(buf[1]) << 64); 26 | let high: u128 = u128::from(buf[2]) | (u128::from(buf[3]) << 64); 27 | U256::from_words(low, high) 28 | } 29 | } 30 | 31 | /// This trait converts a U256 integer into a dalek scalar 32 | impl> From<&U256> for MontScalar { 33 | fn from(val: &U256) -> Self { 34 | let bytes = [val.low.to_le_bytes(), val.high.to_le_bytes()].concat(); 35 | MontScalar::::from_le_bytes_mod_order(&bytes) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_gadgets/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module contains shared proof logic for multiple `ProofExpr` / `ProofPlan` implementations. 2 | #[cfg(test)] 3 | mod divide_and_modulo_expr; 4 | mod membership_check; 5 | mod monotonic; 6 | #[cfg_attr(not(test), expect(dead_code))] 7 | mod permutation_check; 8 | mod shift; 9 | pub(crate) use membership_check::{ 10 | final_round_evaluate_membership_check, first_round_evaluate_membership_check, 11 | verify_membership_check, 12 | }; 13 | #[cfg(test)] 14 | mod membership_check_test; 15 | #[expect(unused_imports)] 16 | use permutation_check::{final_round_evaluate_permutation_check, verify_permutation_check}; 17 | #[cfg(test)] 18 | mod permutation_check_test; 19 | use shift::{final_round_evaluate_shift, first_round_evaluate_shift, verify_shift}; 20 | #[cfg(test)] 21 | mod shift_test; 22 | mod sign_expr; 23 | pub(crate) use sign_expr::{ 24 | final_round_evaluate_sign, first_round_evaluate_sign, verifier_evaluate_sign, 25 | }; 26 | #[cfg(feature = "blitzar")] 27 | #[cfg_attr(not(test), expect(dead_code))] 28 | mod range_check; 29 | #[cfg(all(test, feature = "blitzar"))] 30 | mod range_check_test; 31 | #[cfg(all(test, feature = "blitzar"))] 32 | mod sign_expr_test; 33 | pub(crate) use monotonic::{ 34 | final_round_evaluate_monotonic, first_round_evaluate_monotonic, verify_monotonic, 35 | }; 36 | #[cfg(test)] 37 | mod monotonic_test; 38 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/stocks/stocks.csv: -------------------------------------------------------------------------------- 1 | symbol,company,sector,price,volume,marketcap,pe_ratio,dividendyield 2 | AAPL,Apple Inc.,Technology,175.50,52000000,2850.25,28.5,0.5 3 | MSFT,Microsoft Corporation,Technology,325.75,25000000,2425.80,32.8,0.8 4 | GOOGL,Alphabet Inc.,Technology,135.20,18000000,1720.40,25.2,0.0 5 | AMZN,Amazon.com Inc.,Consumer Cyclical,128.90,35000000,1325.60,42.1,0.0 6 | 
META,Meta Platforms Inc.,Technology,308.45,22000000,785.30,31.5,0.0 7 | TSLA,Tesla Inc.,Automotive,238.45,100000000,755.90,75.2,0.0 8 | JPM,JPMorgan Chase & Co.,Financial Services,148.75,12000000,428.90,11.2,2.8 9 | V,Visa Inc.,Financial Services,245.60,8000000,510.30,28.9,0.7 10 | JNJ,Johnson & Johnson,Healthcare,152.30,6000000,395.80,15.5,3.1 11 | PG,Procter & Gamble Co.,Consumer Defensive,150.20,7000000,355.40,25.3,2.4 12 | XOM,Exxon Mobil Corporation,Energy,105.80,20000000,425.60,8.9,3.5 13 | WMT,Walmart Inc.,Consumer Defensive,158.90,8500000,428.70,26.4,1.5 14 | KO,Coca-Cola Company,Consumer Defensive,58.75,12000000,254.30,24.2,3.0 15 | DIS,Walt Disney Company,Communication Services,85.50,15000000,156.80,42.8,0.0 16 | NFLX,Netflix Inc.,Communication Services,385.20,7500000,171.20,38.5,0.0 17 | NVDA,NVIDIA Corporation,Technology,455.80,45000000,1125.40,110.5,0.1 18 | INTC,Intel Corporation,Technology,35.80,42000000,150.20,15.8,1.5 19 | AMD,Advanced Micro Devices,Technology,105.25,65000000,170.30,220.5,0.0 -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/math/decimal_tests.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod precision_tests { 3 | use crate::base::math::decimal::Precision; 4 | use serde_json; 5 | 6 | #[test] 7 | fn we_can_deserialize_valid_precision() { 8 | let json = "50"; // A valid value within the range 9 | let precision: Result<Precision, _> = serde_json::from_str(json); 10 | assert!(precision.is_ok()); 11 | assert_eq!(precision.unwrap().value(), 50); 12 | } 13 | 14 | #[test] 15 | fn we_can_deserialize_valid_precision_inclusive() { 16 | let json = "75"; // A valid value within the range 17 | let precision: Result<Precision, _> = serde_json::from_str(json); 18 | assert!(precision.is_ok()); 19 | assert_eq!(precision.unwrap().value(), 75); 20 | } 21 | 22 | #[test] 23 | fn we_cannot_deserialize_invalid_precision() { 24 | let json = "76"; // An invalid value outside the range 25 | let precision: Result<Precision, _> = serde_json::from_str(json); 26 | assert!(precision.is_err()); 27 | } 28 | 29 | // Test deserialization of a non-numeric value 30 | #[test] 31 | fn we_cannot_deserialize_non_numeric_precision() { 32 | let json = "\"not a number\""; 33 | let precision: Result<Precision, _> = serde_json::from_str(json); 34 | assert!(precision.is_err()); 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_exprs/column_expr_test.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | base::{ 3 | commitment::InnerProductProof, 4 | database::{ 5 | owned_table_utility::*, ColumnField, ColumnType, OwnedTableTestAccessor, TableRef, 6 | }, 7 | }, 8 | sql::{ 9 | proof::{exercise_verification, VerifiableQueryResult}, 10 | proof_exprs::test_utility::*, 11 | proof_plans::test_utility::*, 12 | }, 13 | }; 14 | 15 | #[test] 16 | fn we_can_prove_a_query_with_a_single_selected_row() { 17 | let data = owned_table([boolean("a", [true, false])]); 18 | let t = TableRef::new("sxt", "t"); 19 | let accessor = 20 | OwnedTableTestAccessor::<InnerProductProof>::new_from_table(t.clone(), data, 0, ()); 21 | let ast = projection( 22 | cols_expr_plan(&t, &["a"], &accessor), 23 | table_exec( 24 | t.clone(), 25 | vec![ColumnField::new("a".into(), ColumnType::Boolean)], 26 | ), 27 | ); 28 | let verifiable_res = VerifiableQueryResult::new(&ast, &accessor, &(), &[]).unwrap(); 29 | exercise_verification(&verifiable_res, &ast, &accessor,
&t); 30 | let res = verifiable_res 31 | .verify(&ast, &accessor, &(), &[]) 32 | .unwrap() 33 | .table; 34 | let expected_res = owned_table([boolean("a", [true, false])]); 35 | assert_eq!(res, expected_res); 36 | } 37 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/build_vmv_state.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | compute_L_R_vec, compute_l_r_tensors, compute_v_vec, DeferredGT, G1Affine, VMVProverState, 3 | VMVVerifierState, F, 4 | }; 5 | use alloc::vec::Vec; 6 | 7 | /// Builds a [`VMVProverState`] from the given parameters. 8 | pub(super) fn build_vmv_prover_state( 9 | a: &[F], 10 | b_point: &[F], 11 | T_vec_prime: Vec<G1Affine>, 12 | sigma: usize, 13 | nu: usize, 14 | ) -> VMVProverState { 15 | let (L_vec, R_vec) = compute_L_R_vec(b_point, sigma, nu); 16 | #[cfg(test)] 17 | let (l_tensor, r_tensor) = compute_l_r_tensors(b_point, sigma, nu); 18 | let v_vec = compute_v_vec(a, &L_vec, sigma, nu); 19 | VMVProverState { 20 | v_vec, 21 | T_vec_prime, 22 | #[cfg(test)] 23 | l_tensor, 24 | #[cfg(test)] 25 | r_tensor, 26 | L_vec, 27 | R_vec, 28 | nu, 29 | } 30 | } 31 | 32 | /// Builds a [`VMVVerifierState`] from the given parameters. 33 | pub(super) fn build_vmv_verifier_state( 34 | y: F, 35 | b_point: &[F], 36 | T: DeferredGT, 37 | sigma: usize, 38 | nu: usize, 39 | ) -> VMVVerifierState { 40 | let (l_tensor, r_tensor) = compute_l_r_tensors(b_point, sigma, nu); 41 | VMVVerifierState { 42 | y, 43 | T, 44 | l_tensor, 45 | r_tensor, 46 | nu, 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_plans/fold_util.rs: -------------------------------------------------------------------------------- 1 | use crate::base::{polynomial::MultilinearExtension, scalar::Scalar}; 2 | 3 | /// This function takes a set of columns and folds it into a slice of scalars. 4 | /// 5 | /// The result `res` is updated with 6 | /// `res[i] += mul * sum (beta^(n-1-j) * columns[j][i]) for j in 0..columns.len()` where n is the number of columns, 7 | /// and where each column is padded with 0s as needed. 8 | /// 9 | /// This is similar to adding `mul * fold_vals(beta,...)` on each row. 10 | pub fn fold_columns<S: Scalar>( 11 | res: &mut [S], 12 | mul: S, 13 | beta: S, 14 | columns: &[impl MultilinearExtension<S>], 15 | ) { 16 | for (m, col) in powers(mul, beta).zip(columns.iter().rev()) { 17 | col.mul_add(res, &m); 18 | } 19 | } 20 | 21 | /// This function takes a set of values and returns a scalar that is the 22 | /// result of folding the values. 23 | /// 24 | /// The result is 25 | /// `sum (beta^(n-1-j) * vals[j]) for j in 0..vals.len()` where n is the number of vals.
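///
/// For example (an illustrative note, not from the source), with `vals = [a, b, c]` the
/// Horner-style fold below computes
/// `((0 * beta + a) * beta + b) * beta + c = a * beta^2 + b * beta + c`.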
26 | pub fn fold_vals<S: Scalar>(beta: S, vals: &[S]) -> S { 27 | vals.iter().fold(S::zero(), |acc, &v| acc * beta + v) 28 | } 29 | 30 | /// Returns an iterator for the lazily evaluated sequence `init, init * base, init * base^2, ...` 31 | fn powers<S: Scalar>(init: S, base: S) -> impl Iterator<Item = S> { 32 | core::iter::successors(Some(init), move |&m| Some(m * base)) 33 | } 34 | -------------------------------------------------------------------------------- /solidity/test/base/FieldUtil.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import "../../src/base/Constants.sol"; 6 | 7 | using {F.into, _add as +, _mul as *, _sub as -, _neg as -} for FF global; 8 | 9 | type FF is uint256; 10 | 11 | function _add(FF a, FF b) pure returns (FF c) { 12 | c = FF.wrap(addmod(a.into(), b.into(), MODULUS)); 13 | } 14 | 15 | function _mul(FF a, FF b) pure returns (FF c) { 16 | c = FF.wrap(mulmod(FF.unwrap(a), FF.unwrap(b), MODULUS)); 17 | } 18 | 19 | function _sub(FF a, FF b) pure returns (FF c) { 20 | c = a + (-b); 21 | } 22 | 23 | function _neg(FF a) pure returns (FF c) { 24 | c = FF.wrap(MODULUS_MINUS_ONE) * a; 25 | } 26 | 27 | library F { 28 | FF public constant ZERO = FF.wrap(0); 29 | FF public constant ONE = FF.wrap(1); 30 | FF public constant TWO = FF.wrap(2); 31 | 32 | function from(int64 a) internal pure returns (FF c) { 33 | if (a < 0) { 34 | c = -FF.wrap(uint256(-int256(a))); 35 | } else { 36 | c = FF.wrap(uint256(int256(a))); 37 | } 38 | } 39 | 40 | function from(uint256 a) internal pure returns (FF c) { 41 | c = FF.wrap(a); 42 | } 43 | 44 | function into(FF a) internal pure returns (uint256 c) { 45 | c = FF.unwrap(a) % MODULUS; 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /.github/workflows/stale.yml: -------------------------------------------------------------------------------- 1 | name: Mark and Close Stale PRs 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * 5' # Run at midnight every Friday 6 | workflow_dispatch: # Allow manual triggering 7 | 8 | permissions: 9 | actions: write 10 | contents: write # only for delete-branch option 11 | issues: write 12 | pull-requests: write 13 | 14 | jobs: 15 | stale: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Mark and Close Stale PRs 19 | uses: actions/stale@v9 20 | with: 21 | # PR specific settings 22 | stale-pr-message: 'This PR has been marked as stale due to 30 days of inactivity. It will be closed in 7 days if no further activity occurs.' 23 | close-pr-message: 'This PR has been closed due to inactivity. Feel free to reopen if you wish to continue working on it.'
24 | days-before-pr-stale: 30 25 | days-before-pr-close: 7 26 | stale-pr-label: 'stale' 27 | exempt-pr-labels: 'no-stale,dependencies,security' 28 | exempt-draft-pr: true 29 | 30 | # Issue specific settings - disabled by default 31 | days-before-issue-stale: -1 32 | days-before-issue-close: -1 33 | 34 | # General settings 35 | operations-per-run: 100 36 | ascending: true # Older PRs are processed first 37 | remove-stale-when-updated: true 38 | -------------------------------------------------------------------------------- /docs/protocols/permutation.tex: -------------------------------------------------------------------------------- 1 | \documentclass[11pt]{article} 2 | \usepackage[T1]{fontenc} 3 | \usepackage[utf8]{inputenc} 4 | \usepackage{lmodern} 5 | \usepackage[margin=1in]{geometry} 6 | \usepackage{graphicx} 7 | \usepackage{amsmath,amssymb} 8 | \usepackage{booktabs} 9 | \usepackage{hyperref} 10 | \usepackage{microtype} 11 | \usepackage{todonotes} 12 | \hypersetup{ 13 | colorlinks=true, 14 | linkcolor=blue, 15 | citecolor=blue, 16 | urlcolor=blue 17 | } 18 | \setlength{\parindent}{0pt} 19 | \setlength{\parskip}{6pt} 20 | 21 | \title{Permutation} 22 | \author{Space and Time Inc} 23 | \date{February 2025} 24 | 25 | \begin{document} 26 | \maketitle 27 | 28 | \noindent Let $A=(a_{ij})$ be a table. We need to prove that $R=(r_{ij})$ is a permuted version of $A$. \\ 29 | 30 | \section{Summary} 31 | \begin{itemize} 32 | \item Plan values: $i$ and number of columns in $A$ 33 | \item Inputs: $A$ 34 | \item Outputs: $R$ 35 | \item Hints: $c$, $d$, $c^\ast$, $d^\ast$. 36 | \end{itemize} 37 | 38 | \section{Details} 39 | We set 40 | 41 | \begin{align*} 42 | \hat{c} &\equiv \sum_j a_j \beta^j &\hat{d} &\equiv \sum_j r_ j \beta^j 43 | \end{align*} 44 | 45 | The $3$ constraints are: 46 | 47 | \begin{align*} 48 | c^\ast \cdot (\alpha + \hat{c}) &\equiv \chi_{[0,n)}\\ 49 | d^\ast \cdot (\alpha + \hat{d}) &\equiv \chi_{[0,n)}\\ 50 | c^\ast &\overset{\sum}{=} d^\ast 51 | \end{align*} 52 | \end{document} 53 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release 2 | on: 3 | push: 4 | branches: 5 | - main 6 | permissions: 7 | contents: read # for checkout 8 | jobs: 9 | lint-and-test: 10 | uses: ./.github/workflows/lint-and-test.yml 11 | 12 | release: 13 | name: Release 14 | runs-on: large-8-core-32gb-22-04 15 | needs: [lint-and-test] 16 | environment: deploy #!! DO NOT CHANGE THIS LINE !! # 17 | permissions: 18 | contents: write # to be able to publish a GitHub release 19 | issues: write # to be able to comment on released issues 20 | pull-requests: write # to be able to comment on released pull requests 21 | id-token: write # to enable use of OIDC for npm provenance 22 | steps: 23 | - uses: actions/checkout@v4.1.0 24 | with: 25 | fetch-depth: 0 # download tags, see https://github.com/actions/checkout/issues/100 26 | - run: git config --global --add safe.directory $(realpath .) 
27 | - name: Setup node 28 | uses: actions/setup-node@v3 29 | with: 30 | node-version: "20.x" 31 | - name: Install cargo info 32 | run: | 33 | cargo install cargo-info 34 | - name: Semantic Release 35 | run: | 36 | npm install semantic-release 37 | npx semantic-release 38 | env: 39 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | CRATES_TOKEN: ${{ secrets.CRATES_TOKEN }} 41 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! This crate converts a `DataFusion` `LogicalPlan` to a `ProofPlan` and `Postprocessing` 2 | #![cfg_attr(test, expect(clippy::missing_panics_doc))] 3 | extern crate alloc; 4 | mod aggregate; 5 | pub(crate) use aggregate::{aggregate_function_to_proof_expr, AggregateFunc}; 6 | mod context; 7 | pub use context::PoSqlContextProvider; 8 | #[cfg(test)] 9 | pub(crate) use context::PoSqlTableSource; 10 | mod conversion; 11 | pub use conversion::{ 12 | get_table_refs_from_statement, sql_to_proof_plans, sql_to_proof_plans_with_postprocessing, 13 | }; 14 | #[cfg(test)] 15 | mod df_util; 16 | mod expr; 17 | pub use expr::expr_to_proof_expr; 18 | mod error; 19 | pub use error::{PlannerError, PlannerResult}; 20 | mod plan; 21 | /// Proof of SQL Postprocessing. Used when the last step of the logical plan is an unprovable projection. 22 | pub mod postprocessing; 23 | pub use plan::logical_plan_to_proof_plan; 24 | mod proof_plan_with_postprocessing; 25 | pub use proof_plan_with_postprocessing::{ 26 | logical_plan_to_proof_plan_with_postprocessing, ProofPlanWithPostprocessing, 27 | }; 28 | mod uppercase_column_visitor; 29 | pub use uppercase_column_visitor::{statement_with_uppercase_identifiers, uppercase_identifier}; 30 | mod util; 31 | pub use util::column_fields_to_schema; 32 | pub(crate) use util::{ 33 | column_to_column_ref, placeholder_to_placeholder_expr, scalar_value_to_literal_value, 34 | schema_to_column_fields, table_reference_to_table_ref, 35 | }; 36 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/arrow_schema_utility.rs: -------------------------------------------------------------------------------- 1 | //! This module provides utility functions for working with Arrow schemas in the context of Proof of SQL. 2 | //! It includes functionality to convert Arrow schemas to PoSQL-compatible formats. 3 | 4 | use alloc::sync::Arc; 5 | use arrow::datatypes::{DataType, Field, Schema, SchemaRef}; 6 | 7 | /// Converts an Arrow schema to a PoSQL-compatible schema. 8 | /// 9 | /// This function takes an Arrow `SchemaRef` and returns a new `SchemaRef` where 10 | /// floating-point data types (Float16, Float32, Float64) are converted to Decimal256(20, 10). 11 | /// Other data types remain unchanged. 12 | /// 13 | /// # Arguments 14 | /// 15 | /// * `schema` - The input Arrow schema to convert. 16 | /// 17 | /// # Returns 18 | /// 19 | /// A new `SchemaRef` with PoSQL-compatible data types.
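///
/// # Example
///
/// A minimal sketch (illustrative only) of the float-to-decimal widening:
///
/// ```ignore
/// use arrow::datatypes::{DataType, Field, Schema};
/// use std::sync::Arc;
///
/// let schema = Arc::new(Schema::new(vec![Field::new("price", DataType::Float64, false)]));
/// let posql_schema = get_posql_compatible_schema(&schema);
/// // Float64 is widened to a PoSQL-compatible fixed-point type.
/// assert_eq!(*posql_schema.field(0).data_type(), DataType::Decimal256(20, 10));
/// ```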
20 | #[must_use] 21 | pub fn get_posql_compatible_schema(schema: &SchemaRef) -> SchemaRef { 22 | let new_fields: Vec<Field> = schema 23 | .fields() 24 | .iter() 25 | .map(|field| { 26 | let new_data_type = match field.data_type() { 27 | DataType::Float16 | DataType::Float32 | DataType::Float64 => { 28 | DataType::Decimal256(20, 10) 29 | } 30 | _ => field.data_type().clone(), 31 | }; 32 | Field::new(field.name(), new_data_type, field.is_nullable()) 33 | }) 34 | .collect(); 35 | 36 | Arc::new(Schema::new(new_fields)) 37 | } 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | # latexpdf 13 | protocols/*.aux 14 | protocols/*.log 15 | protocols/*.fdb_latexmk 16 | protocols/*.fls 17 | protocols/*.synctex.gz 18 | 19 | # Added by macOS 20 | .DS_Store 21 | protocols/.DS_Store 22 | 23 | # ignore node files 24 | node_modules* 25 | 26 | # Added by cargo 27 | 28 | /target 29 | /Cargo.lock 30 | 31 | # annoying mac artifacts 32 | 33 | **.DS_Store 34 | 35 | # Editor-specific 36 | Session.vim 37 | .vscode 38 | .idea 39 | 40 | # Measurements are really pretty huge and not necessary to store in the repo 41 | measurements.json 42 | 43 | # ignore python files 44 | __pycache__/ 45 | *.py[cod] 46 | *$py.class 47 | temp-bench-results 48 | 49 | # ignore release artifacts 50 | *.tar.gz 51 | *.tar 52 | *.zip 53 | 54 | *.aux 55 | *.fdb_latexmk 56 | *.fls 57 | *.log 58 | *.synctex.gz 59 | *.out 60 | *.pdf 61 | # ignore proofs stuff 62 | metastore_db 63 | 64 | # forge test files 65 | out 66 | cache 67 | 68 | # any output files from generating public params 69 | output/ 70 | 71 | *.post.sol 72 | 73 | **/dependencies 74 | 75 | solidity/lcov.info 76 | solidity/coverage-report 77 | solidity/docs 78 | autogen_logs/ 79 | _autocoder/ 80 | prompts/ 81 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/posql_time/error.rs: -------------------------------------------------------------------------------- 1 | use alloc::string::{String, ToString}; 2 | use serde::{Deserialize, Serialize}; 3 | use snafu::Snafu; 4 | 5 | /// Errors related to time operations, including timezone and timestamp conversions. 6 | #[derive(Snafu, Debug, Eq, PartialEq, Serialize, Deserialize)] 7 | pub enum PoSQLTimestampError { 8 | /// Error when the timezone string provided cannot be parsed into a valid timezone. 9 | #[snafu(display("invalid timezone string: {timezone}"))] 10 | InvalidTimezone { 11 | /// The invalid timezone 12 | timezone: String, 13 | }, 14 | 15 | /// Error indicating an invalid timezone offset was provided. 16 | #[snafu(display("invalid timezone offset"))] 17 | InvalidTimezoneOffset, 18 | 19 | /// Indicates a failure to convert between different representations of time units.
20 | #[snafu(display("Invalid time unit"))] 21 | InvalidTimeUnit { 22 | /// The underlying error 23 | error: String, 24 | }, 25 | 26 | /// Represents a failure to parse a provided time unit precision value, `PoSQL` supports 27 | /// Seconds, Milliseconds, Microseconds, and Nanoseconds 28 | #[snafu(display("Unsupported precision for timestamp: {error}"))] 29 | UnsupportedPrecision { 30 | /// The underlying error 31 | error: String, 32 | }, 33 | } 34 | 35 | // This exists because TryFrom for ColumnType error is String 36 | impl From for String { 37 | fn from(error: PoSQLTimestampError) -> Self { 38 | error.to_string() 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/inner_product.rs: -------------------------------------------------------------------------------- 1 | use crate::base::{ 2 | if_rayon, 3 | scalar::{Scalar, ScalarExt}, 4 | }; 5 | use alloc::vec::Vec; 6 | use core::{iter::Sum, ops::Mul}; 7 | #[cfg(feature = "rayon")] 8 | use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; 9 | 10 | /// This operation takes the inner product of two slices. In other words, it does `a[0] * b[0] + a[1] * b[1] + ... + a[n] * b[n]`. 11 | /// If one of the slices is longer than the other, the extra elements are ignored/considered to be 0. 12 | pub fn inner_product(a: &[F], b: &[F]) -> F 13 | where 14 | F: Sync + Send + Mul + Sum + Copy, 15 | { 16 | if_rayon!(a.par_iter().with_min_len(super::MIN_RAYON_LEN), a.iter()) 17 | .zip(b) 18 | .map(|(&a, &b)| a * b) 19 | .sum() 20 | } 21 | 22 | pub fn inner_product_ref_cast(a: &[F], b: &[T]) -> T 23 | where 24 | for<'a> &'a F: Into, 25 | F: Send + Sync, 26 | T: Sync + Send + Mul + Sum + Copy, 27 | { 28 | if_rayon!(a.par_iter().with_min_len(super::MIN_RAYON_LEN), a.iter()) 29 | .zip(b) 30 | .map(|(a, b)| a.into() * *b) 31 | .sum() 32 | } 33 | 34 | /// Cannot use blanket impls for `Vec` because bytes might have different embeddings as scalars 35 | pub fn inner_product_with_bytes(a: &[Vec], b: &[S]) -> S { 36 | if_rayon!(a.par_iter().with_min_len(super::MIN_RAYON_LEN), a.iter()) 37 | .zip(b) 38 | .map(|(lhs_bytes, &rhs)| S::from_byte_slice_via_hash(lhs_bytes) * rhs) 39 | .sum() 40 | } 41 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/fold_scalars_test.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | extended_dory_reduce_helper::extended_dory_reduce_verify_fold_s_vecs, fold_scalars_0_prove, 3 | fold_scalars_0_verify, rand_F_tensors, rand_G_vecs, test_rng, DoryMessages, 4 | ExtendedProverState, PublicParameters, 5 | }; 6 | use merlin::Transcript; 7 | 8 | #[test] 9 | fn we_can_fold_scalars() { 10 | let mut rng = test_rng(); 11 | let nu = 0; 12 | let pp = PublicParameters::test_rand(nu, &mut rng); 13 | let prover_setup = (&pp).into(); 14 | let verifier_setup = (&pp).into(); 15 | let (s1_tensor, s2_tensor) = rand_F_tensors(nu, &mut rng); 16 | let (v1, v2) = rand_G_vecs(nu, &mut rng); 17 | let prover_state = ExtendedProverState::new_from_tensors(s1_tensor, s2_tensor, v1, v2, nu); 18 | let verifier_state = prover_state.calculate_verifier_state(&prover_setup); 19 | 20 | let mut transcript = Transcript::new(b"fold_scalars_test"); 21 | let mut messages = DoryMessages::default(); 22 | let prover_folded_state = 23 | fold_scalars_0_prove(&mut messages, &mut transcript, prover_state, &prover_setup); 24 | 25 | let mut 
transcript = Transcript::new(b"fold_scalars_test"); 26 | let verifier_folded_state = fold_scalars_0_verify( 27 | &mut messages, 28 | &mut transcript, 29 | verifier_state, 30 | &verifier_setup, 31 | extended_dory_reduce_verify_fold_s_vecs, 32 | ); 33 | assert_eq!( 34 | prover_folded_state.calculate_verifier_state(&prover_setup), 35 | verifier_folded_state 36 | ); 37 | } 38 | -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "proof-of-sql-benches" 3 | edition.workspace = true 4 | exclude.workspace = true 5 | repository.workspace = true 6 | version.workspace = true 7 | license-file.workspace = true 8 | 9 | [dependencies] 10 | ark-serialize = { version = "0.5.0" } 11 | ark-std = { version = "0.5.0", default-features = false } 12 | blitzar = { version = "4.3.0" } 13 | bumpalo = { version = "3.11.0" } 14 | clap = { version = "4.5.4", features = ["derive", "env"] } 15 | criterion = { version = "0.5.1", features = ["html_reports"] } 16 | csv = { version = "1.3.1" } 17 | curve25519-dalek = { version = "4", features = ["rand_core"] } 18 | datafusion = { version = "38.0.0", default-features = false } 19 | ff = { version = "0.13.0" } 20 | halo2curves = { version = "0.8.0", default-features = false } 21 | indexmap = { version = "2.8", default-features = false } 22 | nova-snark = { version = "0.41.0" } 23 | opentelemetry = { version = "0.23.0" } 24 | opentelemetry-jaeger = { version = "0.20.0" } 25 | proof-of-sql = { path = "../proof-of-sql", default-features = false, features = ["arrow", "hyperkzg_proof"] } 26 | proof-of-sql-planner = { path = "../proof-of-sql-planner" } 27 | rand = { version = "0.8", default-features = false } 28 | sqlparser = { version = "0.45.0", default-features = false } 29 | tracing = { version = "0.1.36", default-features = false } 30 | tracing-opentelemetry = { version = "0.22.0" } 31 | tracing-subscriber = { version = "0.3.0", features = ["env-filter"] } 32 | 33 | [lints] 34 | workspace = true 35 | 36 | [[bench]] 37 | name = "bench_append_rows" 38 | harness = false 39 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/postprocessing/select_postprocessing.rs: -------------------------------------------------------------------------------- 1 | use super::{evaluate_expr, PostprocessingResult, PostprocessingStep}; 2 | use ahash::AHasher; 3 | use alloc::vec::Vec; 4 | use core::hash::BuildHasherDefault; 5 | use datafusion::logical_expr::Expr; 6 | use indexmap::IndexMap; 7 | use proof_of_sql::base::{ 8 | database::{OwnedColumn, OwnedTable}, 9 | scalar::Scalar, 10 | }; 11 | use sqlparser::ast::Ident; 12 | 13 | /// The select expression used to select, reorder, and apply alias transformations 14 | #[derive(Debug, Clone, PartialEq)] 15 | pub struct SelectPostprocessing { 16 | /// The expressions we select 17 | exprs: Vec<Expr>, 18 | } 19 | 20 | impl SelectPostprocessing { 21 | /// Create a new `SelectPostprocessing` node. 22 | #[must_use] 23 | pub fn new(exprs: Vec<Expr>) -> Self { 24 | Self { exprs } 25 | } 26 | } 27 | 28 | impl<S: Scalar> PostprocessingStep<S> for SelectPostprocessing { 29 | /// Apply the select transformation to the given `OwnedTable`.
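/// The resulting columns are keyed by each expression's `display_name()`, so aliases and the
/// ordering of `exprs` carry through to the output table.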
30 | fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>> { 31 | let cols: IndexMap<Ident, OwnedColumn<S>, BuildHasherDefault<AHasher>> = self 32 | .exprs 33 | .iter() 34 | .map(|expr| -> PostprocessingResult<(Ident, OwnedColumn<S>)> { 35 | let result_column = evaluate_expr(&owned_table, expr)?; 36 | Ok((expr.display_name()?.as_str().into(), result_column)) 37 | }) 38 | .collect::<PostprocessingResult<_>>()?; 39 | Ok(OwnedTable::try_new(cols)?) 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/bit/bit_mask_utils_test.rs: -------------------------------------------------------------------------------- 1 | use super::bit_mask_utils::make_bit_mask; 2 | use crate::base::{ 3 | bit::bit_mask_utils::is_bit_mask_negative_representation, 4 | scalar::{test_scalar::TestScalar, Scalar}, 5 | }; 6 | use bnum::types::U256; 7 | use core::ops::Shl; 8 | 9 | #[test] 10 | fn we_can_make_positive_bit_mask() { 11 | // ARRANGE 12 | let positive_scalar = TestScalar::TWO; 13 | 14 | // ACT 15 | let bit_mask = make_bit_mask(positive_scalar); 16 | 17 | // ASSERT 18 | assert_eq!(bit_mask, (U256::ONE.shl(255)) + U256::TWO); 19 | } 20 | 21 | #[test] 22 | fn we_can_make_negative_bit_mask() { 23 | // ARRANGE 24 | let negative_scalar = -TestScalar::TWO; 25 | 26 | // ACT 27 | let bit_mask = make_bit_mask(negative_scalar); 28 | 29 | // ASSERT 30 | assert_eq!(bit_mask, (U256::ONE.shl(255)) - U256::TWO); 31 | } 32 | 33 | #[test] 34 | fn we_can_verify_positive_bit_mask_is_positive_representation() { 35 | // ARRANGE 36 | let positive_scalar = TestScalar::TWO; 37 | let bit_mask = make_bit_mask(positive_scalar); 38 | 39 | // ACT 40 | let is_positive = !is_bit_mask_negative_representation(bit_mask); 41 | 42 | // ASSERT 43 | assert!(is_positive); 44 | } 45 | 46 | #[test] 47 | fn we_can_verify_negative_bit_mask_is_negative_representation() { 48 | // ARRANGE 49 | let negative_scalar = -TestScalar::TWO; 50 | let bit_mask = make_bit_mask(negative_scalar); 51 | 52 | // ACT 53 | let is_negative = is_bit_mask_negative_representation(bit_mask); 54 | 55 | // ASSERT 56 | assert!(is_negative); 57 | } 58 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_plans/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module proves provable execution plans.
2 | mod empty_exec; 3 | pub use empty_exec::EmptyExec; 4 | 5 | mod table_exec; 6 | pub use table_exec::TableExec; 7 | #[cfg(all(test, feature = "blitzar"))] 8 | mod table_exec_test; 9 | 10 | mod projection_exec; 11 | pub(crate) use projection_exec::ProjectionExec; 12 | #[cfg(all(test, feature = "blitzar"))] 13 | mod projection_exec_test; 14 | 15 | #[cfg(test)] 16 | pub(crate) mod test_utility; 17 | 18 | mod filter_exec; 19 | pub(crate) use filter_exec::FilterExec; 20 | #[cfg(test)] 21 | pub(crate) use filter_exec::OstensibleFilterExec; 22 | #[cfg(all(test, feature = "blitzar"))] 23 | mod filter_exec_test; 24 | #[cfg(all(test, feature = "blitzar"))] 25 | mod filter_exec_test_dishonest_prover; 26 | 27 | mod fold_util; 28 | pub(crate) use fold_util::{fold_columns, fold_vals}; 29 | #[cfg(test)] 30 | mod fold_util_test; 31 | 32 | mod group_by_exec; 33 | pub(crate) use group_by_exec::GroupByExec; 34 | 35 | #[cfg(all(test, feature = "blitzar"))] 36 | mod group_by_exec_test; 37 | 38 | mod slice_exec; 39 | pub(crate) use slice_exec::SliceExec; 40 | #[cfg(all(test, feature = "blitzar"))] 41 | mod slice_exec_test; 42 | 43 | mod union_exec; 44 | pub(crate) use union_exec::UnionExec; 45 | #[cfg(all(test, feature = "blitzar"))] 46 | mod union_exec_test; 47 | 48 | mod sort_merge_join_exec; 49 | pub use sort_merge_join_exec::SortMergeJoinExec; 50 | #[cfg(all(test, feature = "blitzar"))] 51 | mod sort_merge_join_exec_test; 52 | 53 | mod dyn_proof_plan; 54 | pub use dyn_proof_plan::DynProofPlan; 55 | 56 | #[cfg(test)] 57 | mod demo_mock_plan; 58 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/database/owned_column_error.rs: -------------------------------------------------------------------------------- 1 | use crate::base::database::ColumnType; 2 | use alloc::string::String; 3 | use snafu::Snafu; 4 | 5 | /// Errors from operations related to `OwnedColumn`s. 6 | #[derive(Snafu, Debug, PartialEq, Eq)] 7 | pub enum OwnedColumnError { 8 | /// Can not perform type casting. 9 | #[snafu(display("Can not perform type casting from {from_type:?} to {to_type:?}"))] 10 | TypeCastError { 11 | /// The type from which we are trying to cast. 12 | from_type: ColumnType, 13 | /// The type to which we are trying to cast. 14 | to_type: ColumnType, 15 | }, 16 | /// Error in converting scalars to a given column type. 17 | #[snafu(display("Error in converting scalars to a given column type: {error}"))] 18 | ScalarConversionError { 19 | /// The underlying error 20 | error: String, 21 | }, 22 | /// Unsupported operation. 23 | #[snafu(display("Unsupported operation: {error}"))] 24 | Unsupported { 25 | /// The underlying error 26 | error: String, 27 | }, 28 | } 29 | 30 | /// Errors that can occur when coercing a column. 31 | #[derive(Snafu, Debug, PartialEq, Eq)] 32 | pub(crate) enum ColumnCoercionError { 33 | /// Overflow when coercing a column. 34 | #[snafu(display("Overflow when coercing a column"))] 35 | Overflow, 36 | /// Invalid type coercion. 37 | #[snafu(display("Invalid type coercion"))] 38 | InvalidTypeCoercion, 39 | } 40 | 41 | /// Result type for operations related to `OwnedColumn`s. 42 | pub type OwnedColumnResult<T> = core::result::Result<T, OwnedColumnError>; 43 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/hyperkzg/mod.rs: -------------------------------------------------------------------------------- 1 | //! Implementation of `HyperKZG` PCS for usage with proof-of-sql. 2 | //! 3 | //!
The prover side of this implementation simply wraps around nova's hyper-kzg implementation. 4 | //! 5 | //! While the `Commitment` for this commitment scheme is always available, the corresponding 6 | //! `CommitmentEvaluationProof` is gated behind the `hyperkzg_proof` feature flag. 7 | //! This is done to preserve `no_std` compatibility for `no_std` commitment generation apps. 8 | 9 | mod scalar; 10 | pub use scalar::BNScalar; 11 | 12 | mod public_setup; 13 | #[cfg(feature = "std")] 14 | pub use public_setup::deserialize_flat_compressed_hyperkzg_public_setup_from_reader; 15 | #[cfg(all(test, feature = "hyperkzg_proof"))] 16 | pub use public_setup::load_small_setup_for_testing; 17 | pub use public_setup::{ 18 | deserialize_flat_compressed_hyperkzg_public_setup_from_slice, HyperKZGPublicSetup, 19 | HyperKZGPublicSetupOwned, 20 | }; 21 | 22 | mod commitment; 23 | pub use commitment::HyperKZGCommitment; 24 | 25 | #[cfg(feature = "hyperkzg_proof")] 26 | mod nova_commitment; 27 | 28 | #[cfg(feature = "hyperkzg_proof")] 29 | mod nova_engine; 30 | #[cfg(feature = "hyperkzg_proof")] 31 | pub use nova_engine::{nova_commitment_key_to_hyperkzg_public_setup, HyperKZGEngine}; 32 | 33 | #[cfg(feature = "hyperkzg_proof")] 34 | mod commitment_evaluation_proof; 35 | #[cfg(feature = "hyperkzg_proof")] 36 | pub use commitment_evaluation_proof::HyperKZGCommitmentEvaluationProof; 37 | 38 | #[cfg(feature = "hyperkzg_proof")] 39 | mod halo2_conversions; 40 | 41 | #[cfg(all(test, feature = "hyperkzg_proof"))] 42 | mod evm_tests; 43 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/bit/bit_matrix.rs: -------------------------------------------------------------------------------- 1 | use super::bit_mask_utils::make_bit_mask; 2 | use crate::base::{bit::BitDistribution, scalar::Scalar}; 3 | use alloc::vec::Vec; 4 | use bnum::types::U256; 5 | use bumpalo::Bump; 6 | use core::ops::Shl; 7 | 8 | /// Let `x1, ..., xn` denote the values of a data column. Let 9 | /// `b1, ..., bk` denote the bit positions of `abs(x1), ..., abs(xn)` 10 | /// that vary. 11 | /// 12 | /// `compute_varying_bit_matrix` returns the matrix M where 13 | /// `M_ij = abs(xi) & (1 << bj) == 1` 14 | /// The last column of M corresponds to the sign bit if it varies. 
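///
/// A minimal illustration (added for clarity, not from the source): with `vals = [1, 2, 3]`
/// and varying bit positions `b = [0, 1]`, the returned columns are `[true, false, true]`
/// for bit 0 and `[false, true, true]` for bit 1, i.e., one `&[bool]` slice per varying bit.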
15 | pub fn compute_varying_bit_matrix<'a, S: Scalar>( 16 | alloc: &'a Bump, 17 | vals: &[S], 18 | dist: &BitDistribution, 19 | ) -> Vec<&'a [bool]> { 20 | let number_of_scalars = vals.len(); 21 | let num_varying_bits = dist.num_varying_bits(); 22 | let data: &'a mut [bool] = alloc.alloc_slice_fill_default(number_of_scalars * num_varying_bits); 23 | 24 | // decompose 25 | for (scalar_index, val) in vals.iter().enumerate() { 26 | let mask = make_bit_mask(*val); 27 | for (vary_index, bit_index) in dist.vary_mask_iter().enumerate() { 28 | data[scalar_index + vary_index * number_of_scalars] = 29 | (mask & U256::ONE.shl(bit_index)) != U256::ZERO; 30 | } 31 | } 32 | 33 | // make result 34 | let mut res = Vec::with_capacity(num_varying_bits); 35 | for bit_index in 0..num_varying_bits { 36 | let first = number_of_scalars * bit_index; 37 | let last = number_of_scalars * (bit_index + 1); 38 | res.push(&data[first..last]); 39 | } 40 | res 41 | } 42 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/dynamic_build_vmv_state.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | dynamic_dory_helper::compute_dynamic_v_vec, DeferredGT, DoryScalar, G1Affine, VMVProverState, 3 | VMVVerifierState, F, 4 | }; 5 | use crate::proof_primitive::dynamic_matrix_utils::standard_basis_helper::compute_dynamic_vecs; 6 | use alloc::vec::Vec; 7 | 8 | /// Builds a [`VMVProverState`] from the given parameters. 9 | pub(super) fn build_dynamic_vmv_prover_state( 10 | a: &[F], 11 | b_point: &[F], 12 | T_vec_prime: Vec<G1Affine>, 13 | nu: usize, 14 | ) -> VMVProverState { 15 | let (lo_vec, hi_vec) = 16 | compute_dynamic_vecs(bytemuck::TransparentWrapper::wrap_slice(b_point) as &[DoryScalar]); 17 | let (lo_vec, hi_vec) = ( 18 | bytemuck::TransparentWrapper::peel_slice(&lo_vec) as &[F], 19 | bytemuck::TransparentWrapper::peel_slice(&hi_vec) as &[F], 20 | ); 21 | let v_vec = compute_dynamic_v_vec(a, hi_vec, nu); 22 | VMVProverState { 23 | v_vec, 24 | T_vec_prime, 25 | L_vec: hi_vec.to_vec(), 26 | R_vec: lo_vec.to_vec(), 27 | #[cfg(test)] 28 | l_tensor: Vec::with_capacity(0), 29 | #[cfg(test)] 30 | r_tensor: b_point.to_vec(), 31 | nu, 32 | } 33 | } 34 | 35 | /// Builds a [`VMVVerifierState`] from the given parameters.
36 | pub(super) fn build_dynamic_vmv_verifier_state( 37 | y: F, 38 | b_point: &[F], 39 | T: DeferredGT, 40 | nu: usize, 41 | ) -> VMVVerifierState { 42 | VMVVerifierState { 43 | y, 44 | T, 45 | l_tensor: Vec::with_capacity(0), 46 | r_tensor: b_point.to_vec(), 47 | nu, 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/offset_to_bytes.rs: -------------------------------------------------------------------------------- 1 | pub trait OffsetToBytes<const LEN: usize> { 2 | fn offset_to_bytes(&self) -> [u8; LEN]; 3 | } 4 | 5 | impl OffsetToBytes<1> for u8 { 6 | fn offset_to_bytes(&self) -> [u8; 1] { 7 | [*self] 8 | } 9 | } 10 | 11 | impl OffsetToBytes<1> for i8 { 12 | fn offset_to_bytes(&self) -> [u8; 1] { 13 | let shifted = self.wrapping_sub(i8::MIN); 14 | shifted.to_le_bytes() 15 | } 16 | } 17 | 18 | impl OffsetToBytes<2> for i16 { 19 | fn offset_to_bytes(&self) -> [u8; 2] { 20 | let shifted = self.wrapping_sub(i16::MIN); 21 | shifted.to_le_bytes() 22 | } 23 | } 24 | 25 | impl OffsetToBytes<4> for i32 { 26 | fn offset_to_bytes(&self) -> [u8; 4] { 27 | let shifted = self.wrapping_sub(i32::MIN); 28 | shifted.to_le_bytes() 29 | } 30 | } 31 | 32 | impl OffsetToBytes<8> for i64 { 33 | fn offset_to_bytes(&self) -> [u8; 8] { 34 | let shifted = self.wrapping_sub(i64::MIN); 35 | shifted.to_le_bytes() 36 | } 37 | } 38 | 39 | impl OffsetToBytes<16> for i128 { 40 | fn offset_to_bytes(&self) -> [u8; 16] { 41 | let shifted = self.wrapping_sub(i128::MIN); 42 | shifted.to_le_bytes() 43 | } 44 | } 45 | 46 | impl OffsetToBytes<1> for bool { 47 | fn offset_to_bytes(&self) -> [u8; 1] { 48 | [u8::from(*self)] 49 | } 50 | } 51 | 52 | impl OffsetToBytes<8> for u64 { 53 | fn offset_to_bytes(&self) -> [u8; 8] { 54 | self.to_le_bytes() 55 | } 56 | } 57 | 58 | impl OffsetToBytes<32> for [u64; 4] { 59 | fn offset_to_bytes(&self) -> [u8; 32] { 60 | bytemuck::cast(*self) 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/dory_inner_product.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | dory_reduce_prove, dory_reduce_verify, scalar_product_prove, scalar_product_verify, 3 | DoryMessages, ProverSetup, ProverState, VerifierSetup, VerifierState, 4 | }; 5 | use crate::base::proof::Transcript; 6 | 7 | /// This is the prover side of the Dory-Innerproduct algorithm in section 3.3 of <https://eprint.iacr.org/2020/1274>. 8 | /// This function builds/enqueues `messages`, appends to `transcript`, and consumes `state`. 9 | #[cfg(test)] 10 | pub fn dory_inner_product_prove( 11 | messages: &mut DoryMessages, 12 | transcript: &mut impl Transcript, 13 | mut state: ProverState, 14 | setup: &ProverSetup, 15 | ) { 16 | let nu = state.nu; 17 | assert!(setup.max_nu >= nu); 18 | for _ in 0..nu { 19 | dory_reduce_prove(messages, transcript, &mut state, setup); 20 | } 21 | scalar_product_prove(messages, transcript, &state); 22 | } 23 | 24 | /// This is the verifier side of the Dory-Innerproduct algorithm in section 3.3 of <https://eprint.iacr.org/2020/1274>. 25 | /// This function consumes/dequeues from `messages`, appends to `transcript`, and consumes `state`.
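///
/// A minimal round-trip sketch (test-only; `prover_state`, `verifier_state`, and the setups are
/// assumed in scope, mirroring the pattern in `fold_scalars_test.rs`, where the verifier replays
/// the prover's messages against a transcript built from the same label):
/// ```ignore
/// let mut messages = DoryMessages::default();
/// let mut transcript = Transcript::new(b"dory_inner_product_test");
/// dory_inner_product_prove(&mut messages, &mut transcript, prover_state, &prover_setup);
/// let mut transcript = Transcript::new(b"dory_inner_product_test");
/// assert!(dory_inner_product_verify(&mut messages, &mut transcript, verifier_state, &verifier_setup));
/// ```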
26 | #[cfg(test)] 27 | pub fn dory_inner_product_verify( 28 | messages: &mut DoryMessages, 29 | transcript: &mut impl Transcript, 30 | mut state: VerifierState, 31 | setup: &VerifierSetup, 32 | ) -> bool { 33 | let nu = state.nu; 34 | assert!(setup.max_nu >= nu); 35 | for _ in 0..nu { 36 | if !dory_reduce_verify(messages, transcript, &mut state, setup) { 37 | return false; 38 | } 39 | } 40 | scalar_product_verify(messages, transcript, state, setup) 41 | } 42 | -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/scripts/run_benchmarks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Get the absolute path to the project root 4 | PROJECT_ROOT="$(cd "$(dirname "$0")/../../.." && pwd)" 5 | 6 | # Create a "data" directory if it doesn't already exist 7 | mkdir -p "$PROJECT_ROOT/crates/proof-of-sql-benches/data" 8 | 9 | # Get the current timestamp in the format "YYYY-MM-DD_HH-MM-SS" 10 | TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S") 11 | 12 | # Export the CSV_PATH environment variable 13 | export CSV_PATH="$PROJECT_ROOT/crates/proof-of-sql-benches/data/results_${TIMESTAMP}.csv" 14 | echo "Saving results at: ${CSV_PATH}" 15 | 16 | # Define the schemes and table sizes to iterate over 17 | SCHEMES=("hyper-kzg") 18 | TABLE_SIZES=( 19 | 10000 20000 30000 40000 50000 60000 70000 80000 90000 100000 20 | 110000 120000 130000 140000 150000 160000 170000 180000 190000 21 | 200000 400000 600000 800000 1000000 3000000 6000000 10000000 22 | ) 23 | 24 | # Define the queries to run 25 | QUERIES=("filter" "complex-filter" "group-by") 26 | 27 | # Run the benchmarks 28 | cd "$PROJECT_ROOT" 29 | for SCHEME in "${SCHEMES[@]}"; do 30 | for QUERY in "${QUERIES[@]}"; do 31 | for TABLE_SIZE in "${TABLE_SIZES[@]}"; do 32 | cargo run --release --bin proof-of-sql-benches -- -s "$SCHEME" -t "$TABLE_SIZE" -r 0 -i 10 -q "$QUERY" 33 | done 34 | done 35 | done 36 | 37 | # Join query 38 | # The data gets doubled - 1/2 of the table size is equivalent to the table size for other queries 39 | for SCHEME in "${SCHEMES[@]}"; do 40 | for TABLE_SIZE in "${TABLE_SIZES[@]}"; do 41 | HALF_TABLE_SIZE=$((TABLE_SIZE / 2)) 42 | cargo run --release --bin proof-of-sql-benches -- -s "$SCHEME" -t "$HALF_TABLE_SIZE" -r 0 -i 10 -q join 43 | done 44 | done 45 | -------------------------------------------------------------------------------- /solidity/test/base/Queue.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import {Errors} from "../../src/base/Errors.sol"; 7 | import "../../src/base/Queue.pre.sol"; 8 | 9 | contract ErrorsTest is Test { 10 | /// forge-config: default.allow_internal_expect_revert = true 11 | function testEmptyDequeue() public { 12 | uint256[][1] memory queue = [new uint256[](0)]; 13 | vm.expectRevert(Errors.EmptyQueue.selector); 14 | Queue.__dequeue(queue); 15 | } 16 | 17 | /// forge-config: default.allow_internal_expect_revert = true 18 | function testDequeue() public { 19 | uint256[][1] memory queue = [new uint256[](3)]; 20 | queue[0][0] = 1001; 21 | queue[0][1] = 1002; 22 | queue[0][2] = 1003; 23 | assert(Queue.__dequeue(queue) == 1001); 24 | assert(Queue.__dequeue(queue) == 1002); 25 | assert(Queue.__dequeue(queue) == 1003); 26 | vm.expectRevert(Errors.EmptyQueue.selector); 27 
| Queue.__dequeue(queue); 28 | } 29 | 30 | /// forge-config: default.allow_internal_expect_revert = true 31 | function testFuzzDequeue(uint256[][1] memory queue) public { 32 | uint256 length = queue[0].length; 33 | uint256[] memory original = new uint256[](length); 34 | for (uint256 i = 0; i < length; ++i) { 35 | original[i] = queue[0][i]; 36 | } 37 | for (uint256 i = 0; i < length; ++i) { 38 | assert(Queue.__dequeue(queue) == original[i]); 39 | } 40 | vm.expectRevert(Errors.EmptyQueue.selector); 41 | Queue.__dequeue(queue); 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/postprocessing/select_postprocessing.rs: -------------------------------------------------------------------------------- 1 | use super::{PostprocessingResult, PostprocessingStep}; 2 | use crate::base::{ 3 | database::{OwnedColumn, OwnedTable}, 4 | map::IndexMap, 5 | scalar::Scalar, 6 | }; 7 | use alloc::vec::Vec; 8 | use proof_of_sql_parser::intermediate_ast::AliasedResultExpr; 9 | use serde::{Deserialize, Serialize}; 10 | use sqlparser::ast::Ident; 11 | 12 | /// The select expression used to select, reorder, and apply alias transformations 13 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 14 | pub struct SelectPostprocessing { 15 | /// The aliased result expressions we select 16 | aliased_result_exprs: Vec<AliasedResultExpr>, 17 | } 18 | 19 | impl SelectPostprocessing { 20 | /// Create a new `SelectPostprocessing` node. 21 | #[must_use] 22 | pub fn new(aliased_result_exprs: Vec<AliasedResultExpr>) -> Self { 23 | Self { 24 | aliased_result_exprs, 25 | } 26 | } 27 | } 28 | 29 | impl<S: Scalar> PostprocessingStep<S> for SelectPostprocessing { 30 | /// Apply the select transformation to the given `OwnedTable`. 31 | fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>> { 32 | let cols: IndexMap<Ident, OwnedColumn<S>> = self 33 | .aliased_result_exprs 34 | .iter() 35 | .map( 36 | |aliased_result_expr| -> PostprocessingResult<(Ident, OwnedColumn<S>)> { 37 | let result_column = owned_table.evaluate(&aliased_result_expr.expr)?; 38 | Ok((aliased_result_expr.alias.into(), result_column)) 39 | }, 40 | ) 41 | .collect::<PostprocessingResult<_>>()?; 42 | Ok(OwnedTable::try_new(cols)?) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/slice_cast.rs: -------------------------------------------------------------------------------- 1 | use crate::base::if_rayon; 2 | use alloc::vec::Vec; 3 | #[cfg(feature = "rayon")] 4 | use rayon::iter::{IndexedParallelIterator, IntoParallelRefIterator, ParallelIterator}; 5 | 6 | /// This operation takes a slice and casts it to a vector of a different type using the provided function. 7 | pub fn slice_cast_with<'a, F, T>(value: &'a [F], cast: impl Fn(&'a F) -> T + Send + Sync) -> Vec<T> 8 | where 9 | F: Sync, 10 | T: Send, 11 | { 12 | if_rayon!( 13 | value.par_iter().with_min_len(super::MIN_RAYON_LEN), 14 | value.iter() 15 | ) 16 | .map(cast) 17 | .collect() 18 | } 19 | 20 | /// This operation takes a slice and casts it to a mutable slice of a different type using the provided function.
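///
/// A minimal usage sketch (illustrative, not from the source): casting a `u32` slice into a
/// preallocated `u64` buffer:
/// ```ignore
/// let src = [1_u32, 2, 3];
/// let mut dst = [0_u64; 3];
/// slice_cast_mut_with(&src, &mut dst, |&x| u64::from(x));
/// assert_eq!(dst, [1, 2, 3]);
/// ```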
21 | pub fn slice_cast_mut_with<'a, F, T>( 22 | value: &'a [F], 23 | result: &mut [T], 24 | cast: impl Fn(&'a F) -> T + Sync, 25 | ) where 26 | F: Sync, 27 | T: Send + Sync, 28 | { 29 | if_rayon!( 30 | value.par_iter().with_min_len(super::MIN_RAYON_LEN), 31 | value.iter() 32 | ) 33 | .zip(result) 34 | .for_each(|(a, b)| *b = cast(a)); 35 | } 36 | 37 | /// This operation takes a slice and casts it to a vector of a different type using the provided function. 38 | pub fn slice_cast<'a, F, T>(value: &'a [F]) -> Vec<T> 39 | where 40 | F: Sync, 41 | T: Send, 42 | &'a F: Into<T>, 43 | { 44 | slice_cast_with(value, Into::into) 45 | } 46 | 47 | /// This operation takes a slice and casts it to a mutable slice of a different type using the provided function. 48 | pub fn slice_cast_mut<'a, F, T>(value: &'a [F], result: &mut [T]) 49 | where 50 | F: Sync, 51 | T: Send + Sync, 52 | &'a F: Into<T>, 53 | { 54 | slice_cast_mut_with(value, result, Into::into); 55 | } 56 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/postprocessing/test_utility.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use proof_of_sql_parser::intermediate_ast::AliasedResultExpr; 3 | use sqlparser::ast::Ident; 4 | 5 | #[must_use] 6 | /// Producing a postprocessing object that represents a group by operation. 7 | pub fn group_by_postprocessing( 8 | cols: &[&str], 9 | result_exprs: &[AliasedResultExpr], 10 | ) -> OwnedTablePostprocessing { 11 | let ids: Vec<Ident> = cols.iter().map(|col| (*col).into()).collect(); 12 | OwnedTablePostprocessing::new_group_by( 13 | GroupByPostprocessing::try_new(ids, result_exprs.to_vec()).unwrap(), 14 | ) 15 | } 16 | 17 | /// Producing a postprocessing object that represents a select operation. 18 | /// # Panics 19 | /// 20 | /// This function may panic if the internal structures cannot be created properly, although this is unlikely under normal circumstances. 21 | #[must_use] 22 | pub fn select_expr(result_exprs: &[AliasedResultExpr]) -> OwnedTablePostprocessing { 23 | OwnedTablePostprocessing::new_select(SelectPostprocessing::new(result_exprs.to_vec())) 24 | } 25 | 26 | /// Producing a postprocessing object that represents a slice operation. 27 | #[must_use] 28 | pub fn slice(limit: Option<u64>, offset: Option<u64>) -> OwnedTablePostprocessing { 29 | OwnedTablePostprocessing::new_slice(SlicePostprocessing::new(limit, offset)) 30 | } 31 | 32 | /// Producing a postprocessing object that represents an order by operation. 33 | #[must_use] 34 | pub fn orders(indexes: &[usize], directions: &[bool]) -> OwnedTablePostprocessing { 35 | let index_direction_pairs: Vec<(usize, bool)> = indexes 36 | .iter() 37 | .copied() 38 | .zip(directions.iter().copied()) 39 | .collect(); 40 | OwnedTablePostprocessing::new_order_by(OrderByPostprocessing::new(index_direction_pairs)) 41 | } 42 | -------------------------------------------------------------------------------- /solidity/src/base/SwitchUtil.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import "./Errors.sol"; 6 | 7 | /// @title SwitchUtil 8 | /// @dev Library providing helper functions for switch statement validation.
9 | library SwitchUtil { 10 | /// @notice Validates that two values in a switch case statement match 11 | /// @custom:as-yul-wrapper 12 | /// #### Wrapped Yul Function 13 | /// ##### Signature 14 | /// ```yul 15 | /// case_const(lhs, rhs) 16 | /// ``` 17 | /// ##### Parameters 18 | /// * `lhs` - the left-hand side value to compare 19 | /// * `rhs` - the right-hand side value to compare 20 | /// @dev This function reverts with ERR_INCORRECT_CASE_CONST if the values don't match 21 | /// @dev Note: This function is designed to be used with constant values. When both lhs and rhs 22 | /// @dev are constants and the --optimize flag is used, the entire function call will be eliminated 23 | /// @dev at compile time. The compiler will either: 24 | /// @dev 1. Remove the call entirely if the constants match 25 | /// @dev 2. Replace it with a direct revert if they don't match 26 | /// @dev This means there is zero runtime overhead for switch statement validation in the intended usage. 27 | /// @param __lhs The left-hand side value 28 | /// @param __rhs The right-hand side value 29 | function __caseConst(uint256 __lhs, uint256 __rhs) internal pure { 30 | assembly { 31 | // IMPORT-YUL Errors.sol 32 | function err(code) { 33 | revert(0, 0) 34 | } 35 | function case_const(lhs, rhs) { 36 | if sub(lhs, rhs) { err(ERR_INCORRECT_CASE_CONST) } 37 | } 38 | case_const(__lhs, __rhs) 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/src/utils/jaeger_setup.rs: -------------------------------------------------------------------------------- 1 | //! # Jaeger Setup Module 2 | //! 3 | //! This module provides functionality to set up Jaeger tracing for benchmarks. 4 | //! Jaeger is a distributed tracing system that helps in monitoring and troubleshooting 5 | //! performance issues in distributed systems. This module integrates Jaeger with the 6 | //! `tracing` crate to enable tracing for Rust applications. 7 | 8 | use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter}; 9 | 10 | /// Sets up Jaeger tracing for the benchmarks. 11 | /// 12 | /// This function initializes a Jaeger tracer using the `opentelemetry_jaeger` crate and 13 | /// integrates it with the `tracing` crate. It configures the tracing subscriber to use 14 | /// the Jaeger tracer and applies an environment filter for log levels. 15 | /// 16 | /// ### Returns 17 | /// - `Ok(())` if the tracing setup is successful. 18 | /// - `Err(Box<dyn std::error::Error>)` if an error occurs during setup. 19 | /// 20 | /// ### Panics 21 | /// 22 | /// This function panics if the tracing subscriber fails to initialize. 23 | pub fn setup_jaeger_tracing() -> Result<(), Box<dyn std::error::Error>> { 24 | let tracer = opentelemetry_jaeger::new_agent_pipeline() 25 | .with_service_name("benches") 26 | .install_simple() 27 | .unwrap(); 28 | 29 | let opentelemetry = tracing_opentelemetry::layer().with_tracer(tracer); 30 | 31 | let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("DEBUG")); 32 | 33 | Ok(tracing_subscriber::registry() 34 | .with(opentelemetry) 35 | .with(filter) 36 | .try_init()?) 37 | } 38 | 39 | /// Stops Jaeger tracing for the benchmarks. 40 | /// 41 | /// This function shuts down the global tracer provider for Jaeger tracing.
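///
/// Typical usage (a sketch; `run_benchmarks` is a hypothetical placeholder for the traced
/// benchmark body): call this after the traced work completes so buffered spans are exported
/// before the process exits.
/// ```ignore
/// setup_jaeger_tracing()?;
/// run_benchmarks();
/// stop_jaeger_tracing();
/// ```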
42 | pub fn stop_jaeger_tracing() { 43 | opentelemetry::global::shutdown_tracer_provider(); 44 | } 45 | -------------------------------------------------------------------------------- /solidity/src/base/Queue.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import "./Constants.sol"; 6 | import "./Errors.sol"; 7 | 8 | /// @title Queue 9 | /// @dev Library providing queue operations for memory-based queues. 10 | library Queue { 11 | /// @notice Dequeues a value from the front of the queue 12 | /// @custom:as-yul-wrapper 13 | /// #### Wrapped Yul Function 14 | /// ##### Signature 15 | /// ```yul 16 | /// dequeue(queue_ptr) -> value 17 | /// ``` 18 | /// ##### Parameters 19 | /// * `queue_ptr` - pointer to the array in memory. In Solidity memory layout, 20 | /// this points to where the array length is stored, followed by the array elements 21 | /// ##### Return Values 22 | /// * `value` - the dequeued value from the front of the queue 23 | /// @dev Removes and returns the first element from the queue. 24 | /// Reverts with Errors.EmptyQueue if the queue is empty. 25 | /// @param __queue Single-element array containing the queue array 26 | /// @return __value The dequeued value 27 | function __dequeue(uint256[][1] memory __queue) internal pure returns (uint256 __value) { 28 | assembly { 29 | // IMPORT-YUL Errors.sol 30 | function err(code) { 31 | revert(0, 0) 32 | } 33 | function dequeue(queue_ptr) -> value { 34 | let queue := mload(queue_ptr) 35 | let length := mload(queue) 36 | if iszero(length) { err(ERR_EMPTY_QUEUE) } 37 | queue := add(queue, WORD_SIZE) 38 | value := mload(queue) 39 | mstore(queue, sub(length, 1)) 40 | mstore(queue_ptr, queue) 41 | } 42 | __value := dequeue(__queue) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /.github/workflows/code-coverage.yml: -------------------------------------------------------------------------------- 1 | name: CI-Code-Coverage 2 | 3 | on: 4 | pull_request: 5 | types: [opened, synchronize, reopened] 6 | merge_group: 7 | 8 | concurrency: 9 | group: ${{ github.workflow }}-${{ github.ref }} 10 | cancel-in-progress: true 11 | 12 | env: 13 | CARGO_TERM_COLOR: always 14 | BLITZAR_BACKEND: cpu 15 | 16 | jobs: 17 | coverage: 18 | name: Code Coverage 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Checkout sources 22 | uses: actions/checkout@v4 23 | - name: Install Dependencies 24 | run: sudo apt-get update && sudo apt-get install -y clang lld 25 | - uses: taiki-e/install-action@cargo-llvm-cov 26 | - name: Clean Previous Coverage Artifacts 27 | run: cargo llvm-cov clean --workspace 28 | - name: Run Tests to Generate Coverage Data (All Features) 29 | run: cargo llvm-cov --no-report --all-features --workspace --exclude proof-of-sql-benches --ignore-filename-regex evm_tests 30 | #- name: Run Tests to Generate Coverage Data (Rayon Only) 31 | # run: cargo llvm-cov --no-report --no-default-features --features="rayon" 32 | #- name: Run Tests to Generate Coverage Data (Blitzar Only) 33 | # run: cargo llvm-cov --no-report --no-default-features --features="blitzar" 34 | #- name: Run Tests to Generate Coverage Data (std only) 35 | # run: cargo llvm-cov --no-report --no-default-features --features="std" 36 | - name: Generate Final LCOV Report (Merged Coverage) 37 | run: cargo llvm-cov report --lcov 
--ignore-filename-regex '[/\\]evm_tests\.rs$' --output-path lcov.info 38 | - name: Upload Coverage to Codecov 39 | uses: codecov/codecov-action@v5 40 | env: 41 | CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} 42 | with: 43 | files: lcov.info 44 | fail_ci_if_error: true 45 | - name: Enforce Coverage Threshold 46 | run: cargo llvm-cov report --summary-only --fail-under-lines 94 47 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/standard_binary_serde.rs: -------------------------------------------------------------------------------- 1 | use alloc::vec::Vec; 2 | use bincode::{ 3 | config::Config, 4 | error::{DecodeError, EncodeError}, 5 | }; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | fn standard_binary_config() -> impl Config { 9 | bincode::config::legacy() 10 | .with_fixed_int_encoding() 11 | .with_big_endian() 12 | } 13 | 14 | /// The standard serialization we use for our proof types 15 | pub fn try_standard_binary_serialization( 16 | value_to_be_serialized: impl Serialize, 17 | ) -> Result<Vec<u8>, EncodeError> { 18 | bincode::serde::encode_to_vec(value_to_be_serialized, standard_binary_config()) 19 | } 20 | 21 | /// The standard deserialization we use for our proof types 22 | pub fn try_standard_binary_deserialization<D: for<'a> Deserialize<'a>>( 23 | value_to_be_deserialized: &[u8], 24 | ) -> Result<(D, usize), DecodeError> { 25 | bincode::serde::decode_from_slice(value_to_be_deserialized, standard_binary_config()) 26 | } 27 | 28 | #[cfg(test)] 29 | mod tests { 30 | use super::{try_standard_binary_deserialization, try_standard_binary_serialization}; 31 | use serde::{Deserialize, Serialize}; 32 | 33 | #[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] 34 | struct SerdeTestType { 35 | a: String, 36 | b: bool, 37 | c: i32, 38 | } 39 | 40 | #[test] 41 | fn round_trip() { 42 | let obj = SerdeTestType { 43 | a: "test".to_string(), 44 | b: false, 45 | c: 123, 46 | }; 47 | let serialized = try_standard_binary_serialization(obj.clone()).unwrap(); 48 | let (deserialized, _): (SerdeTestType, _) = 49 | try_standard_binary_deserialization(&serialized).unwrap(); 50 | assert_eq!(obj, deserialized); 51 | let reserialized = try_standard_binary_serialization(deserialized).unwrap(); 52 | assert_eq!(serialized, reserialized); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_exprs/proof_expr_test.rs: -------------------------------------------------------------------------------- 1 | use super::{test_utility::*, DynProofExpr, ProofExpr}; 2 | use crate::base::{ 3 | commitment::InnerProductProof, 4 | database::{table_utility::*, Column, TableRef, TableTestAccessor, TestAccessor}, 5 | }; 6 | use bumpalo::Bump; 7 | 8 | #[test] 9 | fn we_can_compute_the_correct_result_of_a_complex_bool_expr_using_first_round_evaluate() { 10 | let alloc = Bump::new(); 11 | let data = table([ 12 | borrowed_bigint( 13 | "a", 14 | [1, 2, 3, 4, 5, 5, 5, 5, 5, 5, 5, 5, 6, 7, 8, 9, 999], 15 | &alloc, 16 | ), 17 | borrowed_varchar( 18 | "b", 19 | [ 20 | "g", "g", "t", "ghi", "g", "g", "jj", "f", "g", "g", "gar", "qwe", "g", "g", "poi", 21 | "zxc", "999", 22 | ], 23 | &alloc, 24 | ), 25 | borrowed_int128( 26 | "c", 27 | [ 28 | 3, 123, 3, 234, 3, 345, 3, 456, 3, 567, 3, 678, 3, 789, 3, 890, 999, 29 | ], 30 | &alloc, 31 | ), 32 | ]); 33 | let mut accessor = TableTestAccessor::<InnerProductProof>::new_empty_with_setup(()); 34 | let t = TableRef::new("sxt", "t"); 35 | accessor.add_table(t.clone(), data.clone(), 0);
36 | // (a <= 5 || b == "g") && c != 3 37 | let bool_expr: DynProofExpr = and( 38 | or( 39 | lte(column(&t, "a", &accessor), const_bigint(5)), 40 | equal(column(&t, "b", &accessor), const_varchar("g")), 41 | ), 42 | not(equal(column(&t, "c", &accessor), const_int128(3))), 43 | ); 44 | let res = bool_expr.first_round_evaluate(&alloc, &data, &[]).unwrap(); 45 | let expected_res = Column::Boolean(&[ 46 | false, true, false, true, false, true, false, true, false, true, false, true, false, true, 47 | false, false, false, 48 | ]); 49 | assert_eq!(res, expected_res); 50 | } 51 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/parse/where_expr_builder.rs: -------------------------------------------------------------------------------- 1 | use super::{ConversionError, DynProofExprBuilder}; 2 | use crate::{ 3 | base::{ 4 | database::{ColumnRef, ColumnType}, 5 | map::IndexMap, 6 | }, 7 | sql::proof_exprs::{DynProofExpr, ProofExpr}, 8 | }; 9 | use alloc::boxed::Box; 10 | use proof_of_sql_parser::intermediate_ast::Expression; 11 | use sqlparser::ast::Ident; 12 | 13 | /// Builder that enables building a `proof_of_sql::sql::proof_exprs::DynProofExpr` from a `proof_of_sql_parser::intermediate_ast::Expression` that is 14 | /// intended to be used as the where clause in a filter expression or group by expression. 15 | pub struct WhereExprBuilder<'a> { 16 | builder: DynProofExprBuilder<'a>, 17 | } 18 | impl<'a> WhereExprBuilder<'a> { 19 | /// Creates a new `WhereExprBuilder` with the given column mapping. 20 | pub fn new(column_mapping: &'a IndexMap<Ident, ColumnRef>) -> Self { 21 | Self { 22 | builder: DynProofExprBuilder::new(column_mapping), 23 | } 24 | } 25 | /// Builds a `proof_of_sql::sql::proof_exprs::DynProofExpr` from a `proof_of_sql_parser::intermediate_ast::Expression` that is 26 | /// intended to be used as the where clause in a filter expression or group by expression.
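///
/// Returns `Ok(None)` when no where clause is supplied, and errors with
/// `ConversionError::NonbooleanWhereClause` if the built expression is not of boolean type.
/// A usage sketch (`column_mapping` and `where_clause` assumed in scope):
/// ```ignore
/// let proof_expr = WhereExprBuilder::new(&column_mapping).build(where_clause)?;
/// ```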
27 | pub fn build( 28 | self, 29 | where_expr: Option<Box<Expression>>, 30 | ) -> Result<Option<DynProofExpr>, ConversionError> { 31 | where_expr 32 | .map(|where_expr| { 33 | let expr_plan = self.builder.build(&where_expr)?; 34 | // Ensure that the expression is a boolean expression 35 | match expr_plan.data_type() { 36 | ColumnType::Boolean => Ok(expr_plan), 37 | _ => Err(ConversionError::NonbooleanWhereClause { 38 | datatype: expr_plan.data_type(), 39 | }), 40 | } 41 | }) 42 | .transpose() 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/census/census-income.csv: -------------------------------------------------------------------------------- 1 | id,geography,id2,households_estimate_total 2 | 0400000US01,Alabama,1,1837292 3 | 0400000US02,Alaska,2,250875 4 | 0400000US04,Arizona,4,2381501 5 | 0400000US05,Arkansas,5,1130417 6 | 0400000US06,California,6,12581722 7 | 0400000US08,Colorado,8,1989371 8 | 0400000US09,Connecticut,9,1348275 9 | 0400000US10,Delaware,10,337245 10 | 0400000US11,District of Columbia,11,268015 11 | 0400000US12,Florida,12,7168502 12 | 0400000US13,Georgia,13,3522934 13 | 0400000US15,Hawaii,15,449296 14 | 0400000US16,Idaho,16,583452 15 | 0400000US17,Illinois,17,4763457 16 | 0400000US18,Indiana,18,2482558 17 | 0400000US19,Iowa,19,1227201 18 | 0400000US20,Kansas,20,1109747 19 | 0400000US21,Kentucky,21,1693399 20 | 0400000US22,Louisiana,22,1715997 21 | 0400000US23,Maine,23,552589 22 | 0400000US24,Maryland,24,2149424 23 | 0400000US25,Massachusetts,25,2528592 24 | 0400000US26,Michigan,26,3815532 25 | 0400000US27,Minnesota,27,2109924 26 | 0400000US28,Mississippi,28,1086898 27 | 0400000US29,Missouri,29,2353778 28 | 0400000US30,Montana,30,405504 29 | 0400000US31,Nebraska,31,729572 30 | 0400000US32,Nevada,32,995980 31 | 0400000US33,New Hampshire,33,518088 32 | 0400000US34,New Jersey,34,3181152 33 | 0400000US35,New Mexico,35,760251 34 | 0400000US36,New York,36,7214163 35 | 0400000US37,North Carolina,37,3721358 36 | 0400000US38,North Dakota,38,291468 37 | 0400000US39,Ohio,39,4551497 38 | 0400000US40,Oklahoma,40,1445059 39 | 0400000US41,Oregon,41,1516591 40 | 0400000US42,Pennsylvania,42,4945140 41 | 0400000US44,Rhode Island,44,410347 42 | 0400000US45,South Carolina,45,1781957 43 | 0400000US46,South Dakota,46,326086 44 | 0400000US47,Tennessee,47,2480467 45 | 0400000US48,Texas,48,8965352 46 | 0400000US49,Utah,49,891240 47 | 0400000US50,Vermont,50,256563 48 | 0400000US51,Virginia,51,3026761 49 | 0400000US53,Washington,53,2634496 50 | 0400000US54,West Virginia,54,739759 51 | 0400000US55,Wisconsin,55,2281781 52 | 0400000US56,Wyoming,56,222679 53 | 0400000US72,Puerto Rico,72,1254274 54 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/scalar/mont_scalar_test.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::{ 2 | test_scalar::{TestMontConfig, TestScalar}, 3 | Scalar, ScalarConversionError, 4 | }; 5 | use ark_ff::MontConfig; 6 | use bnum::types::U256; 7 | use num_bigint::BigInt; 8 | #[test] 9 | fn test_bigint_to_scalar_overflow() { 10 | assert_eq!( 11 | TestScalar::try_from( 12 | "3618502788666131106986593281521497120428558179689953803000975469142727125494" 13 | .parse::<BigInt>() 14 | .unwrap() 15 | ) 16 | .unwrap(), 17 | TestScalar::MAX_SIGNED 18 | ); 19 | assert_eq!( 20 | TestScalar::try_from( 21 | "-3618502788666131106986593281521497120428558179689953803000975469142727125494" 22 | .parse::<BigInt>() 23 | .unwrap() 24 | ) 25 |
.unwrap(), 26 | -TestScalar::MAX_SIGNED 27 | ); 28 | 29 | assert!(matches!( 30 | TestScalar::try_from( 31 | "3618502788666131106986593281521497120428558179689953803000975469142727125495" 32 | .parse::<BigInt>() 33 | .unwrap() 34 | ), 35 | Err(ScalarConversionError::Overflow { .. }) 36 | )); 37 | assert!(matches!( 38 | TestScalar::try_from( 39 | "-3618502788666131106986593281521497120428558179689953803000975469142727125495" 40 | .parse::<BigInt>() 41 | .unwrap() 42 | ), 43 | Err(ScalarConversionError::Overflow { .. }) 44 | )); 45 | } 46 | 47 | #[test] 48 | fn we_can_bound_modulus_using_max_bits() { 49 | let modulus_of_i_max_bits = U256::ONE << TestScalar::MAX_BITS; 50 | let modulus_of_i_max_bits_plus_1 = U256::ONE << (TestScalar::MAX_BITS + 1); 51 | let modulus_of_test_scalar = U256::from(TestMontConfig::MODULUS.0); 52 | assert!(modulus_of_i_max_bits <= modulus_of_test_scalar); 53 | assert!(modulus_of_i_max_bits_plus_1 > modulus_of_test_scalar); 54 | } 55 | -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/src/README.md: -------------------------------------------------------------------------------- 1 | # Running benchmarks 2 | 3 | ## Jaeger benchmarking 4 | 5 | The Jaeger benchmarks/tracing are wrapped by a binary. The motivation for the wrapper is to allow greater control over benchmark parameters. To run benchmarks with Jaeger, do the following: 6 | 7 | 1. Spin up the Jaeger service on port 6831 to receive the benchmark trace data; it also serves the Jaeger UI on port 16686. 8 | ```bash 9 | docker run --rm -d --name jaeger -p 6831:6831/udp -p 16686:16686 jaegertracing/all-in-one:1.62.0 10 | ``` 11 | 2. See all the options to run a benchmark. 12 | ```bash 13 | cargo run --release --bin proof-of-sql-benches -- --help 14 | ``` 15 | 3. Navigate to http://localhost:16686/ to see the results. 16 | 4. To end the Jaeger service, run 17 | ```bash 18 | docker kill jaeger 19 | ``` 20 | 21 | All the options are outlined in the help and `main.rs` module. 22 | 23 | ### Example 24 | 25 | To run a benchmark on the `HyperKZG` commitment scheme using the `Single Column Filter` query with a table size of `1_000_000` for `3` iterations, use the following command. 26 | 27 | ```bash 28 | cargo run --release --bin proof-of-sql-benches -- -s hyper-kzg -i 3 -t 1000000 -q single-column-filter 29 | ``` 30 | 31 | ### Memory logging (optional) 32 | 33 | Jaeger benchmarks default to logging any traces at `DEBUG` level and above. Memory consumption is logged at `TRACE` level. In order to capture memory consumption in the Jaeger benchmarks, add `RUST_LOG=trace` to the command. 34 | 35 | Example 36 | ```bash 37 | RUST_LOG=trace cargo run --release --bin proof-of-sql-benches 38 | ``` 39 | 40 | ## Criterion benchmarking 41 | 42 | To run benchmarks with Criterion, do the following: 43 | 44 | 1. Run the benchmarks. (Warning: this takes a very long time.) 45 | ```bash 46 | cargo bench -p proof-of-sql-benches --bench bench_append_rows --features="test" 47 | ``` 48 | 2. Navigate to `target/criterion/report/index.html` to see the results.
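
## Batch runs

The repository also provides `scripts/run_benchmarks.sh`, which sweeps the supported schemes, queries, and a range of table sizes, appending timings to a timestamped CSV whose path is passed via the `CSV_PATH` environment variable. Note that for the `join` query the script halves the requested table size, since the join doubles the data; for example, the following run (a sketch mirroring the script) is equivalent to a table size of `1_000_000` for the other queries:

```bash
cargo run --release --bin proof-of-sql-benches -- -s hyper-kzg -t 500000 -r 0 -i 10 -q join
```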
49 | -------------------------------------------------------------------------------- /solidity/test/base/MathUtil.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {MathUtil} from "../../src/base/MathUtil.sol"; 6 | 7 | library MathUtilTest { 8 | function testWeCanComputeLog2Up() public pure { 9 | /* solhint-disable gas-strict-inequalities */ 10 | for (uint256 i = 0; i <= 2; ++i) { 11 | assert(MathUtil.__log2Up(i) == 1); 12 | } 13 | for (uint256 i = 3; i <= 4; ++i) { 14 | assert(MathUtil.__log2Up(i) == 2); 15 | } 16 | for (uint256 i = 5; i <= 8; ++i) { 17 | assert(MathUtil.__log2Up(i) == 3); 18 | } 19 | for (uint256 i = 9; i <= 16; ++i) { 20 | assert(MathUtil.__log2Up(i) == 4); 21 | } 22 | for (uint256 i = 17; i <= 32; ++i) { 23 | assert(MathUtil.__log2Up(i) == 5); 24 | } 25 | /* solhint-enable gas-strict-inequalities */ 26 | } 27 | 28 | function testLog2UpEdgeCases() public pure { 29 | for (uint256 exponent = 1; exponent < 256; ++exponent) { 30 | uint256 value = 1 << exponent; 31 | assert(MathUtil.__log2Up(value) == exponent); 32 | assert(MathUtil.__log2Up(value - 1) == exponent); 33 | assert(MathUtil.__log2Up(value + 1) == exponent + 1); 34 | } 35 | assert(MathUtil.__log2Up(uint256(int256(-1))) == 256); 36 | } 37 | 38 | function testFuzzLog2Up(uint256 value) public pure { 39 | uint256 exponent = MathUtil.__log2Up(value); 40 | if (value < 2) { 41 | assert(exponent == 1); 42 | return; 43 | } else if (exponent < 256) { 44 | assert((1 << exponent) >= value); // solhint-disable-line gas-strict-inequalities 45 | assert((1 << (exponent - 1)) < value); 46 | } else { 47 | assert(exponent == 256); 48 | assert((1 << 255) < value); 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /crates/proof-of-sql-benches/src/utils/results_io.rs: -------------------------------------------------------------------------------- 1 | use csv::{Writer, WriterBuilder}; 2 | use std::{fs::OpenOptions, io::BufWriter, path::Path}; 3 | 4 | /// Writes the header to the CSV file. 5 | /// 6 | /// # Arguments 7 | /// * `writer` - A mutable reference to the CSV writer. 8 | /// 9 | /// # Panics 10 | /// * If the header cannot be written to the CSV file. 11 | fn write_csv_header(writer: &mut Writer<BufWriter<std::fs::File>>) { 12 | writer 13 | .write_record([ 14 | "commitment_scheme", 15 | "query", 16 | "table_size", 17 | "generate_proof (ms)", 18 | "verify_proof (ms)", 19 | "iteration", 20 | ]) 21 | .expect("Failed to write headers to CSV file."); 22 | } 23 | 24 | /// Appends values to an existing CSV file or creates a new one if it doesn't exist. 25 | /// 26 | /// # Arguments 27 | /// * `file_path` - The path to the CSV file. 28 | /// * `new_row` - A vector of strings to append to the file. 29 | /// 30 | /// # Panics 31 | /// * If the file cannot be opened, read, or appended.
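///
/// A usage sketch (illustrative values matching the header columns written by
/// `write_csv_header`):
/// ```ignore
/// let row = vec!["hyper-kzg".to_string(), "filter".to_string(), "1000000".to_string(),
///     "12.3".to_string(), "4.5".to_string(), "0".to_string()];
/// append_to_csv(Path::new("results.csv"), &row);
/// ```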
32 | pub fn append_to_csv(file_path: &Path, new_row: &[String]) { 33 | // Open the file in append mode or create it if it doesn't exist 34 | let file = OpenOptions::new() 35 | .read(true) 36 | .append(true) 37 | .create(true) 38 | .open(file_path) 39 | .expect("Failed to open or create the CSV file."); 40 | 41 | // Check if the file is empty to determine if we need to write headers 42 | let is_empty = file.metadata().map(|m| m.len() == 0).unwrap_or(true); 43 | 44 | // Create a CSV writer 45 | let mut writer = WriterBuilder::new().from_writer(BufWriter::new(file)); 46 | 47 | // Write headers if the file is empty 48 | if is_empty { 49 | write_csv_header(&mut writer); 50 | } 51 | 52 | // Write new row to the CSV file 53 | writer 54 | .write_record(new_row) 55 | .expect("Failed to write row to CSV file."); 56 | 57 | writer.flush().expect("Failed to flush CSV writer."); 58 | } 59 | -------------------------------------------------------------------------------- /solidity/test/proof_exprs/NotExpr.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import "../../src/base/Constants.sol"; 7 | import {VerificationBuilder} from "../../src/builder/VerificationBuilder.pre.sol"; 8 | import {NotExpr} from "../../src/proof_exprs/NotExpr.pre.sol"; 9 | import {F} from "../base/FieldUtil.sol"; 10 | 11 | contract NotExprTest is Test { 12 | function testSimpleNotExpr() public pure { 13 | bytes memory expr = 14 | abi.encodePacked(abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, int64(1)), hex"abcdef"); 15 | VerificationBuilder.Builder memory builder; 16 | 17 | uint256 eval; 18 | (expr, builder, eval) = NotExpr.__notExprEvaluate(expr, builder, 10); 19 | 20 | assert(eval == 0); 21 | bytes memory expectedExprOut = hex"abcdef"; 22 | assert(expr.length == expectedExprOut.length); 23 | uint256 exprOutLength = expr.length; 24 | for (uint256 i = 0; i < exprOutLength; ++i) { 25 | assert(expr[i] == expectedExprOut[i]); 26 | } 27 | } 28 | 29 | function testFuzzNotExpr( 30 | VerificationBuilder.Builder memory builder, 31 | uint256 chiEvaluation, 32 | int64 inputValue, 33 | bytes memory trailingExpr 34 | ) public pure { 35 | bytes memory expr = 36 | abi.encodePacked(abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, inputValue), trailingExpr); 37 | 38 | uint256 eval; 39 | (expr, builder, eval) = NotExpr.__notExprEvaluate(expr, builder, chiEvaluation); 40 | 41 | assert(eval == (F.from(chiEvaluation) - F.from(inputValue) * F.from(chiEvaluation)).into()); 42 | assert(expr.length == trailingExpr.length); 43 | uint256 exprOutLength = expr.length; 44 | for (uint256 i = 0; i < exprOutLength; ++i) { 45 | assert(expr[i] == trailingExpr[i]); 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /solidity/test/proof_exprs/LiteralExpr.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import "../../src/base/Constants.sol"; 7 | import "../../src/base/Errors.sol"; 8 | import {LiteralExpr} from "../../src/proof_exprs/LiteralExpr.pre.sol"; 9 | import {F} from "../base/FieldUtil.sol"; 10 | 
11 | contract LiteralExprTest is Test { 12 | function testLiteralExpr() public pure { 13 | bytes memory exprIn = abi.encodePacked(DATA_TYPE_BIGINT_VARIANT, int64(2), hex"abcdef"); 14 | bytes memory expectedExprOut = hex"abcdef"; 15 | (bytes memory exprOut, uint256 eval) = LiteralExpr.__literalExprEvaluate(exprIn, 3); 16 | assert(eval == 6); 17 | assert(exprOut.length == expectedExprOut.length); 18 | uint256 exprOutLength = exprOut.length; 19 | for (uint256 i = 0; i < exprOutLength; ++i) { 20 | assert(exprOut[i] == expectedExprOut[i]); 21 | } 22 | } 23 | 24 | function testFuzzBigIntLiteralExpr(int64 literalValue, uint256 chiInEval, bytes memory trailingExpr) public pure { 25 | bytes memory exprIn = abi.encodePacked(DATA_TYPE_BIGINT_VARIANT, literalValue, trailingExpr); 26 | (bytes memory exprOut, uint256 eval) = LiteralExpr.__literalExprEvaluate(exprIn, chiInEval); 27 | assert(eval == (F.from(literalValue) * F.from(chiInEval)).into()); 28 | assert(exprOut.length == trailingExpr.length); 29 | uint256 exprOutLength = exprOut.length; 30 | for (uint256 i = 0; i < exprOutLength; ++i) { 31 | assert(exprOut[i] == trailingExpr[i]); 32 | } 33 | } 34 | 35 | function testFuzzInvalidLiteralVariant(uint32 variant) public { 36 | vm.assume(variant > DATA_TYPE_TIMESTAMP_VARIANT); 37 | bytes memory exprIn = abi.encodePacked(variant, int64(2), hex"abcdef"); 38 | vm.expectRevert(Errors.UnsupportedDataTypeVariant.selector); 39 | LiteralExpr.__literalExprEvaluate(exprIn, 3); 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof/sumcheck_mle_evaluations_test.rs: -------------------------------------------------------------------------------- 1 | use super::SumcheckMleEvaluations; 2 | use crate::{ 3 | proof_primitive::inner_product::curve_25519_scalar::Curve25519Scalar, 4 | sql::proof::SumcheckRandomScalars, 5 | }; 6 | use num_traits::One; 7 | 8 | #[test] 9 | fn we_can_track_the_evaluation_of_mles_used_within_sumcheck() { 10 | let evaluation_point = [Curve25519Scalar::from(3u64), Curve25519Scalar::from(5u64)]; 11 | let random_scalars = [ 12 | Curve25519Scalar::from(123u64), 13 | Curve25519Scalar::from(456u64), 14 | ]; 15 | 16 | let sumcheck_random_scalars = SumcheckRandomScalars::new(&random_scalars, 3, 2); 17 | 18 | let pcs_proof_evaluations = [Curve25519Scalar::from(42u64)]; 19 | let evals = SumcheckMleEvaluations::new( 20 | 3, 21 | [3, 3], 22 | [], 23 | &evaluation_point, 24 | &sumcheck_random_scalars, 25 | &pcs_proof_evaluations, 26 | &[], 27 | ); 28 | let expected_eval = (Curve25519Scalar::one() - evaluation_point[0]) 29 | * (Curve25519Scalar::one() - evaluation_point[1]) 30 | * (Curve25519Scalar::one() - random_scalars[0]) 31 | * (Curve25519Scalar::one() - random_scalars[1]) 32 | + (evaluation_point[0]) 33 | * (Curve25519Scalar::one() - evaluation_point[1]) 34 | * (random_scalars[0]) 35 | * (Curve25519Scalar::one() - random_scalars[1]) 36 | + (Curve25519Scalar::one() - evaluation_point[0]) 37 | * (evaluation_point[1]) 38 | * (Curve25519Scalar::one() - random_scalars[0]) 39 | * (random_scalars[1]); 40 | assert_eq!(evals.random_evaluation, expected_eval); 41 | 42 | let expected_eval = (Curve25519Scalar::one() - evaluation_point[0]) 43 | * (Curve25519Scalar::one() - evaluation_point[1]) 44 | + (evaluation_point[0]) * (Curve25519Scalar::one() - evaluation_point[1]) 45 | + (Curve25519Scalar::one() - evaluation_point[0]) * (evaluation_point[1]); 46 | assert_eq!( 47 | *evals.chi_evaluations.values().next().unwrap(), 48 | expected_eval 
49 | ); 50 | } 51 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/sumcheck/prover_state.rs: -------------------------------------------------------------------------------- 1 | use crate::base::polynomial::CompositePolynomial; 2 | /* 3 | * Adapted from arkworks 4 | * 5 | * See third_party/license/arkworks.LICENSE 6 | */ 7 | use crate::{base::scalar::Scalar, utils::log}; 8 | use alloc::vec::Vec; 9 | 10 | #[derive(Debug)] 11 | pub struct ProverState<S: Scalar> { 12 | /// Stores the list of products that is meant to be added together. Each multiplicand is represented by 13 | /// the index in `flattened_ml_extensions` 14 | pub list_of_products: Vec<(S, Vec<usize>)>, 15 | /// Stores a list of multilinear extensions in which `self.list_of_products` points to 16 | pub flattened_ml_extensions: Vec<Vec<S>>, 17 | pub num_vars: usize, 18 | pub max_multiplicands: usize, 19 | pub round: usize, 20 | } 21 | 22 | impl<S: Scalar> ProverState<S> { 23 | pub fn new( 24 | list_of_products: Vec<(S, Vec<usize>)>, 25 | flattened_ml_extensions: Vec<Vec<S>>, 26 | num_vars: usize, 27 | max_multiplicands: usize, 28 | ) -> Self { 29 | ProverState { 30 | list_of_products, 31 | flattened_ml_extensions, 32 | num_vars, 33 | max_multiplicands, 34 | round: 0, 35 | } 36 | } 37 | 38 | #[tracing::instrument(name = "ProverState::create", level = "debug", skip_all)] 39 | pub fn create(polynomial: &CompositePolynomial<S>) -> Self { 40 | log::log_memory_usage("Start"); 41 | 42 | assert!( 43 | polynomial.num_variables != 0, 44 | "Attempt to prove a constant." 45 | ); 46 | 47 | // create a deep copy of all unique MLExtensions 48 | let flattened_ml_extensions = polynomial 49 | .flattened_ml_extensions 50 | .iter() 51 | .map(|x| x.as_ref().clone()) 52 | .collect(); 53 | 54 | log::log_memory_usage("End"); 55 | 56 | ProverState::new( 57 | polynomial.products.clone(), 58 | flattened_ml_extensions, 59 | polynomial.num_variables, 60 | polynomial.max_multiplicands, 61 | ) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/postprocessing/select_postprocessing_test.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | base::database::{owned_table_utility::*, OwnedTable}, 3 | proof_primitive::inner_product::curve_25519_scalar::Curve25519Scalar, 4 | sql::postprocessing::{apply_postprocessing_steps, test_utility::*, OwnedTablePostprocessing}, 5 | }; 6 | use proof_of_sql_parser::utility::*; 7 | 8 | #[test] 9 | fn we_can_filter_out_owned_table_columns() { 10 | let table: OwnedTable<Curve25519Scalar> = owned_table([ 11 | bigint("c", [-5_i64, 1, -56, 2]), 12 | varchar("a", ["d", "a", "f", "b"]), 13 | ]); 14 | let postprocessing: [OwnedTablePostprocessing; 1] = 15 | [select_expr(&[aliased_expr(col("a"), "a")])]; 16 | let expected_table = owned_table([varchar("a", ["d", "a", "f", "b"])]); 17 | let actual_table = apply_postprocessing_steps(table, &postprocessing).unwrap(); 18 | assert_eq!(actual_table, expected_table); 19 | } 20 | 21 | #[test] 22 | fn we_can_reorder_and_rename_owned_table_columns() { 23 | let table: OwnedTable<Curve25519Scalar> = owned_table([ 24 | int128("c", [-5_i128, 1, -56, 2]), 25 | varchar("a", ["d", "a", "f", "b"]), 26 | ]); 27 | let postprocessing: [OwnedTablePostprocessing; 1] = [select_expr(&[ 28 | aliased_expr(col("a"), "b"), 29 | aliased_expr(col("c"), "d"), 30 | ])]; 31 | let expected_table = owned_table([ 32 | varchar("b", ["d", "a", "f", "b"]), 33 | int128("d", [-5_i128, 1, -56, 2]), 34 | ]); 35 | let
actual_table = apply_postprocessing_steps(table, &postprocessing).unwrap(); 36 | assert_eq!(actual_table, expected_table); 37 | } 38 | 39 | #[test] 40 | fn we_can_do_computation_on_owned_table_columns() { 41 | let table: OwnedTable<Curve25519Scalar> = owned_table([bigint("c", [1, 2, 3, 4])]); 42 | let res_col = add(add(col("c"), col("c")), lit(1)); 43 | let postprocessing: [OwnedTablePostprocessing; 1] = 44 | [select_expr(&[aliased_expr(res_col, "res")])]; 45 | let expected_table = owned_table([bigint("res", [3, 5, 7, 9])]); 46 | let actual_table = apply_postprocessing_steps(table, &postprocessing).unwrap(); 47 | assert_eq!(actual_table, expected_table); 48 | } 49 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/hyperkzg/scalar.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::MontScalar; 2 | 3 | /// The scalar used in the `HyperKZG` PCS. This is the BN254 scalar. 4 | pub type BNScalar = MontScalar<ark_bn254::FrConfig>; 5 | 6 | #[cfg(test)] 7 | mod tests { 8 | use super::*; 9 | use crate::base::scalar::test_scalar_constants; 10 | #[cfg(feature = "hyperkzg_proof")] 11 | use crate::base::scalar::Scalar; 12 | #[cfg(feature = "hyperkzg_proof")] 13 | use ark_std::UniformRand; 14 | #[cfg(feature = "hyperkzg_proof")] 15 | use nova_snark::provider::bn256_grumpkin::bn256::Scalar as NovaScalar; 16 | 17 | #[test] 18 | fn we_have_correct_constants_for_bn_scalar() { 19 | test_scalar_constants::<BNScalar>(); 20 | } 21 | 22 | #[test] 23 | #[cfg(feature = "hyperkzg_proof")] 24 | fn we_can_convert_from_posql_scalar_to_nova_scalar() { 25 | // Test zero 26 | assert_eq!(NovaScalar::from(0_u64), NovaScalar::from(BNScalar::ZERO)); 27 | 28 | // Test one 29 | assert_eq!(NovaScalar::from(1_u64), NovaScalar::from(BNScalar::ONE)); 30 | 31 | // Test negative one 32 | assert_eq!(-NovaScalar::from(1_u64), NovaScalar::from(-BNScalar::ONE)); 33 | 34 | // Test two 35 | assert_eq!(NovaScalar::from(2_u64), NovaScalar::from(BNScalar::TWO)); 36 | 37 | // Test ten 38 | assert_eq!(NovaScalar::from(10_u64), NovaScalar::from(BNScalar::TEN)); 39 | 40 | // Test a large value 41 | let large_value = BNScalar::from(123_456_789_u64); 42 | assert_eq!( 43 | NovaScalar::from(123_456_789_u64), 44 | NovaScalar::from(large_value) 45 | ); 46 | 47 | let mut rng = ark_std::test_rng(); 48 | 49 | for _ in 0..10 { 50 | let a = BNScalar::rand(&mut rng); 51 | let b = BNScalar::rand(&mut rng); 52 | assert_eq!( 53 | NovaScalar::from(a + b), 54 | NovaScalar::from(a) + NovaScalar::from(b) 55 | ); 56 | assert_eq!( 57 | NovaScalar::from(a * b), 58 | NovaScalar::from(a) * NovaScalar::from(b) 59 | ); 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "proof_of_sql", 3 | "version": "0.0.0-development", 4 | "devDependencies": { 5 | "conventional-changelog-conventionalcommits": "^5.0.0", 6 | "semantic-release": "^21.0.5" 7 | }, 8 | "release": { 9 | "branches": [ 10 | "main" 11 | ], 12 | "tagFormat": "v${version}", 13 | "plugins": [ 14 | [ 15 | "@semantic-release/commit-analyzer", 16 | { 17 | "preset": "conventionalCommits", 18 | "releaseRules": [ 19 | { "breaking": true, "release": "minor" }, 20 | { "revert": true, "release": "patch" }, 21 | { "type": "feat", "release": "patch" }, 22 | { "type": "fix", "release": "patch" }, 23 | { "type": "build", "release": "patch" }, 24 | { "type": "docs", "release":
"patch" }, 25 | { "type": "chore", "release": "patch" }, 26 | { "type": "bench", "release": "patch" }, 27 | { "type": "perf", "release": "patch" }, 28 | { "type": "refactor", "release": "patch" }, 29 | { "type": "test", "release": "patch" }, 30 | { "type": "ci", "release": "patch" } 31 | ], 32 | "parserOpts": { 33 | "noteKeywords": [ 34 | "BREAKING CHANGE", 35 | "BREAKING CHANGES", 36 | "BREAKING" 37 | ] 38 | } 39 | } 40 | ], 41 | "@semantic-release/release-notes-generator", 42 | [ 43 | "@semantic-release/exec", 44 | { 45 | "prepareCmd": "bash ./ci/publish.sh ${nextRelease.version}" 46 | } 47 | ], 48 | [ 49 | "@semantic-release/github" 50 | ] 51 | ] 52 | }, 53 | "dependencies": { 54 | "@semantic-release/exec": "^6.0.3" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /crates/proof-of-sql-parser/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![doc = include_str!("../README.md")] 2 | #![no_std] 3 | #![cfg_attr(test, expect(clippy::missing_panics_doc))] 4 | extern crate alloc; 5 | 6 | /// Module for handling an intermediate timestamp type received from the lexer. 7 | pub mod posql_time; 8 | #[macro_use] 9 | extern crate lalrpop_util; 10 | 11 | pub mod intermediate_ast; 12 | 13 | #[cfg(test)] 14 | mod intermediate_ast_tests; 15 | 16 | /// Shortcuts to construct intermediate AST nodes. 17 | pub mod utility; 18 | 19 | /// TODO: add docs 20 | pub(crate) mod select_statement; 21 | pub use select_statement::SelectStatement; 22 | 23 | /// Error definitions for proof-of-sql-parser 24 | pub mod error; 25 | pub use error::ParseError; 26 | pub(crate) use error::ParseResult; 27 | 28 | /// TODO: add docs 29 | pub(crate) mod identifier; 30 | pub use identifier::Identifier; 31 | 32 | pub mod resource_id; 33 | pub use resource_id::ResourceId; 34 | 35 | pub mod sqlparser; 36 | 37 | // lalrpop-generated code is not clippy-compliant 38 | lalrpop_mod!(#[expect(clippy::all, missing_docs, clippy::pedantic, clippy::missing_panics_doc, clippy::allow_attributes, reason = "lalrpop-generated code can not be expected to be clippy-compliant or use expect")] pub sql); 39 | 40 | /// Implement [`Deserialize`](serde::Deserialize) through [`FromStr`](core::str::FromStr) to avoid invalid identifiers. 41 | #[macro_export] 42 | macro_rules! impl_serde_from_str { 43 | ($type:ty) => { 44 | impl serde::Serialize for $type { 45 | fn serialize(&self, serializer: S) -> Result 46 | where 47 | S: serde::Serializer, 48 | { 49 | serializer.serialize_str(&self.to_string()) 50 | } 51 | } 52 | impl<'d> serde::Deserialize<'d> for $type { 53 | fn deserialize(deserializer: D) -> Result 54 | where 55 | D: serde::Deserializer<'d>, 56 | { 57 | extern crate alloc; 58 | let string = alloc::string::String::deserialize(deserializer)?; 59 | <$type>::from_str(&string).map_err(serde::de::Error::custom) 60 | } 61 | } 62 | }; 63 | } 64 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/serialize.rs: -------------------------------------------------------------------------------- 1 | /// TODO: add docs 2 | macro_rules! 
impl_serde_for_ark_serde_checked { 3 | ($t:ty) => { 4 | impl serde::Serialize for $t { 5 | fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { 6 | let mut bytes = 7 | Vec::with_capacity(ark_serialize::CanonicalSerialize::compressed_size(self)); 8 | ark_serialize::CanonicalSerialize::serialize_compressed(self, &mut bytes) 9 | .map_err(serde::ser::Error::custom)?; 10 | bytes.serialize(serializer) 11 | } 12 | } 13 | impl<'de> serde::Deserialize<'de> for $t { 14 | fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { 15 | ark_serialize::CanonicalDeserialize::deserialize_compressed( 16 | Vec::deserialize(deserializer)?.as_slice(), 17 | ) 18 | .map_err(serde::de::Error::custom) 19 | } 20 | } 21 | }; 22 | } 23 | 24 | /// TODO: add docs 25 | macro_rules! impl_serde_for_ark_serde_unchecked { 26 | ($t:ty) => { 27 | impl serde::Serialize for $t { 28 | fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { 29 | let mut bytes = 30 | Vec::with_capacity(ark_serialize::CanonicalSerialize::compressed_size(self)); 31 | ark_serialize::CanonicalSerialize::serialize_compressed(self, &mut bytes) 32 | .map_err(serde::ser::Error::custom)?; 33 | bytes.serialize(serializer) 34 | } 35 | } 36 | impl<'de> serde::Deserialize<'de> for $t { 37 | fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { 38 | ark_serialize::CanonicalDeserialize::deserialize_compressed_unchecked( 39 | Vec::deserialize(deserializer)?.as_slice(), 40 | ) 41 | .map_err(serde::de::Error::custom) 42 | } 43 | } 44 | }; 45 | } 46 | 47 | pub(crate) use impl_serde_for_ark_serde_checked; 48 | pub(crate) use impl_serde_for_ark_serde_unchecked; 49 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/dory_public_setup.rs: -------------------------------------------------------------------------------- 1 | use super::{ProverSetup, VerifierSetup}; 2 | 3 | /// The public setup required for the Dory PCS by the prover and the commitment computation. 4 | #[derive(Clone, Copy)] 5 | pub struct DoryProverPublicSetup<'a> { 6 | prover_setup: &'a ProverSetup<'a>, 7 | sigma: usize, 8 | } 9 | impl<'a> DoryProverPublicSetup<'a> { 10 | /// Create a new public setup for the Dory PCS. 11 | /// `prover_setup`: The prover's setup parameters for the Dory protocol. 12 | /// `sigma`: A commitment with this setup is a matrix commitment with `1 << sigma` columns. 13 | #[must_use] 14 | pub fn new(prover_setup: &'a ProverSetup<'a>, sigma: usize) -> Self { 15 | Self { 16 | prover_setup, 17 | sigma, 18 | } 19 | } 20 | /// Returns sigma. A commitment with this setup is a matrix commitment with `1 << sigma` columns. 21 | #[must_use] 22 | pub fn sigma(&self) -> usize { 23 | self.sigma 24 | } 25 | /// The public setup for the Dory protocol. 26 | #[must_use] 27 | pub fn prover_setup(&self) -> &ProverSetup { 28 | self.prover_setup 29 | } 30 | } 31 | 32 | /// The verifier's public setup for the Dory PCS. 33 | #[derive(Clone, Copy)] 34 | pub struct DoryVerifierPublicSetup<'a> { 35 | verifier_setup: &'a VerifierSetup, 36 | sigma: usize, 37 | } 38 | impl<'a> DoryVerifierPublicSetup<'a> { 39 | /// Create a new public setup for the Dory PCS. 40 | /// `verifier_setup`: The verifier's setup parameters for the Dory protocol. 41 | /// `sigma`: A commitment with this setup is a matrix commitment with `1 << sigma` columns. 42 | #[must_use] 43 | pub fn new(verifier_setup: &'a VerifierSetup, sigma: usize) -> Self { 44 | Self { 45 | verifier_setup, 46 | sigma, 47 | } 48 | } 49 | /// Returns sigma.
A commitment with this setup is a matrix commitment with `1 << sigma` columns. 50 | #[must_use] 51 | pub fn sigma(&self) -> usize { 52 | self.sigma 53 | } 54 | /// The verifier's setup parameters for the Dory protocol. 55 | #[must_use] 56 | pub fn verifier_setup(&self) -> &VerifierSetup { 57 | self.verifier_setup 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /docs/SQLSyntaxSpecification.md: -------------------------------------------------------------------------------- 1 | # PoSQL SQL Syntax 2 | 3 | Proof of SQL currently supports the following syntax. The syntax support is rapidly expanding, and we are happy to take suggestions about what should be added. Anyone submitting a PR must ensure that this is kept up to date. 4 | 5 | ``` 6 | SELECT [* | expression [ [ AS ] output_name ] [, …]] 7 | FROM table 8 | [WHERE condition] 9 | [GROUP BY expression] 10 | [ORDER BY expression [ASC | DESC]] 11 | [LIMIT count] 12 | [OFFSET start] 13 | ``` 14 | ## Supported in the Prover 15 | * DataTypes 16 | - Bool / Boolean 17 | - Numeric Types 18 | * Uint8 (8 bits) 19 | * TinyInt (8 bits) 20 | * SmallInt (16 bits) 21 | * Int / Integer (32 bits) 22 | * BigInt (64 bits) 23 | * Int128 24 | * Decimal75 25 | - Character Types 26 | * Varchar [^1] 27 | - Date / Time Types 28 | * Timestamp 29 | * Operators 30 | - Logical Operators 31 | * AND, OR 32 | * NOT 33 | - Numerical Operators 34 | * +, -, * 35 | - Comparison Operators 36 | * =, != 37 | * \>, >=, <, <= 38 | * Aggregate Functions 39 | - SUM 40 | - COUNT 41 | * SELECT syntax 42 | - WHERE clause 43 | - GROUP BY clause 44 | ## Currently Only Supported in Post-Processing 45 | 46 | Note: this post-processing is still trustworthy because it is done by the verifier after verifying the result. The prime example of why this is valuable is for the query `SELECT SUM(price) / COUNT(price) FROM table`. 47 | It is far more efficient for the verifier to compute the actual division, while the prover produces a proof for the `SUM` and `COUNT`. While we plan to support `/` in the prover soon, we will still defer to post-processing when it is possible, cheap enough for the verifier, and more efficient overall. 48 | 49 | * Operators 50 | - Numerical Operators 51 | * / 52 | - Aggregate Functions 53 | * MAX, MIN 54 | * FIRST 55 | * SELECT syntax 56 | - ORDER BY clause 57 | - LIMIT clause 58 | - OFFSET clause 59 | 60 | [^1]: Currently, we do not support any string operations beyond = and !=. 61 | 62 | ## Reserved keywords 63 | 64 | The following keywords may not be used as aliases: 65 | - `count` 66 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/proof/keccak256_transcript.rs: -------------------------------------------------------------------------------- 1 | use super::{transcript_core::TranscriptCore, Transcript}; 2 | use core::mem::replace; 3 | use tiny_keccak::{Hasher, Keccak}; 4 | 5 | /// Public coin transcript that is easily portable to Solidity. 6 | /// 7 | /// Leverages the keccak256 hash function, which has the lowest gas costs on Solidity. 8 | /// 9 | /// The public coin transcript consists of alternating prover messages and verifier challenges. 10 | /// In order to produce multiple verifier challenges in a row, an empty prover message must be sent. 11 | /// In order to send multiple prover messages in a row, the verifier challenge can be discarded.
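/// As a rough sketch (illustrative only, using the `TranscriptCore` methods defined below): /// ```rust,ignore /// let mut t = Keccak256Transcript::new(); /// t.raw_append(b"prover message"); /// let c1 = t.raw_challenge(); /// let c2 = t.raw_challenge(); // consecutive challenges re-absorb the prior state /// ```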
12 | /// 13 | /// The challenges/state are computed as follows: 14 | /// ```pseudo-code 15 | /// challenge_(i+1) = keccak256(challenge_i, message_(i+1)) 16 | /// ``` 17 | pub struct Keccak256Transcript(Keccak); 18 | impl TranscriptCore for Keccak256Transcript { 19 | fn new() -> Self { 20 | Self(Keccak::v256()) 21 | } 22 | fn raw_append(&mut self, message: &[u8]) { 23 | self.0.update(message); 24 | } 25 | fn raw_challenge(&mut self) -> [u8; 32] { 26 | let mut result = [0; 32]; 27 | 28 | // Replace existing Hasher with a new one, and finalize the old Hasher, 29 | // getting a hash/the desired challenge: 30 | replace(self, Transcript::new()).0.finalize(&mut result); 31 | 32 | // Add this challenge to the new Hasher for the next round of messages: 33 | self.raw_append(&result); 34 | 35 | result 36 | } 37 | } 38 | 39 | #[cfg(test)] 40 | mod tests { 41 | use super::{super::transcript_core::test_util::*, Keccak256Transcript}; 42 | #[test] 43 | fn we_get_equivalent_challenges_with_equivalent_keccak256_transcripts() { 44 | we_get_equivalent_challenges_with_equivalent_transcripts::<Keccak256Transcript>(); 45 | } 46 | #[test] 47 | fn we_get_different_challenges_with_different_keccak256_transcripts() { 48 | we_get_different_challenges_with_different_transcripts::<Keccak256Transcript>(); 49 | } 50 | #[test] 51 | fn we_get_different_nontrivial_consecutive_challenges_from_keccak256_transcript() { 52 | we_get_different_nontrivial_consecutive_challenges_from_transcript::<Keccak256Transcript>(); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/extended_state_test.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | rand_F_tensors, rand_G_vecs, test_rng, ExtendedProverState, G1Projective, G2Projective, 3 | PublicParameters, 4 | }; 5 | use crate::base::polynomial::compute_evaluation_vector; 6 | use ark_ec::{pairing::Pairing, VariableBaseMSM}; 7 | use ark_ff::Fp; 8 | 9 | #[test] 10 | pub fn we_can_create_an_extended_verifier_state_from_an_extended_prover_state() { 11 | let mut rng = test_rng(); 12 | let max_nu = 5; 13 | let pp = PublicParameters::test_rand(max_nu, &mut rng); 14 | let prover_setup = (&pp).into(); 15 | for nu in 0..max_nu { 16 | let (v1, v2) = rand_G_vecs(nu, &mut rng); 17 | let (s1_tensor, s2_tensor) = rand_F_tensors(nu, &mut rng); 18 | let mut s1 = vec![Fp::default(); 1 << nu]; 19 | let mut s2 = vec![Fp::default(); 1 << nu]; 20 | compute_evaluation_vector(&mut s1, &s1_tensor); 21 | compute_evaluation_vector(&mut s2, &s2_tensor); 22 | let extended_prover_state = ExtendedProverState::new_from_tensors( 23 | s1_tensor.clone(), 24 | s2_tensor.clone(), 25 | v1.clone(), 26 | v2.clone(), 27 | nu, 28 | ); 29 | assert_eq!(extended_prover_state.s1, s1); 30 | assert_eq!(extended_prover_state.s2, s2); 31 | let extended_verifier_state = extended_prover_state.calculate_verifier_state(&prover_setup); 32 | 33 | let C = Pairing::multi_pairing(&v1, &v2); 34 | let D_1 = Pairing::multi_pairing(&v1, prover_setup.Gamma_2[nu]); 35 | let D_2 = Pairing::multi_pairing(prover_setup.Gamma_1[nu], &v2); 36 | let E_1 = G1Projective::msm_unchecked(&v1, &s2); 37 | let E_2 = G2Projective::msm_unchecked(&v2, &s1); 38 | 39 | assert_eq!(extended_verifier_state.base_state.C, C); 40 | assert_eq!(extended_verifier_state.base_state.D_1, D_1); 41 | assert_eq!(extended_verifier_state.base_state.D_2, D_2); 42 | assert_eq!(extended_verifier_state.base_state.nu, nu); 43 | assert_eq!(extended_verifier_state.E_1, E_1); 44 |
assert_eq!(extended_verifier_state.E_2, E_2); 45 | assert_eq!(extended_verifier_state.s1_tensor, s1_tensor); 46 | assert_eq!(extended_verifier_state.s2_tensor, s2_tensor); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /solidity/test/proof_exprs/AddExpr.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import "../../src/base/Constants.sol"; 7 | import {VerificationBuilder} from "../../src/builder/VerificationBuilder.pre.sol"; 8 | import {AddExpr} from "../../src/proof_exprs/AddExpr.pre.sol"; 9 | import {F} from "../base/FieldUtil.sol"; 10 | 11 | contract AddExprTest is Test { 12 | function testSimpleAddExpr() public pure { 13 | bytes memory expr = abi.encodePacked( 14 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, int64(7)), 15 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, int64(5)), 16 | hex"abcdef" 17 | ); 18 | VerificationBuilder.Builder memory builder; 19 | 20 | uint256 eval; 21 | (expr, builder, eval) = AddExpr.__addExprEvaluate(expr, builder, 10); 22 | 23 | assert(eval == 120); 24 | bytes memory expectedExprOut = hex"abcdef"; 25 | assert(expr.length == expectedExprOut.length); 26 | uint256 exprOutLength = expr.length; 27 | for (uint256 i = 0; i < exprOutLength; ++i) { 28 | assert(expr[i] == expectedExprOut[i]); 29 | } 30 | } 31 | 32 | function testFuzzAddExpr( 33 | VerificationBuilder.Builder memory builder, 34 | uint256 chiEvaluation, 35 | int64 lhsValue, 36 | int64 rhsValue, 37 | bytes memory trailingExpr 38 | ) public pure { 39 | bytes memory expr = abi.encodePacked( 40 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, lhsValue), 41 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, rhsValue), 42 | trailingExpr 43 | ); 44 | 45 | uint256 eval; 46 | (expr, builder, eval) = AddExpr.__addExprEvaluate(expr, builder, chiEvaluation); 47 | 48 | assert(eval == ((F.from(lhsValue) + F.from(rhsValue)) * F.from(chiEvaluation)).into()); 49 | assert(expr.length == trailingExpr.length); 50 | uint256 exprOutLength = expr.length; 51 | for (uint256 i = 0; i < exprOutLength; ++i) { 52 | assert(expr[i] == trailingExpr[i]); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/error.rs: -------------------------------------------------------------------------------- 1 | use crate::base::{ 2 | database::ColumnType, 3 | math::decimal::{DecimalError, IntermediateDecimalError}, 4 | proof::PlaceholderError, 5 | }; 6 | use alloc::string::{String, ToString}; 7 | use core::result::Result; 8 | use snafu::Snafu; 9 | 10 | /// Errors related to queries that can not be run due to invalid column references, data types, etc. 11 | /// Will be replaced once we fully switch to the planner. 
12 | #[derive(Snafu, Debug, PartialEq, Eq)] 13 | pub enum AnalyzeError { 14 | #[snafu(display("Expression has datatype {expr_type}, which was not valid"))] 15 | /// Invalid data type received 16 | InvalidDataType { 17 | /// data type found 18 | expr_type: ColumnType, 19 | }, 20 | 21 | #[snafu(display("Left side has '{left_type}' type but right side has '{right_type}' type"))] 22 | /// Data types do not match 23 | DataTypeMismatch { 24 | /// The left side datatype 25 | left_type: String, 26 | /// The right side datatype 27 | right_type: String, 28 | }, 29 | 30 | #[snafu(display("Columns have different lengths: {len_a} != {len_b}"))] 31 | /// Two columns do not have the same length 32 | DifferentColumnLength { 33 | /// The length of the first column 34 | len_a: usize, 35 | /// The length of the second column 36 | len_b: usize, 37 | }, 38 | 39 | #[snafu(transparent)] 40 | /// Errors related to decimal operations 41 | DecimalConversionError { 42 | /// The underlying source error 43 | source: DecimalError, 44 | }, 45 | 46 | #[snafu(transparent)] 47 | /// Errors related to placeholders 48 | PlaceholderError { 49 | /// The underlying source error 50 | source: PlaceholderError, 51 | }, 52 | } 53 | 54 | impl From<AnalyzeError> for String { 55 | fn from(error: AnalyzeError) -> Self { 56 | error.to_string() 57 | } 58 | } 59 | 60 | impl From<IntermediateDecimalError> for AnalyzeError { 61 | fn from(err: IntermediateDecimalError) -> AnalyzeError { 62 | AnalyzeError::DecimalConversionError { 63 | source: DecimalError::IntermediateDecimalConversionError { source: err }, 64 | } 65 | } 66 | } 67 | 68 | /// Result type for analyze errors 69 | pub type AnalyzeResult<T> = Result<T, AnalyzeError>; 70 | -------------------------------------------------------------------------------- /solidity/src/base/MathUtil.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | /// @title Math Utilities Library 6 | /// @notice Provides functions to perform various math operations 7 | library MathUtil { 8 | /// @notice Computes `max(1,ceil(log_2(value)))` 9 | /// @dev The smallest integer greater than or equal to the base 2 logarithm of a number. 10 | /// If the number is less than 2, the result is 1.
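/// Illustrative values (derived from the definition above): __log2Up(1) == 1, __log2Up(2) == 1, __log2Up(3) == 2, __log2Up(5) == 3.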
11 | /// @param __value The input value for which to compute the logarithm 12 | /// @return __exponent The computed logarithm value 13 | function __log2Up(uint256 __value) internal pure returns (uint256 __exponent) { 14 | assembly { 15 | function log2_up(value) -> exponent { 16 | if value { value := sub(value, 1) } 17 | exponent := 1 18 | if gt(value, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF) { 19 | exponent := add(exponent, 128) 20 | value := shr(128, value) 21 | } 22 | if gt(value, 0xFFFFFFFFFFFFFFFF) { 23 | exponent := add(exponent, 64) 24 | value := shr(64, value) 25 | } 26 | if gt(value, 0xFFFFFFFF) { 27 | exponent := add(exponent, 32) 28 | value := shr(32, value) 29 | } 30 | if gt(value, 0xFFFF) { 31 | exponent := add(exponent, 16) 32 | value := shr(16, value) 33 | } 34 | if gt(value, 0xFF) { 35 | exponent := add(exponent, 8) 36 | value := shr(8, value) 37 | } 38 | if gt(value, 0xF) { 39 | exponent := add(exponent, 4) 40 | value := shr(4, value) 41 | } 42 | if gt(value, 0x3) { 43 | exponent := add(exponent, 2) 44 | value := shr(2, value) 45 | } 46 | if gt(value, 0x1) { 47 | exponent := add(exponent, 1) 48 | value := shr(1, value) 49 | } 50 | } 51 | __exponent := log2_up(__value) 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/slice_ops/mul_add_assign_test.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | use crate::base::scalar::test_scalar::TestScalar; 3 | 4 | #[test] 5 | fn test_mul_add_assign() { 6 | let mut a = vec![1, 2, 3, 4]; 7 | let b = vec![2, 3, 4, 5]; 8 | mul_add_assign(&mut a, 10, &b); 9 | let c = vec![1 + 10 * 2, 2 + 10 * 3, 3 + 10 * 4, 4 + 10 * 5]; 10 | assert_eq!(a, c); 11 | } 12 | 13 | /// test [`mul_add_assign`] with uneven vectors 14 | #[test] 15 | fn test_mul_add_assign_uneven() { 16 | let mut a = vec![1, 2, 3, 4, 5]; 17 | let b = vec![2, 3, 4, 5]; 18 | mul_add_assign(&mut a, 10, &b); 19 | let c = vec![1 + 10 * 2, 2 + 10 * 3, 3 + 10 * 4, 4 + 10 * 5, 5]; 20 | assert_eq!(a, c); 21 | } 22 | 23 | /// test that [`mul_add_assign`] panics with uneven vectors when len(a) < len(b) 24 | #[test] 25 | #[should_panic( 26 | expected = "The length of result must be greater than or equal to the length of the vector of values to be multiplied and added" 27 | )] 28 | fn test_mul_add_assign_uneven_panic() { 29 | let mut a = vec![1, 2, 3, 4]; 30 | let b = vec![2, 3, 4, 5, 6]; 31 | mul_add_assign(&mut a, 10, &b); 32 | } 33 | 34 | /// test [`mul_add_assign`] with `TestScalar` 35 | #[test] 36 | fn test_mul_add_assign_testscalar() { 37 | let mut a = vec![TestScalar::from(1u64), TestScalar::from(2u64)]; 38 | let b = vec![TestScalar::from(2u64), TestScalar::from(3u64)]; 39 | mul_add_assign(&mut a, TestScalar::from(10u64), &b); 40 | let c = vec![ 41 | TestScalar::from(1u64) + TestScalar::from(10u64) * TestScalar::from(2u64), 42 | TestScalar::from(2u64) + TestScalar::from(10u64) * TestScalar::from(3u64), 43 | ]; 44 | assert_eq!(a, c); 45 | } 46 | 47 | /// test [`mul_add_assign`] with uneven `TestScalar` 48 | #[test] 49 | fn test_mul_add_assign_testscalar_uneven() { 50 | let mut a = vec![ 51 | TestScalar::from(1u64), 52 | TestScalar::from(2u64), 53 | TestScalar::from(3u64), 54 | ]; 55 | let b = vec![TestScalar::from(2u64), TestScalar::from(3u64)]; 56 | mul_add_assign(&mut a, TestScalar::from(10u64), &b); 57 | let c = vec![ 58 | TestScalar::from(1u64) + TestScalar::from(10u64) * TestScalar::from(2u64), 59 | TestScalar::from(2u64) + TestScalar::from(10u64) *
TestScalar::from(3u64), 60 | TestScalar::from(3u64), 61 | ]; 62 | assert_eq!(a, c); 63 | } 64 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/inner_product/ristretto_point.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | base::commitment::{Commitment, CommittableColumn}, 3 | proof_primitive::inner_product::curve_25519_scalar::Curve25519Scalar, 4 | }; 5 | use alloc::vec::Vec; 6 | use curve25519_dalek::RistrettoPoint; 7 | 8 | impl Commitment for RistrettoPoint { 9 | type Scalar = Curve25519Scalar; 10 | type PublicSetup<'a> = (); 11 | #[cfg(feature = "blitzar")] 12 | fn compute_commitments( 13 | committable_columns: &[CommittableColumn], 14 | offset: usize, 15 | _setup: &Self::PublicSetup<'_>, 16 | ) -> Vec<Self> { 17 | use curve25519_dalek::ristretto::CompressedRistretto; 18 | 19 | let sequences: Vec<_> = committable_columns.iter().map(Into::into).collect(); 20 | let mut compressed_commitments = 21 | vec![CompressedRistretto::default(); committable_columns.len()]; 22 | blitzar::compute::compute_curve25519_commitments( 23 | &mut compressed_commitments, 24 | &sequences, 25 | offset as u64, 26 | ); 27 | compressed_commitments 28 | .into_iter() 29 | .map(|cc| { 30 | cc.decompress().expect( 31 | "invalid ristretto point decompression in Commitment::compute_commitments", 32 | ) 33 | }) 34 | .collect() 35 | } 36 | #[cfg(not(feature = "blitzar"))] 37 | fn compute_commitments( 38 | _committable_columns: &[CommittableColumn], 39 | _offset: usize, 40 | _setup: &Self::PublicSetup<'_>, 41 | ) -> Vec<Self> { 42 | unimplemented!() 43 | } 44 | 45 | fn to_transcript_bytes(&self) -> Vec<u8> { 46 | self.compress().as_bytes().to_vec() 47 | } 48 | } 49 | 50 | #[cfg(test)] 51 | mod tests { 52 | use crate::base::commitment::*; 53 | use curve25519_dalek::{constants::RISTRETTO_BASEPOINT_POINT, ristretto::RistrettoPoint}; 54 | 55 | #[test] 56 | fn we_get_different_transcript_bytes_from_different_ristretto_point_commitments() { 57 | let commitment1 = RistrettoPoint::default(); 58 | let commitment2 = RISTRETTO_BASEPOINT_POINT; 59 | 60 | assert_ne!( 61 | commitment1.to_transcript_bytes(), 62 | commitment2.to_transcript_bytes() 63 | ); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/hyperkzg/nova_engine.rs: -------------------------------------------------------------------------------- 1 | use super::{BNScalar, HyperKZGPublicSetupOwned}; 2 | use crate::base::{ 3 | proof::{Keccak256Transcript, Transcript}, 4 | slice_ops, 5 | }; 6 | use nova_snark::{ 7 | errors::NovaError, 8 | provider::{bn256_grumpkin::bn256::Scalar as NovaScalar, hyperkzg::CommitmentKey}, 9 | traits::{Engine, TranscriptEngineTrait, TranscriptReprTrait}, 10 | }; 11 | use serde::{Deserialize, Serialize}; 12 | 13 | /// The `HyperKZG` engine that implements nova's `Engine` trait.
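/// A sketch of what this type pins down (assuming nova-snark's `Engine` trait shape; illustrative only): /// ```rust,ignore /// // Downstream code can name the engine's choices through associated types: /// type S = <HyperKZGEngine as nova_snark::traits::Engine>::Scalar; // = NovaScalar /// ```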
14 | #[derive(Clone, Copy, Debug, Eq, PartialEq, Serialize, Deserialize)] 15 | pub struct HyperKZGEngine; 16 | 17 | impl Engine for HyperKZGEngine { 18 | type Base = nova_snark::provider::bn256_grumpkin::bn256::Base; 19 | type Scalar = NovaScalar; 20 | type GE = nova_snark::provider::bn256_grumpkin::bn256::Point; 21 | type RO = nova_snark::provider::poseidon::PoseidonRO<Self::Base, Self::Scalar>; 22 | type ROCircuit = nova_snark::provider::poseidon::PoseidonROCircuit<Self::Base>; 23 | type RO2 = nova_snark::provider::poseidon::PoseidonRO<Self::Scalar, Self::Base>; 24 | type RO2Circuit = nova_snark::provider::poseidon::PoseidonROCircuit<Self::Scalar>; 25 | type TE = Keccak256Transcript; 26 | type CE = nova_snark::provider::hyperkzg::CommitmentEngine<Self>; 27 | } 28 | 29 | impl TranscriptEngineTrait<HyperKZGEngine> for Keccak256Transcript { 30 | fn new(_label: &'static [u8]) -> Self { 31 | Transcript::new() 32 | } 33 | 34 | fn squeeze(&mut self, _label: &'static [u8]) -> Result<NovaScalar, NovaError> { 35 | let res = Transcript::scalar_challenge_as_be::<BNScalar>(self).into(); 36 | Transcript::challenge_as_le(self); 37 | Ok(res) 38 | } 39 | 40 | fn absorb<T: TranscriptReprTrait<<HyperKZGEngine as Engine>::GE>>( 41 | &mut self, 42 | _label: &'static [u8], 43 | o: &T, 44 | ) { 45 | Transcript::extend_as_le_from_refs(self, &o.to_transcript_bytes()); 46 | } 47 | 48 | fn dom_sep(&mut self, _bytes: &'static [u8]) {} 49 | } 50 | 51 | /// Utility converting a nova `CommitmentKey` to a [`HyperKZGPublicSetupOwned`]. 52 | pub fn nova_commitment_key_to_hyperkzg_public_setup( 53 | setup: &CommitmentKey<HyperKZGEngine>, 54 | ) -> HyperKZGPublicSetupOwned { 55 | slice_ops::slice_cast_with(setup.ck(), blitzar::compute::convert_to_ark_bn254_g1_affine) 56 | } 57 | -------------------------------------------------------------------------------- /solidity/test/proof_exprs/SubtractExpr.t.pre.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: UNLICENSED 2 | // This is licensed under the Cryptographic Open Software License 1.0 3 | pragma solidity ^0.8.28; 4 | 5 | import {Test} from "forge-std/Test.sol"; 6 | import "../../src/base/Constants.sol"; 7 | import {VerificationBuilder} from "../../src/builder/VerificationBuilder.pre.sol"; 8 | import {SubtractExpr} from "../../src/proof_exprs/SubtractExpr.pre.sol"; 9 | import {F} from "../base/FieldUtil.sol"; 10 | 11 | contract SubtractExprTest is Test { 12 | function testSimpleSubtractExpr() public pure { 13 | bytes memory expr = abi.encodePacked( 14 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, int64(7)), 15 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, int64(5)), 16 | hex"abcdef" 17 | ); 18 | VerificationBuilder.Builder memory builder; 19 | 20 | uint256 eval; 21 | (expr, builder, eval) = SubtractExpr.__subtractExprEvaluate(expr, builder, 10); 22 | 23 | assert(eval == 20); 24 | bytes memory expectedExprOut = hex"abcdef"; 25 | assert(expr.length == expectedExprOut.length); 26 | uint256 exprOutLength = expr.length; 27 | for (uint256 i = 0; i < exprOutLength; ++i) { 28 | assert(expr[i] == expectedExprOut[i]); 29 | } 30 | } 31 | 32 | function testFuzzSubtractExpr( 33 | VerificationBuilder.Builder memory builder, 34 | uint256 chiEvaluation, 35 | int64 lhsValue, 36 | int64 rhsValue, 37 | bytes memory trailingExpr 38 | ) public pure { 39 | bytes memory expr = abi.encodePacked( 40 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, lhsValue), 41 | abi.encodePacked(LITERAL_EXPR_VARIANT, DATA_TYPE_BIGINT_VARIANT, rhsValue), 42 | trailingExpr 43 | ); 44 | 45 | uint256 eval; 46 | (expr, builder, eval) = SubtractExpr.__subtractExprEvaluate(expr, builder,
chiEvaluation); 47 | 48 | assert(eval == ((F.from(lhsValue) - F.from(rhsValue)) * F.from(chiEvaluation)).into()); 49 | assert(expr.length == trailingExpr.length); 50 | uint256 exprOutLength = expr.length; 51 | for (uint256 i = 0; i < exprOutLength; ++i) { 52 | assert(expr[i] == trailingExpr[i]); 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Please be sure to look over the pull request guidelines here: https://github.com/spaceandtimelabs/sxt-proof-of-sql/blob/main/CONTRIBUTING.md#submit-pr. 2 | 3 | # Please go through the following checklist 4 | - [ ] The PR title and commit messages adhere to guidelines here: https://github.com/spaceandtimelabs/sxt-proof-of-sql/blob/main/CONTRIBUTING.md. In particular `!` is used if and only if at least one breaking change has been introduced. 5 | - [ ] I have run the ci check script with `source scripts/run_ci_checks.sh`. 6 | - [ ] I have run the clean commit check script with `source scripts/check_commits.sh`, and the commit history is certified to follow clean commit guidelines as described here: https://github.com/spaceandtimelabs/sxt-proof-of-sql/blob/main/COMMIT_GUIDELINES.md 7 | - [ ] The latest changes from `main` have been incorporated into this PR by simple rebase if possible; if not, then conflicts are resolved appropriately. 8 | 9 | # Rationale for this change 10 | 11 | 22 | 23 | # What changes are included in this PR? 24 | 25 | 33 | 34 | # Are these changes tested? 35 | 45 | -------------------------------------------------------------------------------- /scripts/run_ci_checks.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Display a help text 4 | if [[ "$1" == "-h" || "$1" == "--help" ]]; then 5 | echo "Runs all CI checks (excluding tests, udeps, and the 'examples' job)." 6 | fi 7 | 8 | # The path to the YAML file that defines the CI workflows 9 | YAML_FILE=".github/workflows/lint-and-test.yml" 10 | 11 | # Initialize the directory we're searching from (current directory) 12 | current_dir=$(pwd) 13 | 14 | # Traverse upwards to find the root directory, assuming it exists somewhere above 15 | while [[ ! -f "$current_dir/sxt-proof-of-sql/.github/workflows/lint-and-test.yml" ]]; do 16 | # Move up one directory 17 | current_dir=$(dirname "$current_dir") 18 | 19 | # If we reach the root directory (i.e., /), stop to prevent an infinite loop 20 | if [[ "$current_dir" == "/" ]]; then 21 | echo "Could not find file."; break 22 | fi 23 | done 24 | 25 | # Check if the YAML file exists 26 | if [ ! -f "$YAML_FILE" ]; then 27 | echo "YAML file $YAML_FILE does not exist." 28 | fi 29 | 30 | # 1) Remove the entire `examples:` job section from the file. 31 | # 2) Extract lines that contain 'cargo' commands. 32 | # 3) Exclude lines with '--ignored', 'test', 'rustup', or 'udeps'. 33 | # 4) Strip off the 'run:' prefix. 34 | cargo_commands=$( 35 | sed '/^\s*examples:/,/^[^[:space:]]/d' "$YAML_FILE" \ 36 | | grep -E '^\s*run:.*cargo' \ 37 | | grep -v -- '--ignored' \ 38 | | grep -v 'test' \ 39 | | grep -v 'rustup' \ 40 | | grep -v 'udeps' \ 41 | | sed -E 's/^\s*run:\s*//' 42 | ) 43 | 44 | if [ -z "$cargo_commands" ]; then 45 | echo "No cargo commands found (other than tests, udeps, or in the 'examples' job)."
46 | fi 47 | 48 | # Run each cargo command 49 | echo "Extracted cargo commands (excluding tests, udeps, 'examples' job, and toolchain installs):" 50 | echo "$cargo_commands" 51 | echo "=========================" 52 | 53 | failed_tests=0 54 | while IFS= read -r cmd; do 55 | echo "Running command: $cmd" 56 | if ! eval "$cmd"; then 57 | echo "Error: Command failed - $cmd" 58 | echo "Continuing with remaining checks." 59 | failed_tests=$((failed_tests + 1)) 60 | fi 61 | done <<< "$cargo_commands" 62 | 63 | # Print the results 64 | if [ "$failed_tests" -gt 0 ]; then 65 | echo "Error: $failed_tests CI checks have FAILED (excluding tests, udeps, and 'examples' job)." 66 | else 67 | echo "All CI checks (excluding tests, udeps, and 'examples' job) completed successfully." 68 | fi 69 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/base/polynomial/evaluation_vector.rs: -------------------------------------------------------------------------------- 1 | use crate::{base::if_rayon, utils::log}; 2 | use core::{ 3 | cmp, 4 | ops::{Mul, MulAssign, Sub, SubAssign}, 5 | }; 6 | use num_traits::One; 7 | #[cfg(feature = "rayon")] 8 | use rayon::prelude::{IndexedParallelIterator, IntoParallelRefMutIterator, ParallelIterator}; 9 | 10 | #[cfg(feature = "rayon")] 11 | const MIN_PARALLEL_LEN: usize = 16; // The minimum size for which we should actually parallelize the compute. 12 | 13 | /// This method manipulates left and right such that 14 | /// right[i] = left[i] * p and left[i] = left[i] * (1 - p) 15 | fn compute_evaluation_vector_impl<F>(left: &mut [F], right: &mut [F], p: F) 16 | where 17 | F: One + Sub<Output = F> + MulAssign + SubAssign + Mul<Output = F> + Send + Sync + Copy, 18 | { 19 | let k = cmp::min(left.len(), right.len()); 20 | let one_minus_p = F::one() - p; 21 | if_rayon!( 22 | left.par_iter_mut().with_min_len(MIN_PARALLEL_LEN), 23 | left.iter_mut() 24 | ) 25 | .zip(right) 26 | .for_each(|(li, ri)| { 27 | *ri = *li * p; 28 | *li -= *ri; 29 | }); 30 | if_rayon!( 31 | left[k..].par_iter_mut().with_min_len(MIN_PARALLEL_LEN), 32 | left[k..].iter_mut() 33 | ) 34 | .for_each(|li| { 35 | *li *= one_minus_p; 36 | }); 37 | } 38 | 39 | /// Given a point of evaluation, computes the vector that allows us 40 | /// to evaluate a multilinear extension as an inner product. 41 | #[tracing::instrument(level = "debug", skip_all)] 42 | pub fn compute_evaluation_vector<F>(v: &mut [F], point: &[F]) 43 | where 44 | F: One + Sub<Output = F> + MulAssign + SubAssign + Mul<Output = F> + Send + Sync + Copy, 45 | { 46 | log::log_memory_usage("Start"); 47 | 48 | assert!(v.len() <= (1 << point.len())); 49 | if point.is_empty() || v.is_empty() { 50 | // v is guaranteed to be at most length 1 by the assert!.
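// Illustrative semantics (worked example): for point = [p0, p1] the full-length result is // [(1 - p0)(1 - p1), p0(1 - p1), (1 - p0)p1, p0 * p1], so evaluating an MLE reduces to // an inner product with this vector. In the empty/degenerate case handled here, the // 0-variate evaluation vector is simply all ones.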
51 | v.fill(F::one()); 52 | return; 53 | } 54 | v[0] = F::one() - point[0]; 55 | if v.len() > 1 { 56 | v[1] = point[0]; 57 | } 58 | for (level, p) in point[1..].iter().enumerate() { 59 | let mid = 1 << (level + 1); 60 | let (left, right): (&mut [F], &mut [F]) = if mid >= v.len() { 61 | (v, &mut []) 62 | } else { 63 | v.split_at_mut(mid) 64 | }; 65 | compute_evaluation_vector_impl(left, right, *p); 66 | } 67 | 68 | log::log_memory_usage("End"); 69 | } 70 | -------------------------------------------------------------------------------- /crates/proof-of-sql-planner/src/uppercase_column_visitor.rs: -------------------------------------------------------------------------------- 1 | use sqlparser::ast::{visit_relations_mut, Expr, Ident, Statement, VisitMut, VisitorMut}; 2 | use std::ops::ControlFlow; 3 | 4 | /// Returns an uppercased version of Ident 5 | /// Leaving this as public because the sdk also uses this function 6 | #[must_use] 7 | #[expect(clippy::needless_pass_by_value)] 8 | pub fn uppercase_identifier(ident: Ident) -> Ident { 9 | let value = ident.value.to_uppercase(); 10 | Ident { value, ..ident } 11 | } 12 | 13 | struct UppercaseColumnVisitor; 14 | 15 | impl VisitorMut for UppercaseColumnVisitor { 16 | type Break = (); 17 | 18 | fn post_visit_expr(&mut self, expr: &mut Expr) -> ControlFlow<Self::Break> { 19 | match expr { 20 | Expr::Identifier(ident) => *ident = uppercase_identifier(ident.clone()), 21 | Expr::CompoundIdentifier(idents) => { 22 | idents 23 | .iter_mut() 24 | .for_each(|ident| *ident = uppercase_identifier(ident.clone())); 25 | } 26 | _ => (), 27 | } 28 | ControlFlow::Continue(()) 29 | } 30 | } 31 | 32 | /// Returns the sqlparser statement with all of its column/table identifiers uppercased. 33 | pub fn statement_with_uppercase_identifiers(mut statement: Statement) -> Statement { 34 | statement.visit(&mut UppercaseColumnVisitor); 35 | 36 | // uppercase all tables 37 | visit_relations_mut(&mut statement, |object_name| { 38 | object_name.0.iter_mut().for_each(|ident| { 39 | ident.value = ident.value.to_uppercase(); 40 | }); 41 | 42 | ControlFlow::<()>::Continue(()) 43 | }); 44 | 45 | statement 46 | } 47 | 48 | #[cfg(test)] 49 | mod tests { 50 | use super::statement_with_uppercase_identifiers; 51 | use sqlparser::{dialect::GenericDialect, parser::Parser}; 52 | 53 | #[test] 54 | fn we_can_capitalize_statement_idents() { 55 | let statement = Parser::parse_sql(&GenericDialect{}, "SELECT a.thissum from (SELECT Sum(uppercase_Value) as thissum, COUNT(puppies) as coUNT fRoM NonSEnSE) as a").unwrap()[0].clone(); 56 | let statement = statement_with_uppercase_identifiers(statement); 57 | let expected_statement = Parser::parse_sql(&GenericDialect{}, "SELECT A.THISSUM from (SELECT Sum(UPPERCASE_VALUE) as thissum, COUNT(PUPPIES) as coUNT fRoM NONSENSE) as a").unwrap()[0].clone(); 58 | assert_eq!(statement, expected_statement); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/sql/proof_exprs/mod.rs: -------------------------------------------------------------------------------- 1 | //! This module provides provable expressions.
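//! //! A hypothetical sketch of how these expressions compose (constructor names assumed for illustration, not a verbatim API): //! ```rust,ignore //! // (a + b) * 2 as a provable expression tree //! let sum = DynProofExpr::try_new_add(col_a, col_b)?; //! let product = DynProofExpr::try_new_multiply(sum, two_literal)?; //! ```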
2 | mod proof_expr; 3 | pub(crate) use proof_expr::DecimalProofExpr; 4 | pub use proof_expr::ProofExpr; 5 | #[cfg(all(test, feature = "blitzar"))] 6 | mod proof_expr_test; 7 | 8 | mod aliased_dyn_proof_expr; 9 | pub use aliased_dyn_proof_expr::AliasedDynProofExpr; 10 | 11 | mod add_expr; 12 | pub(crate) use add_expr::AddExpr; 13 | mod subtract_expr; 14 | pub(crate) use subtract_expr::SubtractExpr; 15 | #[cfg(all(test, feature = "blitzar"))] 16 | mod add_subtract_expr_test; 17 | 18 | mod multiply_expr; 19 | pub(crate) use multiply_expr::MultiplyExpr; 20 | #[cfg(all(test, feature = "blitzar"))] 21 | mod multiply_expr_test; 22 | 23 | mod dyn_proof_expr; 24 | pub use dyn_proof_expr::DynProofExpr; 25 | 26 | mod literal_expr; 27 | pub(crate) use literal_expr::LiteralExpr; 28 | #[cfg(all(test, feature = "blitzar"))] 29 | mod literal_expr_test; 30 | 31 | mod placeholder_expr; 32 | pub(crate) use placeholder_expr::PlaceholderExpr; 33 | #[cfg(all(test, feature = "blitzar"))] 34 | mod placeholder_expr_test; 35 | 36 | mod and_expr; 37 | pub(crate) use and_expr::AndExpr; 38 | #[cfg(all(test, feature = "blitzar"))] 39 | mod and_expr_test; 40 | 41 | mod inequality_expr; 42 | use inequality_expr::InequalityExpr; 43 | #[cfg(all(test, feature = "blitzar"))] 44 | mod inequality_expr_test; 45 | 46 | mod or_expr; 47 | pub(crate) use or_expr::OrExpr; 48 | #[cfg(all(test, feature = "blitzar"))] 49 | mod or_expr_test; 50 | 51 | mod not_expr; 52 | pub(crate) use not_expr::NotExpr; 53 | #[cfg(all(test, feature = "blitzar"))] 54 | mod not_expr_test; 55 | 56 | mod numerical_util; 57 | pub(crate) use numerical_util::{add_subtract_columns, multiply_columns}; 58 | #[cfg(test)] 59 | pub(crate) use numerical_util::{divide_columns, modulo_columns}; 60 | 61 | mod equals_expr; 62 | pub(crate) use equals_expr::EqualsExpr; 63 | #[cfg(all(test, feature = "blitzar"))] 64 | mod equals_expr_test; 65 | 66 | mod table_expr; 67 | pub use table_expr::TableExpr; 68 | 69 | #[cfg(test)] 70 | pub(crate) mod test_utility; 71 | 72 | mod column_expr; 73 | pub use column_expr::ColumnExpr; 74 | #[cfg(all(test, feature = "blitzar"))] 75 | mod column_expr_test; 76 | 77 | mod cast_expr; 78 | pub(crate) use cast_expr::CastExpr; 79 | #[cfg(all(test, feature = "blitzar"))] 80 | mod cast_expr_test; 81 | 82 | mod scaling_cast_expr; 83 | pub(crate) use scaling_cast_expr::ScalingCastExpr; 84 | #[cfg(all(test, feature = "blitzar"))] 85 | mod scaling_cast_expr_test; 86 | -------------------------------------------------------------------------------- /crates/proof-of-sql/examples/posql_db/commit_accessor.rs: -------------------------------------------------------------------------------- 1 | use core::error::Error; 2 | use indexmap::IndexMap; 3 | use proof_of_sql::base::{ 4 | commitment::{Commitment, QueryCommitments, TableCommitment}, 5 | database::{ColumnType, CommitmentAccessor, MetadataAccessor, SchemaAccessor, TableRef}, 6 | }; 7 | use serde::{Deserialize, Serialize}; 8 | use sqlparser::ast::Ident; 9 | use std::{fs, path::PathBuf}; 10 | 11 | pub struct CommitAccessor<C: Commitment> { 12 | base_path: PathBuf, 13 | inner: QueryCommitments<C>, 14 | } 15 | impl<C: Commitment + Serialize + for<'a> Deserialize<'a>> CommitAccessor<C> { 16 | pub fn new(base_path: PathBuf) -> Self { 17 | Self { 18 | base_path, 19 | inner: IndexMap::default(), 20 | } 21 | } 22 | pub fn write_commit( 23 | &self, 24 | table_ref: &TableRef, 25 | commit: &TableCommitment<C>, 26 | ) -> Result<(), Box<dyn Error>> { 27 | let path = self.base_path.join(format!("{table_ref}.commit")); 28 | fs::write(path, postcard::to_allocvec(commit)?)?; 29 | Ok(()) 30 |
} 31 | pub fn load_commit(&mut self, table_ref: &TableRef) -> Result<(), Box<dyn Error>> { 32 | let path = self.base_path.join(format!("{table_ref}.commit")); 33 | let commit = postcard::from_bytes(&fs::read(path)?)?; 34 | self.inner.insert(table_ref.clone(), commit); 35 | Ok(()) 36 | } 37 | pub fn get_commit(&self, table_ref: &TableRef) -> Option<&TableCommitment<C>> { 38 | self.inner.get(table_ref) 39 | } 40 | } 41 | 42 | impl<C: Commitment> CommitmentAccessor<C> for CommitAccessor<C> { 43 | fn get_commitment(&self, table_ref: &TableRef, column_id: &Ident) -> C { 44 | self.inner.get_commitment(table_ref, column_id) 45 | } 46 | } 47 | impl<C: Commitment> MetadataAccessor for CommitAccessor<C> { 48 | fn get_length(&self, table_ref: &TableRef) -> usize { 49 | self.inner.get_length(table_ref) 50 | } 51 | 52 | fn get_offset(&self, table_ref: &TableRef) -> usize { 53 | self.inner.get_offset(table_ref) 54 | } 55 | } 56 | impl<C: Commitment> SchemaAccessor for CommitAccessor<C> { 57 | fn lookup_column(&self, table_ref: &TableRef, column_id: &Ident) -> Option<ColumnType> { 58 | self.inner.lookup_column(table_ref, column_id) 59 | } 60 | 61 | fn lookup_schema(&self, table_ref: &TableRef) -> Vec<(Ident, ColumnType)> { 62 | self.inner.lookup_schema(table_ref) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/inner_product/curve_25519_scalar.rs: -------------------------------------------------------------------------------- 1 | use crate::base::scalar::MontScalar; 2 | use ark_ff::PrimeField; 3 | 4 | /// A wrapper type around the field element `ark_curve25519::Fr` that should be used in place of `ark_curve25519::Fr`. 5 | /// 6 | /// Using the `Scalar` trait rather than this type is encouraged to allow for easier switching of the underlying field. 7 | pub type Curve25519Scalar = MontScalar<ark_curve25519::FrConfig>; 8 | 9 | impl From<Curve25519Scalar> for curve25519_dalek::scalar::Scalar { 10 | fn from(value: Curve25519Scalar) -> Self { 11 | (&value).into() 12 | } 13 | } 14 | 15 | impl From<&Curve25519Scalar> for curve25519_dalek::scalar::Scalar { 16 | /// 17 | /// # Panics 18 | /// 19 | /// This method will panic if the byte array is not of the expected length (32 bytes) or if it cannot be converted to a valid canonical scalar. However, under normal conditions, valid `Curve25519Scalar` values should always satisfy these requirements.
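/// A small illustrative round-trip through the conversion below (values arbitrary): /// ```rust,ignore /// let s = Curve25519Scalar::from(7u64); /// let d: curve25519_dalek::scalar::Scalar = (&s).into(); /// ```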
20 | fn from(value: &Curve25519Scalar) -> Self { 21 | let bytes = ark_ff::BigInteger::to_bytes_le(&value.0.into_bigint()); 22 | curve25519_dalek::scalar::Scalar::from_canonical_bytes(bytes.try_into().unwrap()).unwrap() 23 | } 24 | } 25 | 26 | impl core::ops::Mul<curve25519_dalek::ristretto::RistrettoPoint> for Curve25519Scalar { 27 | type Output = curve25519_dalek::ristretto::RistrettoPoint; 28 | fn mul(self, rhs: curve25519_dalek::ristretto::RistrettoPoint) -> Self::Output { 29 | curve25519_dalek::scalar::Scalar::from(self) * rhs 30 | } 31 | } 32 | 33 | impl core::ops::Mul<&curve25519_dalek::ristretto::RistrettoPoint> for Curve25519Scalar { 34 | type Output = curve25519_dalek::ristretto::RistrettoPoint; 35 | fn mul(self, rhs: &curve25519_dalek::ristretto::RistrettoPoint) -> Self::Output { 36 | curve25519_dalek::scalar::Scalar::from(self) * rhs 37 | } 38 | } 39 | 40 | impl core::ops::Mul<Curve25519Scalar> for curve25519_dalek::ristretto::RistrettoPoint { 41 | type Output = curve25519_dalek::ristretto::RistrettoPoint; 42 | fn mul(self, rhs: Curve25519Scalar) -> Self::Output { 43 | self * curve25519_dalek::scalar::Scalar::from(rhs) 44 | } 45 | } 46 | 47 | impl core::ops::Mul<Curve25519Scalar> for &curve25519_dalek::ristretto::RistrettoPoint { 48 | type Output = curve25519_dalek::ristretto::RistrettoPoint; 49 | fn mul(self, rhs: Curve25519Scalar) -> Self::Output { 50 | self * curve25519_dalek::scalar::Scalar::from(rhs) 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /crates/proof-of-sql/src/proof_primitive/dory/extended_dory_inner_product.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | scalar_product_prove, scalar_product_verify, DoryMessages, ExtendedProverState, 3 | ExtendedVerifierState, ProverSetup, VerifierSetup, F, 4 | }; 5 | use crate::{ 6 | base::proof::Transcript, 7 | proof_primitive::dory::{ 8 | extended_dory_reduce_prove, extended_dory_reduce_verify, fold_scalars_0_prove, 9 | fold_scalars_0_verify, 10 | }, 11 | utils::log, 12 | }; 13 | 14 | /// This is the prover side of the extended Dory-Innerproduct algorithm in section 4.3 of https://eprint.iacr.org/2020/1274.pdf. 15 | /// This function builds/enqueues `messages`, appends to `transcript`, and consumes `state`. 16 | #[tracing::instrument(level = "debug", skip_all)] 17 | pub fn extended_dory_inner_product_prove( 18 | messages: &mut DoryMessages, 19 | transcript: &mut impl Transcript, 20 | mut state: ExtendedProverState, 21 | setup: &ProverSetup, 22 | ) { 23 | log::log_memory_usage("Start"); 24 | 25 | let nu = state.base_state.nu; 26 | assert!(setup.max_nu >= nu); 27 | for _ in 0..nu { 28 | extended_dory_reduce_prove(messages, transcript, &mut state, setup); 29 | } 30 | let base_state = fold_scalars_0_prove(messages, transcript, state, setup); 31 | scalar_product_prove(messages, transcript, &base_state); 32 | 33 | log::log_memory_usage("End"); 34 | } 35 | 36 | /// This is the verifier side of the extended Dory-Innerproduct algorithm in section 4.3 of https://eprint.iacr.org/2020/1274.pdf. 37 | /// This function consumes/dequeues from `messages`, appends to `transcript`, and consumes `state`.
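/// A minimal sketch of pairing the two sides (setup, state, and `fold_fn` construction elided; `msgs` is shared between prover and verifier): /// ```rust,ignore /// extended_dory_inner_product_prove(&mut msgs, &mut p_transcript, prover_state, &prover_setup); /// assert!(extended_dory_inner_product_verify( /// &mut msgs, &mut v_transcript, verifier_state, &verifier_setup, fold_fn, /// )); /// ```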
38 | #[tracing::instrument(level = "debug", skip_all)] 39 | pub fn extended_dory_inner_product_verify( 40 | messages: &mut DoryMessages, 41 | transcript: &mut impl Transcript, 42 | mut state: ExtendedVerifierState, 43 | setup: &VerifierSetup, 44 | fold_s_tensors_verify: impl Fn(&ExtendedVerifierState) -> (F, F), 45 | ) -> bool { 46 | log::log_memory_usage("Start"); 47 | 48 | let nu = state.base_state.nu; 49 | assert!(setup.max_nu >= nu); 50 | for _ in 0..nu { 51 | if !extended_dory_reduce_verify(messages, transcript, &mut state, setup) { 52 | return false; 53 | } 54 | } 55 | let base_state = 56 | fold_scalars_0_verify(messages, transcript, state, setup, fold_s_tensors_verify); 57 | let res = scalar_product_verify(messages, transcript, base_state, setup); 58 | 59 | log::log_memory_usage("End"); 60 | 61 | res 62 | } 63 | --------------------------------------------------------------------------------