├── .github ├── CODEOWNERS ├── PULL_REQUEST_TEMPLATE.md ├── linters │ ├── .markdown-lint.yml │ └── .markdownlint.yml └── workflows │ ├── ci.yml │ ├── linkify_changelog.yml │ └── mdlinter.yml ├── .gitignore ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── crypto-primitives ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── benches │ ├── comm.rs │ ├── crh.rs │ ├── merkle_tree.rs │ ├── prf.rs │ └── signature.rs └── src │ ├── commitment │ ├── blake2s │ │ ├── constraints.rs │ │ └── mod.rs │ ├── constraints.rs │ ├── injective_map │ │ ├── constraints.rs │ │ └── mod.rs │ ├── mod.rs │ └── pedersen │ │ ├── constraints.rs │ │ └── mod.rs │ ├── crh │ ├── bowe_hopwood │ │ ├── constraints.rs │ │ └── mod.rs │ ├── constraints.rs │ ├── injective_map │ │ ├── constraints.rs │ │ └── mod.rs │ ├── mod.rs │ ├── pedersen │ │ ├── constraints.rs │ │ └── mod.rs │ ├── poseidon │ │ ├── constraints.rs │ │ └── mod.rs │ └── sha256 │ │ ├── constraints.rs │ │ └── mod.rs │ ├── encryption │ ├── constraints.rs │ ├── elgamal │ │ ├── constraints.rs │ │ └── mod.rs │ └── mod.rs │ ├── lib.rs │ ├── macros.rs │ ├── merkle_tree │ ├── constraints.rs │ ├── mod.rs │ └── tests │ │ ├── constraints.rs │ │ ├── mod.rs │ │ └── test_utils.rs │ ├── prf │ ├── blake2s │ │ ├── constraints.rs │ │ └── mod.rs │ ├── constraints.rs │ └── mod.rs │ ├── signature │ ├── constraints.rs │ ├── mod.rs │ └── schnorr │ │ ├── constraints.rs │ │ └── mod.rs │ ├── snark │ ├── constraints.rs │ └── mod.rs │ └── sponge │ ├── absorb.rs │ ├── constraints │ ├── absorb.rs │ └── mod.rs │ ├── merlin │ └── mod.rs │ ├── mod.rs │ ├── poseidon │ ├── constraints.rs │ ├── grain_lfsr.rs │ ├── mod.rs │ ├── tests.rs │ └── traits.rs │ └── test.rs ├── macros ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT └── src │ └── lib.rs └── scripts └── linkify_changelog.py /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @arkworks-rs/maintainers 2 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | 6 | 7 | ## Description 8 | 9 | 12 | 13 | closes: #XXXX 14 | 15 | --- 16 | 17 | Before we can merge this PR, please make sure that all the following items have been 18 | checked off. If any of the checklist items are not applicable, please leave them but 19 | write a little note why. 20 | 21 | - [ ] Targeted PR against correct branch (main) 22 | - [ ] Linked to Github issue with discussion and accepted design OR have an explanation in the PR that describes this work. 23 | - [ ] Wrote unit tests 24 | - [ ] Updated relevant documentation in the code 25 | - [ ] Added a relevant changelog entry to the `Pending` section in `CHANGELOG.md` 26 | - [ ] Re-reviewed `Files changed` in the Github PR explorer 27 | -------------------------------------------------------------------------------- /.github/linters/.markdown-lint.yml: -------------------------------------------------------------------------------- 1 | # See https://github.com/DavidAnson/markdownlint#rules--aliases for list of markdown lint codes 2 | default: true 3 | # MD01 lint blocks having header's incrementing by more than # at a time. 
4 | MD001: false 5 | MD007: { indent: 4 } 6 | # MD013 blocks long lines 7 | MD013: false 8 | MD024: { siblings_only: true } 9 | MD025: false 10 | # MD033 lint blocks HTML in MD 11 | MD033: false 12 | # MD036 no-emphasis-as-heading 13 | MD036: false 14 | MD041: false 15 | -------------------------------------------------------------------------------- /.github/linters/.markdownlint.yml: -------------------------------------------------------------------------------- 1 | # See https://github.com/DavidAnson/markdownlint#rules--aliases for list of markdown lint codes 2 | default: true 3 | # MD01 lint blocks having header's incrementing by more than # at a time. 4 | MD001: false 5 | MD007: { indent: 4 } 6 | # MD013 blocks long lines 7 | MD013: false 8 | MD024: { siblings_only: true } 9 | MD025: false 10 | # MD033 lint blocks HTML in MD 11 | MD033: false 12 | # MD036 no-emphasis-as-heading 13 | MD036: false 14 | MD041: false 15 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | merge_group: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | env: 9 | RUST_BACKTRACE: 1 10 | 11 | jobs: 12 | style: 13 | name: Check Style 14 | runs-on: ubuntu-latest 15 | steps: 16 | 17 | - name: Checkout 18 | uses: actions/checkout@v1 19 | - name: Install Rust 20 | uses: actions-rs/toolchain@v1 21 | with: 22 | profile: minimal 23 | toolchain: stable 24 | override: true 25 | components: rustfmt 26 | 27 | - name: cargo fmt --check 28 | uses: actions-rs/cargo@v1 29 | with: 30 | command: fmt 31 | args: --all -- --check 32 | 33 | test: 34 | name: Test 35 | runs-on: ubuntu-latest 36 | env: 37 | RUSTFLAGS: -Dwarnings 38 | strategy: 39 | matrix: 40 | rust: 41 | - stable 42 | - nightly 43 | steps: 44 | - name: Checkout 45 | uses: actions/checkout@v2 46 | 47 | - name: Install Rust (${{ matrix.rust }}) 48 | uses: actions-rs/toolchain@v1 49 | with: 50 | profile: minimal 51 | toolchain: ${{ matrix.rust }} 52 | override: true 53 | 54 | - uses: actions/cache@v4 55 | with: 56 | path: | 57 | ~/.cargo/registry 58 | ~/.cargo/git 59 | target 60 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 61 | 62 | - name: Check examples 63 | uses: actions-rs/cargo@v1 64 | with: 65 | command: check 66 | args: --examples --all 67 | 68 | - name: Check examples with all features on stable 69 | uses: actions-rs/cargo@v1 70 | with: 71 | command: check 72 | args: --examples --all-features --all 73 | if: matrix.rust == 'stable' 74 | 75 | - name: Check benchmarks on nightly 76 | uses: actions-rs/cargo@v1 77 | with: 78 | command: check 79 | args: --all-features --examples --all --benches 80 | if: matrix.rust == 'nightly' 81 | 82 | - name: Test 83 | uses: actions-rs/cargo@v1 84 | with: 85 | command: test 86 | args: "--all \ 87 | --all-features \ 88 | --exclude cp-benches " 89 | 90 | check_no_std: 91 | name: Check no_std 92 | runs-on: ubuntu-latest 93 | steps: 94 | - name: Checkout 95 | uses: actions/checkout@v2 96 | 97 | - name: Install Rust 98 | uses: actions-rs/toolchain@v1 99 | with: 100 | toolchain: stable 101 | target: thumbv6m-none-eabi 102 | override: true 103 | 104 | - name: Install Rust ARM64 105 | uses: actions-rs/toolchain@v1 106 | with: 107 | toolchain: stable 108 | target: aarch64-unknown-none 109 | override: true 110 | 111 | - uses: actions/cache@v4 112 | with: 113 | path: | 114 | ~/.cargo/registry 115 | ~/.cargo/git 116 | target 117 | key: ${{ runner.os 
}}-cargo-${{ hashFiles('**/Cargo.lock') }} 118 | 119 | - name: crypto-primitives 120 | run: | 121 | cargo build --no-default-features --features=r1cs,merkle_tree,prf,encryption,signature,snark --target aarch64-unknown-none 122 | cargo check --all --no-default-features --features=r1cs,merkle_tree,prf,encryption,signature,snark --target aarch64-unknown-none 123 | -------------------------------------------------------------------------------- /.github/workflows/linkify_changelog.yml: -------------------------------------------------------------------------------- 1 | name: Linkify Changelog 2 | 3 | on: 4 | workflow_dispatch 5 | 6 | jobs: 7 | linkify: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - name: Checkout 11 | uses: actions/checkout@v2 12 | - name: Add links 13 | run: python3 scripts/linkify_changelog.py CHANGELOG.md 14 | - name: Commit 15 | run: | 16 | git config user.name github-actions 17 | git config user.email github-actions@github.com 18 | git add . 19 | git commit -m "Linkify Changelog" 20 | git push -------------------------------------------------------------------------------- /.github/workflows/mdlinter.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: 3 | merge_group: 4 | branches: [master, main] 5 | push: 6 | branches: [master, main] 7 | pull_request: 8 | branches: [master, main] 9 | 10 | ############### 11 | # Set the Job # 12 | ############### 13 | jobs: 14 | build: 15 | # Name the Job 16 | name: Lint Code Base 17 | # Set the agent to run on 18 | runs-on: ubuntu-latest 19 | 20 | ################## 21 | # Load all steps # 22 | ################## 23 | steps: 24 | ########################## 25 | # Checkout the code base # 26 | ########################## 27 | - name: Checkout Code 28 | uses: actions/checkout@v4 29 | with: 30 | # Full git history is needed to get a proper list of changed files within `super-linter` 31 | fetch-depth: 0 32 | 33 | ################################ 34 | # Run Linter against code base # 35 | ################################ 36 | - name: Lint Code Base 37 | uses: github/super-linter/slim@v4 38 | env: 39 | VALIDATE_ALL_CODEBASE: false 40 | DEFAULT_BRANCH: master 41 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 42 | VALIDATE_PROTOBUF: false 43 | VALIDATE_JSCPD: false 44 | # use Python Pylint as the only linter to avoid conflicts 45 | VALIDATE_PYTHON_BLACK: false 46 | VALIDATE_PYTHON_FLAKE8: false 47 | VALIDATE_PYTHON_ISORT: false 48 | VALIDATE_PYTHON_MYPY: false 49 | VALIDATE_YAML: false -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | .DS_Store 4 | .idea 5 | *.iml 6 | *.ipynb_checkpoints 7 | *.pyc 8 | *.sage.py 9 | params 10 | *.swp 11 | *.swo 12 | .vscode -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # CHANGELOG 2 | 3 | ## Pending 4 | 5 | ### Breaking changes 6 | 7 | ### Features 8 | 9 | ### Improvements 10 | 11 | ### Bugfixes 12 | 13 | ## v0.5.0 14 | 15 | - [\#120](https://github.com/arkworks-rs/crypto-primitives/pull/120) Add input size check to `bowe_hopwood::CRHGadget::evaluate`. 
16 | 17 | ### Breaking changes 18 | 19 | ### Features 20 | 21 | - [\#107](https://github.com/arkworks-rs/crypto-primitives/pull/107) Impl `CanonicalSerialize` and `CanonicalDeserialize` for `ark_crypto_primitives::crh::pedersen::Parameters` 22 | 23 | ### Improvements 24 | 25 | ### Bugfixes 26 | 27 | ## v0.4.0 28 | 29 | ### Breaking changes 30 | 31 | - [\#56](https://github.com/arkworks-rs/crypto-primitives/pull/56) Compress the output of the Bowe-Hopwood-Pedersen CRH to a single field element, in line with the Zcash specification. 32 | - [\#60](https://github.com/arkworks-rs/crypto-primitives/pull/60) Merkle tree's `Config` requires a user-defined converter to turn leaf hash output to inner hash output. 33 | - [\#60](https://github.com/arkworks-rs/crypto-primitives/pull/60) Rename the CRH trait as `CRHScheme` and the CRHGadget trait to `CRHSchemeGadget`. 34 | - [\#60](https://github.com/arkworks-rs/crypto-primitives/pull/60) Use `ark-sponge` to instantiate Poseidon. 35 | - [\#76](https://github.com/arkworks-rs/crypto-primitives/pull/79) Fix Pedersen padding bug. 36 | - [\#77](https://github.com/arkworks-rs/crypto-primitives/pull/77) Implement SHA-256 CRH. 37 | - [\#86](https://github.com/arkworks-rs/crypto-primitives/pull/86) 38 | - Moves `ark-sponge` here. 39 | - Updates dependencies and version number to `0.4`. 40 | - Adds feature flags to enable downstream users to select exactly those components that they're interested in. 41 | - [\#103](https://github.com/arkworks-rs/crypto-primitives/pull/103) Removes `cp-benches` and moves contents to `benches` 42 | - [\#104](https://github.com/arkworks-rs/crypto-primitives/pull/104) Updates `digest`, `blake2`, `sha2` to `0.10`. Changes API for `Blake2sWithParameterBlock`. 43 | 44 | ### Features 45 | 46 | - [\#59](https://github.com/arkworks-rs/crypto-primitives/pull/59) Implement `TwoToOneCRHScheme` for Bowe-Hopwood CRH. 47 | - [\#60](https://github.com/arkworks-rs/crypto-primitives/pull/60) Merkle tree no longer requires CRH to input and output bytes. Leaf can be any raw input of CRH, such as field elements. 48 | - [\#67](https://github.com/arkworks-rs/crypto-primitives/pull/67) User can access or replace leaf index variable in `PathVar`. 49 | 50 | ### Improvements 51 | 52 | ### Bugfixes 53 | 54 | ## v0.3.0 55 | 56 | ### Breaking changes 57 | 58 | - [\#30](https://github.com/arkworks-rs/crypto-primitives/pull/30) Refactor the Merkle tree to separate the leaf hash and two-to-one hash. 59 | 60 | ### Features 61 | 62 | - [\#38](https://github.com/arkworks-rs/crypto-primitives/pull/38) Add a signature verification trait `SigVerifyGadget`. 63 | - [\#44](https://github.com/arkworks-rs/crypto-primitives/pull/44) Add basic ElGamal encryption gadgets. 64 | - [\#48](https://github.com/arkworks-rs/crypto-primitives/pull/48) Add `CanonicalSerialize` and `CanonicalDeserialize` to `Path` and `CRH` outputs. 65 | 66 | ### Improvements 67 | 68 | ### Bugfixes 69 | 70 | ## v0.2.0 71 | 72 | ### Breaking changes 73 | 74 | ### Features 75 | 76 | - [\#2](https://github.com/arkworks-rs/crypto-primitives/pull/2) Add the `SNARK` gadget traits. 77 | - [\#3](https://github.com/arkworks-rs/crypto-primitives/pull/3) Add unchecked allocation for `ProofVar` and `VerifyingKeyVar`. 78 | - [\#4](https://github.com/arkworks-rs/crypto-primitives/pull/4) Add `verifier_size` to `SNARKGadget`. 79 | - [\#6](https://github.com/arkworks-rs/crypto-primitives/pull/6) Add `IntoIterator` for SNARK input gadgets. 
80 | - [\#28](https://github.com/arkworks-rs/crypto-primitives/pull/28) Adds Poseidon CRH w/ constraints. 81 | 82 | ### Improvements 83 | 84 | ### Bugfixes 85 | 86 | ## v0.1.0 (Initial release of arkworks/crypto-primitives) 87 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "crypto-primitives", 4 | "macros", 5 | ] 6 | resolver = "2" 7 | 8 | [workspace.package] 9 | version = "0.5.0" 10 | authors = [ "arkworks contributors" ] 11 | description = "A library of useful cryptographic primitives" 12 | homepage = "https://arkworks.rs" 13 | repository = "https://github.com/arkworks-rs/crypto-primitives" 14 | documentation = "https://docs.rs/ark-crypto-primitives/" 15 | keywords = [ "r1cs", "pedersen", "blake2s", "snark", "schnorr" ] 16 | categories = ["cryptography"] 17 | include = ["Cargo.toml", "src", "README.md", "LICENSE-APACHE", "LICENSE-MIT"] 18 | license = "MIT/Apache-2.0" 19 | edition = "2021" 20 | 21 | [profile.release] 22 | opt-level = 3 23 | lto = "thin" 24 | incremental = true 25 | panic = 'abort' 26 | 27 | [profile.bench] 28 | opt-level = 3 29 | debug = false 30 | rpath = false 31 | lto = "thin" 32 | incremental = true 33 | debug-assertions = false 34 | 35 | [profile.dev] 36 | opt-level = 0 37 | panic = 'abort' 38 | 39 | [profile.test] 40 | opt-level = 3 41 | lto = "thin" 42 | incremental = true 43 | debug-assertions = true 44 | debug = true 45 | 46 | # [patch.crates-io] 47 | # ark-r1cs-std = { git = "https://github.com/arkworks-rs/r1cs-std/" } 48 | # ark-ff = { git = "https://github.com/arkworks-rs/algebra/" } 49 | # ark-ec = { git = "https://github.com/arkworks-rs/algebra/" } 50 | # ark-poly = { git = "https://github.com/arkworks-rs/algebra/" } 51 | # ark-serialize = { git = "https://github.com/arkworks-rs/algebra/" } 52 | # ark-std = { git = "https://github.com/arkworks-rs/std/" } 53 | 54 | # ark-ed-on-bls12-377 = { git = "https://github.com/arkworks-rs/algebra/" } 55 | # ark-ed-on-bls12-381 = { git = "https://github.com/arkworks-rs/algebra/" } 56 | # ark-bls12-377 = { git = "https://github.com/arkworks-rs/algebra/" } 57 | # ark-mnt4-298 = { git = "https://github.com/arkworks-rs/algebra/" } 58 | # ark-mnt6-298 = { git = "https://github.com/arkworks-rs/algebra/" } 59 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

ark-crypto-primitives

9 | 10 | The arkworks ecosystem consists of Rust libraries for designing and working with __zero-knowledge succinct non-interactive arguments (zkSNARKs)__. This repository contains efficient implementations of cryptographic primitives such as collision-resistant hash functions, hiding commitments, pseudo-random functions, signatures, and, optionally, R1CS constraints for these. 11 | 12 | This library is released under the MIT License and the Apache v2 License (see [License](#license)). 13 | 14 | **WARNING:** This is an academic proof-of-concept prototype, and in particular has not received careful code review. This implementation is NOT ready for production use. 15 | 16 | ## Build guide 17 | 18 | The library compiles on the `stable` toolchain of the Rust compiler. To install the latest version of Rust, first install `rustup` by following the instructions [here](https://rustup.rs/), or via your platform's package manager. Once `rustup` is installed, install the Rust toolchain by invoking: 19 | 20 | ```bash 21 | rustup install stable 22 | ``` 23 | 24 | After that, use `cargo`, the standard Rust build tool, to build the library: 25 | 26 | ```bash 27 | git clone https://github.com/arkworks-rs/crypto-primitives.git 28 | cargo build --release 29 | ``` 30 | 31 | This library comes with unit tests for each of the provided crates. Run the tests with: 32 | 33 | ```bash 34 | cargo test 35 | ``` 36 | 37 | ## License 38 | 39 | This library is licensed under either of the following licenses, at your discretion. 40 | 41 | * Apache License Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or [apache.org license link](http://www.apache.org/licenses/LICENSE-2.0)) 42 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or [opensource.org license link](http://opensource.org/licenses/MIT)) 43 | 44 | Unless you explicitly state otherwise, any contribution submitted for inclusion in this library by you shall be dual licensed as above (as defined in the Apache v2 License), without any additional terms or conditions. 45 | 46 | ## Acknowledgements 47 | 48 | This work was supported by: 49 | a Google Faculty Award; 50 | the National Science Foundation; 51 | the UC Berkeley Center for Long-Term Cybersecurity; 52 | and donations from the Ethereum Foundation, the Interchain Foundation, and Qtum. 53 | 54 | An earlier version of this library was developed as part of the paper *"[ZEXE: Enabling Decentralized Private Computation][zexe]"*.
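As a quick illustration of the API described above, here is a minimal usage sketch that hashes a byte string with the Pedersen CRH. It mirrors the benchmark in `crypto-primitives/benches/crh.rs` (the window parameters are taken from there), and assumes the crate is built with the `crh` feature and that the `ark-ed-on-bls12-377` curve crate — already a dev-dependency of this repository — is available; treat it as a sketch rather than canonical usage.

```rust
use ark_crypto_primitives::crh::{
    pedersen::{Window, CRH as PedersenCRH},
    CRHScheme,
};
use ark_ed_on_bls12_377::EdwardsProjective as Edwards;

// A Pedersen window: the CRH can absorb up to WINDOW_SIZE * NUM_WINDOWS input bits.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct HashWindow;

impl Window for HashWindow {
    const WINDOW_SIZE: usize = 250;
    const NUM_WINDOWS: usize = 8;
}

fn main() {
    // Sample public parameters (the Pedersen generators) and hash a message.
    let mut rng = ark_std::test_rng();
    let parameters = PedersenCRH::<Edwards, HashWindow>::setup(&mut rng).unwrap();
    let input = vec![5u8; 128];
    let hash = PedersenCRH::<Edwards, HashWindow>::evaluate(&parameters, input).unwrap();

    // The CRH is deterministic for fixed parameters and input.
    let again = PedersenCRH::<Edwards, HashWindow>::evaluate(&parameters, vec![5u8; 128]).unwrap();
    assert_eq!(hash, again);
}
```

The other primitives follow the same setup-then-evaluate pattern (setup/commit for commitments, setup/sign/verify for signatures, and so on), as the benchmarks under `crypto-primitives/benches/` show.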
55 | 56 | [zexe]: https://ia.cr/2018/962 57 | -------------------------------------------------------------------------------- /crypto-primitives/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "ark-crypto-primitives" 3 | description.workspace = true 4 | documentation.workspace = true 5 | keywords.workspace = true 6 | version.workspace = true 7 | authors.workspace = true 8 | homepage.workspace = true 9 | repository.workspace = true 10 | categories.workspace = true 11 | include.workspace = true 12 | license.workspace = true 13 | edition.workspace = true 14 | 15 | ################################# Dependencies ################################ 16 | 17 | [dependencies] 18 | ark-crypto-primitives-macros = { version = "0.5.0", path = "../macros" } 19 | 20 | ark-ff = { version = "0.5.0", default-features = false } 21 | ark-ec = { version = "0.5.0", default-features = false } 22 | ark-std = { version = "0.5.0", default-features = false } 23 | ark-relations = { version = "0.5.0", default-features = false } 24 | ark-serialize = { version = "0.5.0", default-features = false, features = [ "derive" ] } 25 | 26 | blake2 = { version = "0.10", default-features = false } 27 | sha2 = { version = "0.10", default-features = false } 28 | digest = { version = "0.10", default-features = false } 29 | merlin = { version = "3.0.0", default-features = false, optional = true } 30 | 31 | ark-r1cs-std = { version = "0.5.0", optional = true, default-features = false } 32 | ark-snark = { version = "0.5.0", default-features = false } 33 | 34 | rayon = { version = "1.0", optional = true } 35 | derivative = { version = "2.0", features = ["use_core"] } 36 | tracing = { version = "0.1", default-features = false, features = [ "attributes" ], optional = true } 37 | hashbrown = { version = "0.14", default-features = false, features = ["inline-more", "allocator-api2"], optional = true } 38 | 39 | [features] 40 | default = ["std"] 41 | std = [ "ark-ff/std", "ark-ec/std", "ark-std/std", "ark-relations/std" ] 42 | print-trace = [ "ark-std/print-trace" ] 43 | parallel = [ "std", "rayon", "ark-ec/parallel", "ark-std/parallel", "ark-ff/parallel" ] 44 | r1cs = [ "ark-r1cs-std", "tracing" ] 45 | crh = [ "sponge" ] 46 | sponge = [ "merlin" ] 47 | commitment = [ "crh" ] 48 | merkle_tree = ["crh", "hashbrown"] 49 | encryption = [] 50 | prf = [] 51 | snark = [] 52 | signature = [] 53 | asm = [ "ark-ff/asm" ] 54 | 55 | [target.'cfg(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr"))'.dependencies] 56 | ahash = { version = "0.8", default-features = false} 57 | 58 | [target.'cfg(not(all(target_has_atomic = "8", target_has_atomic = "16", target_has_atomic = "32", target_has_atomic = "64", target_has_atomic = "ptr")))'.dependencies] 59 | fnv = { version = "1.0", default-features = false } 60 | 61 | [dev-dependencies] 62 | ark-ed-on-bls12-377 = { version = "0.5.0", default-features = false } 63 | ark-ed-on-bls12-381 = { version = "0.5.0", default-features = false, features = [ "r1cs" ] } 64 | ark-bls12-377 = { version = "0.5.0", default-features = false, features = [ "curve", "r1cs" ] } 65 | ark-mnt4-298 = { version = "0.5.0", default-features = false, features = [ "curve", "r1cs" ] } 66 | ark-mnt6-298 = { version = "0.5.0", default-features = false, features = [ "r1cs" ] } 67 | criterion = { version = "0.5" } 68 | 69 | ################################# Benchmarks 
################################## 70 | 71 | [[bench]] 72 | name = "pedersen_crh" 73 | path = "benches/crh.rs" 74 | harness = false 75 | required-features = [ "crh" ] 76 | 77 | [[bench]] 78 | name = "pedersen_comm" 79 | path = "benches/comm.rs" 80 | harness = false 81 | required-features = [ "commitment" ] 82 | 83 | [[bench]] 84 | name = "blake2s_prf" 85 | path = "benches/prf.rs" 86 | harness = false 87 | required-features = [ "prf" ] 88 | 89 | [[bench]] 90 | name = "schnorr_sig" 91 | path = "benches/signature.rs" 92 | harness = false 93 | required-features = [ "signature" ] 94 | 95 | [[bench]] 96 | name = "merkle_tree" 97 | path = "benches/merkle_tree.rs" 98 | harness = false 99 | required-features = [ "merkle_tree" ] 100 | -------------------------------------------------------------------------------- /crypto-primitives/LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | ../LICENSE-APACHE -------------------------------------------------------------------------------- /crypto-primitives/LICENSE-MIT: -------------------------------------------------------------------------------- 1 | ../LICENSE-MIT -------------------------------------------------------------------------------- /crypto-primitives/benches/comm.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate criterion; 3 | 4 | use ark_crypto_primitives::commitment::{pedersen::*, CommitmentScheme}; 5 | use ark_ed_on_bls12_377::EdwardsProjective as Edwards; 6 | use ark_std::UniformRand; 7 | use criterion::Criterion; 8 | 9 | #[derive(Clone, PartialEq, Eq, Hash)] 10 | pub struct CommWindow; 11 | 12 | impl Window for CommWindow { 13 | const WINDOW_SIZE: usize = 250; 14 | const NUM_WINDOWS: usize = 8; 15 | } 16 | 17 | fn pedersen_comm_setup(c: &mut Criterion) { 18 | c.bench_function("Pedersen Commitment Setup", move |b| { 19 | b.iter(|| { 20 | let mut rng = &mut ark_std::test_rng(); 21 | Commitment::::setup(&mut rng).unwrap() 22 | }) 23 | }); 24 | } 25 | 26 | fn pedersen_comm_eval(c: &mut Criterion) { 27 | let mut rng = &mut ark_std::test_rng(); 28 | let parameters = Commitment::::setup(&mut rng).unwrap(); 29 | let input = vec![5u8; 128]; 30 | c.bench_function("Pedersen Commitment Eval", move |b| { 31 | b.iter(|| { 32 | let rng = &mut ark_std::test_rng(); 33 | let commitment_randomness = Randomness::rand(rng); 34 | Commitment::::commit(¶meters, &input, &commitment_randomness) 35 | .unwrap() 36 | }) 37 | }); 38 | } 39 | 40 | criterion_group! { 41 | name = comm_setup; 42 | config = Criterion::default().sample_size(10); 43 | targets = pedersen_comm_setup 44 | } 45 | 46 | criterion_group! 
{ 47 | name = comm_eval; 48 | config = Criterion::default().sample_size(10); 49 | targets = pedersen_comm_eval 50 | } 51 | 52 | criterion_main!(comm_setup, comm_eval); 53 | -------------------------------------------------------------------------------- /crypto-primitives/benches/crh.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate criterion; 3 | 4 | use ark_crypto_primitives::crh::{ 5 | pedersen::{Window, CRH as PedersenCRH}, 6 | CRHScheme, 7 | }; 8 | use ark_ed_on_bls12_377::EdwardsProjective as Edwards; 9 | use criterion::Criterion; 10 | 11 | #[derive(Clone, PartialEq, Eq, Hash)] 12 | pub struct HashWindow; 13 | 14 | impl Window for HashWindow { 15 | const WINDOW_SIZE: usize = 250; 16 | const NUM_WINDOWS: usize = 8; 17 | } 18 | 19 | fn pedersen_crh_setup(c: &mut Criterion) { 20 | c.bench_function("Pedersen CRH Setup", move |b| { 21 | b.iter(|| { 22 | let mut rng = &mut ark_std::test_rng(); 23 | PedersenCRH::::setup(&mut rng).unwrap() 24 | }) 25 | }); 26 | } 27 | 28 | fn pedersen_crh_eval(c: &mut Criterion) { 29 | let mut rng = &mut ark_std::test_rng(); 30 | let parameters = PedersenCRH::::setup(&mut rng).unwrap(); 31 | let input = vec![5u8; 128]; 32 | c.bench_function("Pedersen CRH Eval", move |b| { 33 | b.iter(|| PedersenCRH::::evaluate(¶meters, input.clone()).unwrap()) 34 | }); 35 | } 36 | 37 | criterion_group! { 38 | name = crh_setup; 39 | config = Criterion::default().sample_size(10); 40 | targets = pedersen_crh_setup 41 | } 42 | 43 | criterion_group! { 44 | name = crh_eval; 45 | config = Criterion::default().sample_size(10); 46 | targets = pedersen_crh_eval 47 | } 48 | 49 | criterion_main!(crh_setup, crh_eval); 50 | -------------------------------------------------------------------------------- /crypto-primitives/benches/merkle_tree.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate criterion; 3 | 4 | static NUM_LEAVES: i32 = 1 << 20; 5 | 6 | mod bytes_mt_benches { 7 | use ark_crypto_primitives::crh::*; 8 | use ark_crypto_primitives::merkle_tree::*; 9 | use ark_crypto_primitives::to_uncompressed_bytes; 10 | use ark_ff::BigInteger256; 11 | use ark_serialize::CanonicalSerialize; 12 | use ark_std::{test_rng, UniformRand}; 13 | use criterion::Criterion; 14 | use std::borrow::Borrow; 15 | use std::iter::zip; 16 | 17 | use crate::NUM_LEAVES; 18 | 19 | type LeafH = sha2::Sha256; 20 | type CompressH = sha2::Sha256; 21 | 22 | struct Sha256MerkleTreeParams; 23 | 24 | impl Config for Sha256MerkleTreeParams { 25 | type Leaf = [u8]; 26 | 27 | type LeafDigest = ::Output; 28 | type LeafInnerDigestConverter = ByteDigestConverter; 29 | type InnerDigest = ::Output; 30 | 31 | type LeafHash = LeafH; 32 | type TwoToOneHash = CompressH; 33 | } 34 | type Sha256MerkleTree = MerkleTree; 35 | 36 | pub fn merkle_tree_create(c: &mut Criterion) { 37 | let mut rng = test_rng(); 38 | let leaves: Vec<_> = (0..NUM_LEAVES) 39 | .map(|_| { 40 | let rnd = BigInteger256::rand(&mut rng); 41 | to_uncompressed_bytes!(rnd).unwrap() 42 | }) 43 | .collect(); 44 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 45 | let two_to_one_params = ::setup(&mut rng) 46 | .unwrap() 47 | .clone(); 48 | c.bench_function("Merkle Tree Create (Leaves as [u8])", move |b| { 49 | b.iter(|| { 50 | Sha256MerkleTree::new( 51 | &leaf_crh_params.clone(), 52 | &two_to_one_params.clone(), 53 | &leaves, 54 | ) 55 | .unwrap(); 56 | }) 57 | }); 58 | } 59 | 60 | pub fn merkle_tree_generate_proof(c: &mut Criterion) 
{ 61 | let mut rng = test_rng(); 62 | let leaves: Vec<_> = (0..NUM_LEAVES) 63 | .map(|_| { 64 | let rnd = BigInteger256::rand(&mut rng); 65 | to_uncompressed_bytes!(rnd).unwrap() 66 | }) 67 | .collect(); 68 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 69 | let two_to_one_params = ::setup(&mut rng) 70 | .unwrap() 71 | .clone(); 72 | 73 | let tree = Sha256MerkleTree::new( 74 | &leaf_crh_params.clone(), 75 | &two_to_one_params.clone(), 76 | &leaves, 77 | ) 78 | .unwrap(); 79 | c.bench_function("Merkle Tree Generate Proof (Leaves as [u8])", move |b| { 80 | b.iter(|| { 81 | for (i, _) in leaves.iter().enumerate() { 82 | tree.generate_proof(i).unwrap(); 83 | } 84 | }) 85 | }); 86 | } 87 | 88 | pub fn merkle_tree_verify_proof(c: &mut Criterion) { 89 | let mut rng = test_rng(); 90 | let leaves: Vec<_> = (0..NUM_LEAVES) 91 | .map(|_| { 92 | let rnd = BigInteger256::rand(&mut rng); 93 | to_uncompressed_bytes!(rnd).unwrap() 94 | }) 95 | .collect(); 96 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 97 | let two_to_one_params = ::setup(&mut rng) 98 | .unwrap() 99 | .clone(); 100 | 101 | let tree = Sha256MerkleTree::new( 102 | &leaf_crh_params.clone(), 103 | &two_to_one_params.clone(), 104 | &leaves, 105 | ) 106 | .unwrap(); 107 | 108 | let root = tree.root(); 109 | 110 | let proofs: Vec<_> = leaves 111 | .iter() 112 | .enumerate() 113 | .map(|(i, _)| tree.generate_proof(i).unwrap()) 114 | .collect(); 115 | 116 | c.bench_function("Merkle Tree Verify Proof (Leaves as [u8])", move |b| { 117 | b.iter(|| { 118 | for (proof, leaf) in zip(proofs.clone(), leaves.clone()) { 119 | proof 120 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaf.as_slice()) 121 | .unwrap(); 122 | } 123 | }) 124 | }); 125 | } 126 | 127 | pub fn merkle_tree_generate_multi_proof(c: &mut Criterion) { 128 | let mut rng = test_rng(); 129 | let leaves: Vec<_> = (0..NUM_LEAVES) 130 | .map(|_| { 131 | let rnd = BigInteger256::rand(&mut rng); 132 | to_uncompressed_bytes!(rnd).unwrap() 133 | }) 134 | .collect(); 135 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 136 | let two_to_one_params = ::setup(&mut rng) 137 | .unwrap() 138 | .clone(); 139 | 140 | let tree = Sha256MerkleTree::new( 141 | &leaf_crh_params.clone(), 142 | &two_to_one_params.clone(), 143 | &leaves, 144 | ) 145 | .unwrap(); 146 | c.bench_function( 147 | "Merkle Tree Generate Multi Proof (Leaves as [u8])", 148 | move |b| { 149 | b.iter(|| { 150 | tree.generate_multi_proof((0..leaves.len()).collect::>()) 151 | .unwrap(); 152 | }) 153 | }, 154 | ); 155 | } 156 | 157 | pub fn merkle_tree_verify_multi_proof(c: &mut Criterion) { 158 | let mut rng = test_rng(); 159 | let leaves: Vec<_> = (0..NUM_LEAVES) 160 | .map(|_| { 161 | let rnd = BigInteger256::rand(&mut rng); 162 | to_uncompressed_bytes!(rnd).unwrap() 163 | }) 164 | .collect(); 165 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 166 | let two_to_one_params = ::setup(&mut rng) 167 | .unwrap() 168 | .clone(); 169 | 170 | let tree = Sha256MerkleTree::new( 171 | &leaf_crh_params.clone(), 172 | &two_to_one_params.clone(), 173 | &leaves, 174 | ) 175 | .unwrap(); 176 | 177 | let root = tree.root(); 178 | 179 | let multi_proof = tree 180 | .generate_multi_proof((0..leaves.len()).collect::>()) 181 | .unwrap(); 182 | 183 | c.bench_function( 184 | "Merkle Tree Verify Multi Proof (Leaves as [u8])", 185 | move |b| { 186 | b.iter(|| { 187 | multi_proof.verify(&leaf_crh_params, &two_to_one_params, &root, leaves.clone()) 188 | }) 189 | }, 190 | ); 191 | } 192 | 193 | criterion_group! 
{ 194 | name = mt_create; 195 | config = Criterion::default().sample_size(100); 196 | targets = merkle_tree_create 197 | } 198 | 199 | criterion_group! { 200 | name = mt_proof; 201 | config = Criterion::default().sample_size(100); 202 | targets = merkle_tree_generate_proof, merkle_tree_generate_multi_proof 203 | } 204 | 205 | criterion_group! { 206 | name = mt_verify; 207 | config = Criterion::default().sample_size(10); 208 | targets = merkle_tree_verify_proof, merkle_tree_verify_multi_proof 209 | } 210 | } 211 | 212 | criterion_main!( 213 | bytes_mt_benches::mt_create, 214 | bytes_mt_benches::mt_proof, 215 | bytes_mt_benches::mt_verify 216 | ); 217 | -------------------------------------------------------------------------------- /crypto-primitives/benches/prf.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate criterion; 3 | 4 | use ark_crypto_primitives::prf::*; 5 | use ark_std::rand::Rng; 6 | use criterion::Criterion; 7 | 8 | fn blake2s_prf_eval(c: &mut Criterion) { 9 | let rng = &mut ark_std::test_rng(); 10 | let input: [u8; 32] = rng.gen(); 11 | let seed: [u8; 32] = rng.gen(); 12 | c.bench_function("Blake2s PRF Eval", move |b| { 13 | b.iter(|| Blake2s::evaluate(&seed, &input).unwrap()) 14 | }); 15 | } 16 | 17 | criterion_group! { 18 | name = prf_eval; 19 | config = Criterion::default().sample_size(50); 20 | targets = blake2s_prf_eval 21 | } 22 | 23 | criterion_main!(prf_eval); 24 | -------------------------------------------------------------------------------- /crypto-primitives/benches/signature.rs: -------------------------------------------------------------------------------- 1 | #[macro_use] 2 | extern crate criterion; 3 | 4 | use ark_crypto_primitives::signature::{schnorr::*, SignatureScheme}; 5 | use ark_ed_on_bls12_377::EdwardsProjective as Edwards; 6 | use ark_std::rand::Rng; 7 | use blake2::Blake2s256 as Blake2s; 8 | use criterion::Criterion; 9 | 10 | type SchnorrEdwards = Schnorr; 11 | fn schnorr_signature_setup(c: &mut Criterion) { 12 | c.bench_function("SchnorrEdwards: Setup", move |b| { 13 | b.iter(|| { 14 | let mut rng = &mut ark_std::test_rng(); 15 | SchnorrEdwards::setup(&mut rng).unwrap() 16 | }) 17 | }); 18 | } 19 | 20 | fn schnorr_signature_keygen(c: &mut Criterion) { 21 | let mut rng = &mut ark_std::test_rng(); 22 | let parameters = SchnorrEdwards::setup(&mut rng).unwrap(); 23 | 24 | c.bench_function("SchnorrEdwards: KeyGen", move |b| { 25 | b.iter(|| { 26 | let mut rng = &mut ark_std::test_rng(); 27 | SchnorrEdwards::keygen(¶meters, &mut rng).unwrap() 28 | }) 29 | }); 30 | } 31 | 32 | fn schnorr_signature_sign(c: &mut Criterion) { 33 | let mut rng = &mut ark_std::test_rng(); 34 | let parameters = SchnorrEdwards::setup(&mut rng).unwrap(); 35 | let (_, sk) = SchnorrEdwards::keygen(¶meters, &mut rng).unwrap(); 36 | let message = [100u8; 128]; 37 | 38 | c.bench_function("SchnorrEdwards: Sign", move |b| { 39 | b.iter(|| { 40 | let mut rng = &mut ark_std::test_rng(); 41 | SchnorrEdwards::sign(¶meters, &sk, &message, &mut rng).unwrap() 42 | }) 43 | }); 44 | } 45 | 46 | fn schnorr_signature_verify(c: &mut Criterion) { 47 | let mut rng = &mut ark_std::test_rng(); 48 | let parameters = SchnorrEdwards::setup(&mut rng).unwrap(); 49 | let (pk, sk) = SchnorrEdwards::keygen(¶meters, &mut rng).unwrap(); 50 | let message = [100u8; 128]; 51 | let signature = SchnorrEdwards::sign(¶meters, &sk, &message, &mut rng).unwrap(); 52 | 53 | c.bench_function("SchnorrEdwards: Verify", move |b| { 54 | b.iter(|| 
SchnorrEdwards::verify(¶meters, &pk, &message, &signature).unwrap()) 55 | }); 56 | } 57 | 58 | fn schnorr_signature_randomize_pk(c: &mut Criterion) { 59 | let mut rng = &mut ark_std::test_rng(); 60 | let parameters = SchnorrEdwards::setup(&mut rng).unwrap(); 61 | let (pk, _) = SchnorrEdwards::keygen(¶meters, &mut rng).unwrap(); 62 | let randomness: [u8; 32] = rng.gen(); 63 | 64 | c.bench_function("SchnorrEdwards: Randomize PubKey", move |b| { 65 | b.iter(|| SchnorrEdwards::randomize_public_key(¶meters, &pk, &randomness).unwrap()) 66 | }); 67 | } 68 | 69 | fn schnorr_signature_randomize_signature(c: &mut Criterion) { 70 | let mut rng = &mut ark_std::test_rng(); 71 | let parameters = SchnorrEdwards::setup(&mut rng).unwrap(); 72 | let (_, sk) = SchnorrEdwards::keygen(¶meters, &mut rng).unwrap(); 73 | let randomness: [u8; 32] = rng.gen(); 74 | let message = [100u8; 128]; 75 | let signature = SchnorrEdwards::sign(¶meters, &sk, &message, &mut rng).unwrap(); 76 | 77 | c.bench_function("SchnorrEdwards: Randomize Signature", move |b| { 78 | b.iter(|| { 79 | SchnorrEdwards::randomize_signature(¶meters, &signature, &randomness).unwrap() 80 | }) 81 | }); 82 | } 83 | criterion_group! { 84 | name = schnorr_sig; 85 | config = Criterion::default().sample_size(20); 86 | targets = schnorr_signature_setup, schnorr_signature_keygen, schnorr_signature_sign, 87 | schnorr_signature_verify, schnorr_signature_randomize_pk, schnorr_signature_randomize_signature 88 | } 89 | criterion_main!(schnorr_sig); 90 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/blake2s/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | commitment::{blake2s, CommitmentGadget}, 3 | prf::blake2s::constraints::{evaluate_blake2s, OutputVar}, 4 | }; 5 | use ark_ff::{Field, PrimeField}; 6 | use ark_r1cs_std::prelude::*; 7 | use ark_relations::r1cs::{Namespace, SynthesisError}; 8 | use ark_std::borrow::Borrow; 9 | 10 | #[derive(Clone)] 11 | pub struct ParametersVar; 12 | 13 | #[derive(Clone)] 14 | pub struct RandomnessVar(pub Vec>); 15 | 16 | pub struct CommGadget; 17 | 18 | impl CommitmentGadget for CommGadget { 19 | type OutputVar = OutputVar; 20 | type ParametersVar = ParametersVar; 21 | type RandomnessVar = RandomnessVar; 22 | 23 | #[tracing::instrument(target = "r1cs", skip(input, r))] 24 | fn commit( 25 | _: &Self::ParametersVar, 26 | input: &[UInt8], 27 | r: &Self::RandomnessVar, 28 | ) -> Result { 29 | let mut input_bits = Vec::with_capacity(512); 30 | for byte in input.iter().chain(r.0.iter()) { 31 | input_bits.extend_from_slice(&byte.to_bits_le()?); 32 | } 33 | let mut result = Vec::new(); 34 | for int in evaluate_blake2s(&input_bits)?.into_iter() { 35 | let chunk = int.to_bytes_le()?; 36 | result.extend_from_slice(&chunk); 37 | } 38 | Ok(OutputVar(result)) 39 | } 40 | } 41 | 42 | impl AllocVar<(), ConstraintF> for ParametersVar { 43 | #[tracing::instrument(target = "r1cs", skip(_cs, _f))] 44 | fn new_variable>( 45 | _cs: impl Into>, 46 | _f: impl FnOnce() -> Result, 47 | _mode: AllocationMode, 48 | ) -> Result { 49 | Ok(ParametersVar) 50 | } 51 | } 52 | 53 | impl AllocVar<[u8; 32], ConstraintF> for RandomnessVar { 54 | #[tracing::instrument(target = "r1cs", skip(cs, f))] 55 | fn new_variable>( 56 | cs: impl Into>, 57 | f: impl FnOnce() -> Result, 58 | mode: AllocationMode, 59 | ) -> Result { 60 | let bytes = f().map(|b| *b.borrow()).unwrap_or([0u8; 32]); 61 | match mode { 62 | AllocationMode::Constant => 
Ok(Self(UInt8::constant_vec(&bytes))), 63 | AllocationMode::Input => UInt8::new_input_vec(cs, &bytes).map(Self), 64 | AllocationMode::Witness => UInt8::new_witness_vec(cs, &bytes).map(Self), 65 | } 66 | } 67 | } 68 | 69 | #[cfg(test)] 70 | mod test { 71 | use crate::commitment::{ 72 | blake2s::{ 73 | constraints::{CommGadget, RandomnessVar}, 74 | Commitment, 75 | }, 76 | CommitmentGadget, CommitmentScheme, 77 | }; 78 | use ark_ed_on_bls12_381::Fq as Fr; 79 | use ark_r1cs_std::prelude::*; 80 | use ark_relations::r1cs::ConstraintSystem; 81 | use ark_std::rand::Rng; 82 | 83 | #[test] 84 | fn commitment_gadget_test() { 85 | let cs = ConstraintSystem::::new_ref(); 86 | 87 | let input = [1u8; 32]; 88 | 89 | let rng = &mut ark_std::test_rng(); 90 | 91 | type TestCOMM = Commitment; 92 | type TestCOMMGadget = CommGadget; 93 | 94 | let mut randomness = [0u8; 32]; 95 | rng.fill(&mut randomness); 96 | 97 | let parameters = (); 98 | let primitive_result = Commitment::commit(¶meters, &input, &randomness).unwrap(); 99 | 100 | let mut input_var = vec![]; 101 | for byte in &input { 102 | input_var.push(UInt8::new_witness(cs.clone(), || Ok(*byte)).unwrap()); 103 | } 104 | 105 | let mut randomness_var = vec![]; 106 | for r_byte in randomness.iter() { 107 | randomness_var.push(UInt8::new_witness(cs.clone(), || Ok(r_byte)).unwrap()); 108 | } 109 | let randomness_var = RandomnessVar(randomness_var); 110 | 111 | let parameters_var = 112 | >::ParametersVar::new_witness( 113 | ark_relations::ns!(cs, "gadget_parameters"), 114 | || Ok(¶meters), 115 | ) 116 | .unwrap(); 117 | let result_var = >::commit( 118 | ¶meters_var, 119 | &input_var, 120 | &randomness_var, 121 | ) 122 | .unwrap(); 123 | 124 | for i in 0..32 { 125 | assert_eq!(primitive_result[i], result_var.0[i].value().unwrap()); 126 | } 127 | assert!(cs.is_satisfied().unwrap()); 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/blake2s/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{commitment::CommitmentScheme, Error}; 2 | use ark_std::rand::Rng; 3 | use blake2::Blake2s256 as b2s; 4 | use digest::Digest; 5 | 6 | pub struct Commitment; 7 | 8 | #[cfg(feature = "r1cs")] 9 | pub mod constraints; 10 | 11 | impl CommitmentScheme for Commitment { 12 | type Parameters = (); 13 | type Randomness = [u8; 32]; 14 | type Output = [u8; 32]; 15 | 16 | fn setup(_: &mut R) -> Result { 17 | Ok(()) 18 | } 19 | 20 | fn commit( 21 | _: &Self::Parameters, 22 | input: &[u8], 23 | r: &Self::Randomness, 24 | ) -> Result { 25 | let mut h = b2s::new(); 26 | h.update(input); 27 | h.update(r.as_ref()); 28 | let mut result = [0u8; 32]; 29 | result.copy_from_slice(&h.finalize()); 30 | Ok(result) 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::commitment::CommitmentScheme; 2 | use ark_ff::Field; 3 | use ark_r1cs_std::prelude::*; 4 | use ark_relations::r1cs::SynthesisError; 5 | use ark_std::fmt::Debug; 6 | 7 | pub trait CommitmentGadget { 8 | type OutputVar: EqGadget 9 | + ToBytesGadget 10 | + AllocVar 11 | + R1CSVar 12 | + Clone 13 | + Sized 14 | + Debug; 15 | type ParametersVar: AllocVar + Clone; 16 | type RandomnessVar: AllocVar + Clone; 17 | 18 | fn commit( 19 | parameters: &Self::ParametersVar, 20 | input: &[UInt8], 21 | r: &Self::RandomnessVar, 22 | 
) -> Result; 23 | } 24 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/injective_map/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::commitment::{ 2 | injective_map::{InjectiveMap, PedersenCommCompressor}, 3 | pedersen::{ 4 | constraints::{CommGadget, ParametersVar, RandomnessVar}, 5 | Window, 6 | }, 7 | }; 8 | pub use crate::crh::injective_map::constraints::InjectiveMapGadget; 9 | use ark_ec::CurveGroup; 10 | use ark_ff::{Field, PrimeField}; 11 | use ark_r1cs_std::{ 12 | groups::{CurveVar, GroupOpsBounds}, 13 | uint8::UInt8, 14 | }; 15 | use ark_relations::r1cs::SynthesisError; 16 | use ark_std::marker::PhantomData; 17 | 18 | type ConstraintF = <::BaseField as Field>::BasePrimeField; 19 | 20 | pub struct CommitmentCompressorGadget 21 | where 22 | C: CurveGroup, 23 | I: InjectiveMap, 24 | W: Window, 25 | GG: CurveVar>, 26 | IG: InjectiveMapGadget, 27 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 28 | { 29 | _compressor: PhantomData, 30 | _compressor_gadget: PhantomData, 31 | _comm: PhantomData>, 32 | } 33 | 34 | impl 35 | crate::commitment::CommitmentGadget, ConstraintF> 36 | for CommitmentCompressorGadget 37 | where 38 | C: CurveGroup, 39 | I: InjectiveMap, 40 | GG: CurveVar>, 41 | ConstraintF: PrimeField, 42 | IG: InjectiveMapGadget, 43 | W: Window, 44 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 45 | { 46 | type OutputVar = IG::OutputVar; 47 | type ParametersVar = ParametersVar; 48 | type RandomnessVar = RandomnessVar>; 49 | 50 | fn commit( 51 | parameters: &Self::ParametersVar, 52 | input: &[UInt8>], 53 | r: &Self::RandomnessVar, 54 | ) -> Result { 55 | let result = CommGadget::::commit(parameters, input, r)?; 56 | IG::evaluate(&result) 57 | } 58 | } 59 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/injective_map/mod.rs: -------------------------------------------------------------------------------- 1 | pub use crate::crh::injective_map::InjectiveMap; 2 | use crate::{ 3 | commitment::{pedersen, CommitmentScheme}, 4 | Error, 5 | }; 6 | use ark_ec::CurveGroup; 7 | use ark_std::{marker::PhantomData, rand::Rng}; 8 | 9 | #[cfg(feature = "r1cs")] 10 | pub mod constraints; 11 | 12 | pub struct PedersenCommCompressor, W: pedersen::Window> { 13 | _group: PhantomData, 14 | _compressor: PhantomData, 15 | _comm: pedersen::Commitment, 16 | } 17 | 18 | impl, W: pedersen::Window> CommitmentScheme 19 | for PedersenCommCompressor 20 | { 21 | type Output = I::Output; 22 | type Parameters = pedersen::Parameters; 23 | type Randomness = pedersen::Randomness; 24 | 25 | fn setup(rng: &mut R) -> Result { 26 | let time = start_timer!(|| format!("PedersenCompressor::Setup")); 27 | let params = pedersen::Commitment::::setup(rng); 28 | end_timer!(time); 29 | params 30 | } 31 | 32 | fn commit( 33 | parameters: &Self::Parameters, 34 | input: &[u8], 35 | randomness: &Self::Randomness, 36 | ) -> Result { 37 | let eval_time = start_timer!(|| "PedersenCompressor::Eval"); 38 | let result = I::injective_map(&pedersen::Commitment::::commit( 39 | parameters, input, randomness, 40 | )?)?; 41 | end_timer!(eval_time); 42 | Ok(result) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::Error; 2 | use ark_ff::UniformRand; 3 | use 
ark_serialize::CanonicalSerialize; 4 | use ark_std::{fmt::Debug, hash::Hash, rand::Rng}; 5 | 6 | pub mod blake2s; 7 | pub mod injective_map; 8 | pub mod pedersen; 9 | 10 | #[cfg(feature = "r1cs")] 11 | pub mod constraints; 12 | #[cfg(feature = "r1cs")] 13 | pub use constraints::*; 14 | 15 | pub trait CommitmentScheme { 16 | type Output: CanonicalSerialize + Clone + Default + Eq + Hash + Debug; 17 | type Parameters: Clone; 18 | type Randomness: CanonicalSerialize + Clone + Default + Eq + UniformRand + Debug; 19 | 20 | fn setup(r: &mut R) -> Result; 21 | 22 | fn commit( 23 | parameters: &Self::Parameters, 24 | input: &[u8], 25 | r: &Self::Randomness, 26 | ) -> Result; 27 | } 28 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/pedersen/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | commitment::pedersen::{Commitment, Parameters, Randomness}, 3 | crh::pedersen::Window, 4 | }; 5 | use ark_ec::CurveGroup; 6 | use ark_ff::{ 7 | fields::{Field, PrimeField}, 8 | Zero, 9 | }; 10 | use ark_r1cs_std::prelude::*; 11 | use ark_relations::r1cs::{Namespace, SynthesisError}; 12 | use ark_serialize::CanonicalSerialize; 13 | use ark_std::{borrow::Borrow, iter, marker::PhantomData}; 14 | 15 | type ConstraintF = <::BaseField as Field>::BasePrimeField; 16 | 17 | #[derive(Derivative)] 18 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 19 | pub struct ParametersVar>> 20 | where 21 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 22 | { 23 | params: Parameters, 24 | #[doc(hidden)] 25 | _group_var: PhantomData, 26 | } 27 | 28 | #[derive(Clone, Debug)] 29 | pub struct RandomnessVar(Vec>); 30 | 31 | pub struct CommGadget>, W: Window> 32 | where 33 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 34 | { 35 | #[doc(hidden)] 36 | _curve: PhantomData<*const C>, 37 | #[doc(hidden)] 38 | _group_var: PhantomData<*const GG>, 39 | #[doc(hidden)] 40 | _window: PhantomData<*const W>, 41 | } 42 | 43 | impl crate::commitment::CommitmentGadget, ConstraintF> 44 | for CommGadget 45 | where 46 | C: CurveGroup, 47 | GG: CurveVar>, 48 | W: Window, 49 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 50 | ConstraintF: PrimeField, 51 | { 52 | type OutputVar = GG; 53 | type ParametersVar = ParametersVar; 54 | type RandomnessVar = RandomnessVar>; 55 | 56 | #[tracing::instrument(target = "r1cs", skip(parameters, r))] 57 | fn commit( 58 | parameters: &Self::ParametersVar, 59 | input: &[UInt8>], 60 | r: &Self::RandomnessVar, 61 | ) -> Result { 62 | assert!((input.len() * 8) <= (W::WINDOW_SIZE * W::NUM_WINDOWS)); 63 | 64 | // Convert input bytes to little-endian bits 65 | let mut input_in_bits: Vec> = input 66 | .iter() 67 | .flat_map(|byte| byte.to_bits_le().unwrap()) 68 | .collect(); 69 | 70 | // Pad input to `W::WINDOW_SIZE * W::NUM_WINDOWS`. 71 | let padding_size = (W::WINDOW_SIZE * W::NUM_WINDOWS) - input_in_bits.len(); 72 | input_in_bits.extend(iter::repeat(Boolean::FALSE).take(padding_size)); 73 | 74 | // Sanity checks 75 | assert_eq!(input_in_bits.len(), W::WINDOW_SIZE * W::NUM_WINDOWS); 76 | assert_eq!(parameters.params.generators.len(), W::NUM_WINDOWS); 77 | 78 | // Compute the unblinded commitment. 
Chunk the input bits into correctly sized windows 79 | let input_in_bits = input_in_bits.chunks(W::WINDOW_SIZE); 80 | let mut result = 81 | GG::precomputed_base_multiscalar_mul_le(¶meters.params.generators, input_in_bits)?; 82 | 83 | // Now add in the blinding factor h^r 84 | let rand_bits: Vec<_> = 85 | r.0.iter() 86 | .flat_map(|byte| byte.to_bits_le().unwrap()) 87 | .collect(); 88 | result.precomputed_base_scalar_mul_le( 89 | rand_bits 90 | .iter() 91 | .zip(¶meters.params.randomness_generator), 92 | )?; 93 | 94 | Ok(result) 95 | } 96 | } 97 | 98 | impl AllocVar, ConstraintF> for ParametersVar 99 | where 100 | C: CurveGroup, 101 | GG: CurveVar>, 102 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 103 | { 104 | fn new_variable>>( 105 | _cs: impl Into>>, 106 | f: impl FnOnce() -> Result, 107 | _mode: AllocationMode, 108 | ) -> Result { 109 | let params = f()?.borrow().clone(); 110 | Ok(ParametersVar { 111 | params, 112 | _group_var: PhantomData, 113 | }) 114 | } 115 | } 116 | 117 | impl AllocVar, F> for RandomnessVar 118 | where 119 | C: CurveGroup, 120 | F: PrimeField, 121 | { 122 | fn new_variable>>( 123 | cs: impl Into>, 124 | f: impl FnOnce() -> Result, 125 | mode: AllocationMode, 126 | ) -> Result { 127 | let mut r = Vec::new(); 128 | let _ = &f() 129 | .map(|b| b.borrow().0) 130 | .unwrap_or(C::ScalarField::zero()) 131 | .serialize_uncompressed(&mut r) 132 | .unwrap(); 133 | match mode { 134 | AllocationMode::Constant => Ok(Self(UInt8::constant_vec(&r))), 135 | AllocationMode::Input => UInt8::new_input_vec(cs, &r).map(Self), 136 | AllocationMode::Witness => UInt8::new_witness_vec(cs, &r).map(Self), 137 | } 138 | } 139 | } 140 | 141 | #[cfg(test)] 142 | mod test { 143 | use ark_ed_on_bls12_381::{constraints::EdwardsVar, EdwardsProjective as JubJub, Fq, Fr}; 144 | use ark_std::{test_rng, UniformRand}; 145 | 146 | use crate::{ 147 | commitment::{ 148 | pedersen::{constraints::CommGadget, Commitment, Randomness}, 149 | CommitmentGadget, CommitmentScheme, 150 | }, 151 | crh::pedersen, 152 | }; 153 | use ark_r1cs_std::prelude::*; 154 | use ark_relations::r1cs::ConstraintSystem; 155 | 156 | /// Checks that the primitive Pedersen commitment matches the gadget version 157 | #[test] 158 | fn commitment_gadget_test() { 159 | let cs = ConstraintSystem::::new_ref(); 160 | 161 | #[derive(Clone, PartialEq, Eq, Hash)] 162 | pub(super) struct Window; 163 | 164 | impl pedersen::Window for Window { 165 | const WINDOW_SIZE: usize = 4; 166 | const NUM_WINDOWS: usize = 9; 167 | } 168 | 169 | let input = [1u8; 4]; 170 | 171 | let rng = &mut test_rng(); 172 | 173 | type TestCOMM = Commitment; 174 | type TestCOMMGadget = CommGadget; 175 | 176 | let randomness = Randomness(Fr::rand(rng)); 177 | 178 | let parameters = Commitment::::setup(rng).unwrap(); 179 | let primitive_result = 180 | Commitment::::commit(¶meters, &input, &randomness).unwrap(); 181 | 182 | let mut input_var = vec![]; 183 | for input_byte in input.iter() { 184 | input_var.push(UInt8::new_witness(cs.clone(), || Ok(*input_byte)).unwrap()); 185 | } 186 | 187 | let randomness_var = 188 | >::RandomnessVar::new_witness( 189 | ark_relations::ns!(cs, "gadget_randomness"), 190 | || Ok(&randomness), 191 | ) 192 | .unwrap(); 193 | let parameters_var = 194 | >::ParametersVar::new_witness( 195 | ark_relations::ns!(cs, "gadget_parameters"), 196 | || Ok(¶meters), 197 | ) 198 | .unwrap(); 199 | let result_var = 200 | TestCOMMGadget::commit(¶meters_var, &input_var, &randomness_var).unwrap(); 201 | 202 | let primitive_result = primitive_result; 203 | 
assert_eq!(primitive_result, result_var.value().unwrap()); 204 | assert!(cs.is_satisfied().unwrap()); 205 | } 206 | } 207 | -------------------------------------------------------------------------------- /crypto-primitives/src/commitment/pedersen/mod.rs: -------------------------------------------------------------------------------- 1 | use super::CommitmentScheme; 2 | pub use crate::crh::pedersen::Window; 3 | use crate::{ 4 | crh::{pedersen, CRHScheme}, 5 | Error, 6 | }; 7 | use ark_ec::CurveGroup; 8 | use ark_ff::{BitIteratorLE, Field, PrimeField, ToConstraintField}; 9 | use ark_serialize::CanonicalSerialize; 10 | #[cfg(not(feature = "std"))] 11 | use ark_std::vec::Vec; 12 | use ark_std::{marker::PhantomData, rand::Rng, UniformRand}; 13 | 14 | #[cfg(feature = "r1cs")] 15 | pub mod constraints; 16 | 17 | #[derive(Clone)] 18 | pub struct Parameters { 19 | pub randomness_generator: Vec, 20 | pub generators: Vec>, 21 | } 22 | 23 | pub struct Commitment { 24 | group: PhantomData, 25 | window: PhantomData, 26 | } 27 | 28 | #[derive(Derivative, CanonicalSerialize)] 29 | #[derivative(Clone, PartialEq, Debug, Eq, Default)] 30 | pub struct Randomness(pub C::ScalarField); 31 | 32 | impl UniformRand for Randomness { 33 | #[inline] 34 | fn rand(rng: &mut R) -> Self { 35 | Randomness(UniformRand::rand(rng)) 36 | } 37 | } 38 | 39 | impl CommitmentScheme for Commitment { 40 | type Parameters = Parameters; 41 | type Randomness = Randomness; 42 | type Output = C::Affine; 43 | 44 | fn setup(rng: &mut R) -> Result { 45 | let time = start_timer!(|| format!( 46 | "PedersenCOMM::Setup: {} {}-bit windows; {{0,1}}^{{{}}} -> C", 47 | W::NUM_WINDOWS, 48 | W::WINDOW_SIZE, 49 | W::NUM_WINDOWS * W::WINDOW_SIZE 50 | )); 51 | let num_powers = ::MODULUS_BIT_SIZE as usize; 52 | let randomness_generator = pedersen::CRH::::generator_powers(num_powers, rng); 53 | let generators = pedersen::CRH::::create_generators(rng); 54 | end_timer!(time); 55 | 56 | Ok(Self::Parameters { 57 | randomness_generator, 58 | generators, 59 | }) 60 | } 61 | 62 | fn commit( 63 | parameters: &Self::Parameters, 64 | input: &[u8], 65 | randomness: &Self::Randomness, 66 | ) -> Result { 67 | let commit_time = start_timer!(|| "PedersenCOMM::Commit"); 68 | // If the input is too long, return an error. 69 | if input.len() > W::WINDOW_SIZE * W::NUM_WINDOWS { 70 | panic!("incorrect input length: {:?}", input.len()); 71 | } 72 | // Pad the input to the necessary length. 73 | let mut padded_input = Vec::with_capacity(input.len()); 74 | let mut input = input; 75 | if (input.len() * 8) < W::WINDOW_SIZE * W::NUM_WINDOWS { 76 | padded_input.extend_from_slice(input); 77 | let padded_length = (W::WINDOW_SIZE * W::NUM_WINDOWS) / 8; 78 | padded_input.resize(padded_length, 0u8); 79 | input = padded_input.as_slice(); 80 | } 81 | assert_eq!(parameters.generators.len(), W::NUM_WINDOWS); 82 | let input = input.to_vec(); 83 | // Invoke Pedersen CRH here, to prevent code duplication. 84 | 85 | let crh_parameters = pedersen::Parameters { 86 | generators: parameters.generators.clone(), 87 | }; 88 | let mut result: C = 89 | pedersen::CRH::::evaluate(&crh_parameters, input.as_slice())?.into(); 90 | let randomize_time = start_timer!(|| "Randomize"); 91 | 92 | // Compute h^r. 
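// (Editor's note: this comment is an illustrative addition, not part of the
// original source.) `randomness_generator` holds the precomputed points
// h, 2h, 4h, ..., so the loop below adds 2^j * h for every set bit j of the
// scalar r, i.e. it folds the blinding term r * h into the unblinded Pedersen
// hash computed above.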
93 | for (bit, power) in BitIteratorLE::new(randomness.0.into_bigint()) 94 | .into_iter() 95 | .zip(¶meters.randomness_generator) 96 | { 97 | if bit { 98 | result += power 99 | } 100 | } 101 | end_timer!(randomize_time); 102 | end_timer!(commit_time); 103 | 104 | Ok(result.into()) 105 | } 106 | } 107 | 108 | impl> 109 | ToConstraintField for Parameters 110 | { 111 | #[inline] 112 | fn to_field_elements(&self) -> Option> { 113 | Some(Vec::new()) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/bowe_hopwood/mod.rs: -------------------------------------------------------------------------------- 1 | //! The [Bowe-Hopwood-Pedersen] hash is a optimized variant of the Pedersen CRH for 2 | //! specific Twisted Edwards (TE) curves. See [Section 5.4.17 of the Zcash protocol specification](https://raw.githubusercontent.com/zcash/zips/master/protocol/protocol.pdf#concretepedersenhash) for a formal description of this hash function, specialized for the Jubjub curve. 3 | //! The implementation in this repository is generic across choice of TE curves. 4 | 5 | use crate::{ 6 | crh::{pedersen, CRHScheme, TwoToOneCRHScheme}, 7 | Error, 8 | }; 9 | use ark_ec::{ 10 | twisted_edwards::Projective as TEProjective, twisted_edwards::TECurveConfig, AdditiveGroup, 11 | CurveGroup, 12 | }; 13 | use ark_ff::fields::PrimeField; 14 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 15 | #[cfg(not(feature = "std"))] 16 | use ark_std::vec::Vec; 17 | use ark_std::{ 18 | borrow::Borrow, 19 | cfg_chunks, 20 | fmt::{Debug, Formatter, Result as FmtResult}, 21 | marker::PhantomData, 22 | rand::Rng, 23 | UniformRand, 24 | }; 25 | #[cfg(feature = "parallel")] 26 | use rayon::prelude::*; 27 | 28 | #[cfg(feature = "r1cs")] 29 | pub mod constraints; 30 | 31 | pub const CHUNK_SIZE: usize = 3; 32 | 33 | #[derive(Derivative, CanonicalSerialize, CanonicalDeserialize)] 34 | #[derivative(Clone(bound = ""), Default(bound = ""))] 35 | pub struct Parameters { 36 | pub generators: Vec>>, 37 | } 38 | 39 | pub struct CRH { 40 | group: PhantomData
<P>
, 41 | window: PhantomData, 42 | } 43 | 44 | impl CRH { 45 | pub fn create_generators(rng: &mut R) -> Vec>> { 46 | let mut generators = Vec::new(); 47 | for _ in 0..W::NUM_WINDOWS { 48 | let mut generators_for_segment = Vec::new(); 49 | let mut base = TEProjective::rand(rng); 50 | for _ in 0..W::WINDOW_SIZE { 51 | generators_for_segment.push(base); 52 | for _ in 0..4 { 53 | base.double_in_place(); 54 | } 55 | } 56 | generators.push(generators_for_segment); 57 | } 58 | generators 59 | } 60 | } 61 | 62 | pub struct TwoToOneCRH { 63 | group: PhantomData
<P>
, 64 | window: PhantomData, 65 | } 66 | 67 | impl TwoToOneCRH { 68 | const INPUT_SIZE_BITS: usize = pedersen::CRH::, W>::INPUT_SIZE_BITS; 69 | const HALF_INPUT_SIZE_BITS: usize = Self::INPUT_SIZE_BITS / 2; 70 | pub fn create_generators(rng: &mut R) -> Vec>> { 71 | CRH::::create_generators(rng) 72 | } 73 | } 74 | 75 | impl CRHScheme for CRH { 76 | type Input = [u8]; 77 | 78 | type Output = P::BaseField; 79 | type Parameters = Parameters
<P>
; 80 | 81 | fn setup(rng: &mut R) -> Result { 82 | fn calculate_num_chunks_in_segment() -> usize { 83 | let upper_limit = F::MODULUS_MINUS_ONE_DIV_TWO; 84 | let mut c = 0; 85 | let mut range = F::BigInt::from(2_u64); 86 | while range < upper_limit { 87 | range <<= 4; 88 | c += 1; 89 | } 90 | 91 | c 92 | } 93 | 94 | let maximum_num_chunks_in_segment = calculate_num_chunks_in_segment::(); 95 | if W::WINDOW_SIZE > maximum_num_chunks_in_segment { 96 | panic!( 97 | "Bowe-Hopwood-PedersenCRH hash must have a window size resulting in scalars < (p-1)/2, \ 98 | maximum segment size is {}", 99 | maximum_num_chunks_in_segment 100 | ); 101 | } 102 | 103 | let time = start_timer!(|| format!( 104 | "Bowe-Hopwood-PedersenCRH::Setup: {} segments of {} 3-bit chunks; {{0,1}}^{{{}}} -> P", 105 | W::NUM_WINDOWS, 106 | W::WINDOW_SIZE, 107 | W::WINDOW_SIZE * W::NUM_WINDOWS * CHUNK_SIZE 108 | )); 109 | let generators = Self::create_generators(rng); 110 | end_timer!(time); 111 | Ok(Self::Parameters { generators }) 112 | } 113 | 114 | fn evaluate>( 115 | parameters: &Self::Parameters, 116 | input: T, 117 | ) -> Result { 118 | let input = input.borrow(); 119 | let eval_time = start_timer!(|| "BoweHopwoodPedersenCRH::Eval"); 120 | 121 | if (input.len() * 8) > W::WINDOW_SIZE * W::NUM_WINDOWS * CHUNK_SIZE { 122 | panic!( 123 | "incorrect input bitlength {:?} for window params {:?}x{:?}x{}", 124 | input.len() * 8, 125 | W::WINDOW_SIZE, 126 | W::NUM_WINDOWS, 127 | CHUNK_SIZE, 128 | ); 129 | } 130 | 131 | let mut padded_input = Vec::with_capacity(input.len()); 132 | let input = pedersen::bytes_to_bits(input); 133 | // Pad the input if it is not the current length. 134 | padded_input.extend_from_slice(&input); 135 | if input.len() % CHUNK_SIZE != 0 { 136 | let remaining = CHUNK_SIZE - input.len() % CHUNK_SIZE; 137 | padded_input.extend_from_slice(&vec![false; remaining]); 138 | } 139 | 140 | assert_eq!(padded_input.len() % CHUNK_SIZE, 0); 141 | 142 | assert_eq!( 143 | parameters.generators.len(), 144 | W::NUM_WINDOWS, 145 | "Incorrect pp of size {:?} for window params {:?}x{:?}x{}", 146 | parameters.generators.len(), 147 | W::WINDOW_SIZE, 148 | W::NUM_WINDOWS, 149 | CHUNK_SIZE, 150 | ); 151 | for generators in parameters.generators.iter() { 152 | assert_eq!(generators.len(), W::WINDOW_SIZE); 153 | } 154 | assert_eq!(CHUNK_SIZE, 3); 155 | 156 | // Compute sum of h_i^{sum of 157 | // (1-2*c_{i,j,2})*(1+c_{i,j,0}+2*c_{i,j,1})*2^{4*(j-1)} for all j in segment} 158 | // for all i. Described in section 5.4.1.7 in the Zcash protocol 159 | // specification. 160 | 161 | let result = cfg_chunks!(padded_input, W::WINDOW_SIZE * CHUNK_SIZE) 162 | .zip(¶meters.generators) 163 | .map(|(segment_bits, segment_generators)| { 164 | cfg_chunks!(segment_bits, CHUNK_SIZE) 165 | .zip(segment_generators) 166 | .map(|(chunk_bits, generator)| { 167 | let mut encoded = *generator; 168 | if chunk_bits[0] { 169 | encoded += generator; 170 | } 171 | if chunk_bits[1] { 172 | encoded += &generator.double(); 173 | } 174 | if chunk_bits[2] { 175 | encoded = -encoded; 176 | } 177 | encoded 178 | }) 179 | .sum::>() 180 | }) 181 | .sum::>(); 182 | 183 | end_timer!(eval_time); 184 | 185 | Ok(result.into_affine().x) 186 | } 187 | } 188 | 189 | impl TwoToOneCRHScheme for TwoToOneCRH { 190 | type Input = [u8]; 191 | 192 | type Output = P::BaseField; 193 | type Parameters = Parameters
<P>
; 194 | 195 | fn setup(r: &mut R) -> Result { 196 | CRH::::setup(r) 197 | } 198 | 199 | /// A simple implementation method: just concat the left input and right input together 200 | /// 201 | /// `evaluate` requires that `left_input` and `right_input` are of equal length. 202 | fn evaluate>( 203 | parameters: &Self::Parameters, 204 | left_input: T, 205 | right_input: T, 206 | ) -> Result { 207 | let left_input = left_input.borrow(); 208 | let right_input = right_input.borrow(); 209 | assert_eq!( 210 | left_input.len(), 211 | right_input.len(), 212 | "left and right input should be of equal length" 213 | ); 214 | // check overflow 215 | 216 | debug_assert!(left_input.len() * 8 <= Self::HALF_INPUT_SIZE_BITS); 217 | debug_assert!(right_input.len() * 8 <= Self::HALF_INPUT_SIZE_BITS); 218 | 219 | let mut buffer = vec![0u8; Self::INPUT_SIZE_BITS / 8]; 220 | 221 | buffer 222 | .iter_mut() 223 | .zip(left_input.iter().chain(right_input.iter())) 224 | .for_each(|(b, l_b)| *b = *l_b); 225 | 226 | CRH::::evaluate(parameters, buffer) 227 | } 228 | 229 | fn compress>( 230 | parameters: &Self::Parameters, 231 | left_input: T, 232 | right_input: T, 233 | ) -> Result { 234 | Self::evaluate( 235 | parameters, 236 | crate::to_uncompressed_bytes!(left_input)?, 237 | crate::to_uncompressed_bytes!(right_input)?, 238 | ) 239 | } 240 | } 241 | 242 | impl Debug for Parameters
<P>
{ 243 | fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { 244 | writeln!(f, "Bowe-Hopwood-Pedersen Hash Parameters {{")?; 245 | for (i, g) in self.generators.iter().enumerate() { 246 | writeln!(f, "\t Generator {}: {:?}", i, g)?; 247 | } 248 | writeln!(f, "}}") 249 | } 250 | } 251 | 252 | #[cfg(test)] 253 | mod test { 254 | use crate::crh::{bowe_hopwood, pedersen::Window, CRHScheme}; 255 | use ark_ed_on_bls12_381::EdwardsConfig; 256 | use ark_std::test_rng; 257 | 258 | #[test] 259 | fn test_simple_bh() { 260 | #[derive(Clone)] 261 | struct TestWindow {} 262 | impl Window for TestWindow { 263 | const WINDOW_SIZE: usize = 63; 264 | const NUM_WINDOWS: usize = 8; 265 | } 266 | 267 | let rng = &mut test_rng(); 268 | let params = bowe_hopwood::CRH::::setup(rng).unwrap(); 269 | let _ = 270 | bowe_hopwood::CRH::::evaluate(¶ms, [1, 2, 3]).unwrap(); 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::crh::{CRHScheme, TwoToOneCRHScheme}; 2 | use ark_ff::Field; 3 | use ark_r1cs_std::prelude::*; 4 | use ark_relations::r1cs::SynthesisError; 5 | use ark_std::fmt::Debug; 6 | 7 | pub trait CRHSchemeGadget: Sized { 8 | type InputVar: ?Sized; 9 | type OutputVar: EqGadget 10 | + ToBytesGadget 11 | + CondSelectGadget 12 | + AllocVar 13 | + R1CSVar 14 | + Debug 15 | + Clone 16 | + Sized; 17 | type ParametersVar: AllocVar + Clone; 18 | 19 | fn evaluate( 20 | parameters: &Self::ParametersVar, 21 | input: &Self::InputVar, 22 | ) -> Result; 23 | } 24 | 25 | pub trait TwoToOneCRHSchemeGadget: Sized { 26 | type InputVar: ?Sized; 27 | type OutputVar: EqGadget 28 | + ToBytesGadget 29 | + CondSelectGadget 30 | + AllocVar 31 | + R1CSVar 32 | + Debug 33 | + Clone 34 | + Sized; 35 | 36 | type ParametersVar: AllocVar + Clone; 37 | 38 | fn evaluate( 39 | parameters: &Self::ParametersVar, 40 | left_input: &Self::InputVar, 41 | right_input: &Self::InputVar, 42 | ) -> Result; 43 | 44 | fn compress( 45 | parameters: &Self::ParametersVar, 46 | left_input: &Self::OutputVar, 47 | right_input: &Self::OutputVar, 48 | ) -> Result; 49 | } 50 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/injective_map/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::crh::{ 2 | constraints, 3 | injective_map::{ 4 | InjectiveMap, PedersenCRHCompressor, PedersenTwoToOneCRHCompressor, TECompressor, 5 | }, 6 | pedersen::{constraints as ped_constraints, Window}, 7 | CRHSchemeGadget, TwoToOneCRHSchemeGadget, 8 | }; 9 | use ark_ec::{ 10 | twisted_edwards::{Projective as TEProjective, TECurveConfig}, 11 | CurveConfig, CurveGroup, 12 | }; 13 | use ark_ff::fields::{Field, PrimeField}; 14 | use ark_r1cs_std::{ 15 | fields::fp::FpVar, groups::curves::twisted_edwards::AffineVar as TEVar, prelude::*, 16 | }; 17 | use ark_relations::r1cs::SynthesisError; 18 | use ark_std::{fmt::Debug, marker::PhantomData}; 19 | 20 | type ConstraintF = <::BaseField as Field>::BasePrimeField; 21 | 22 | pub trait InjectiveMapGadget, GG: CurveVar>> 23 | where 24 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 25 | { 26 | type OutputVar: EqGadget> 27 | + ToBytesGadget> 28 | + CondSelectGadget> 29 | + AllocVar> 30 | + R1CSVar, Value = I::Output> 31 | + Debug 32 | + Clone 33 | + Sized; 34 | 35 | fn evaluate(ge: &GG) -> Result; 36 | } 37 | 38 | pub struct TECompressorGadget; 39 | 40 
| impl InjectiveMapGadget, TECompressor, TEVar>> 41 | for TECompressorGadget 42 | where 43 | F: PrimeField, 44 | P: TECurveConfig + CurveConfig, 45 | { 46 | type OutputVar = FpVar; 47 | 48 | fn evaluate(ge: &TEVar>) -> Result { 49 | Ok(ge.x.clone()) 50 | } 51 | } 52 | 53 | pub struct PedersenCRHCompressorGadget 54 | where 55 | C: CurveGroup, 56 | I: InjectiveMap, 57 | W: Window, 58 | GG: CurveVar>, 59 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 60 | IG: InjectiveMapGadget, 61 | { 62 | #[doc(hidden)] 63 | _compressor: PhantomData, 64 | #[doc(hidden)] 65 | _compressor_gadget: PhantomData, 66 | #[doc(hidden)] 67 | _crh: ped_constraints::CRHGadget, 68 | } 69 | 70 | impl constraints::CRHSchemeGadget, ConstraintF> 71 | for PedersenCRHCompressorGadget 72 | where 73 | C: CurveGroup, 74 | I: InjectiveMap, 75 | GG: CurveVar>, 76 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 77 | IG: InjectiveMapGadget, 78 | W: Window, 79 | { 80 | type InputVar = [UInt8>]; 81 | 82 | type OutputVar = IG::OutputVar; 83 | type ParametersVar = ped_constraints::CRHParametersVar; 84 | 85 | #[tracing::instrument(target = "r1cs", skip(parameters, input))] 86 | fn evaluate( 87 | parameters: &Self::ParametersVar, 88 | input: &Self::InputVar, 89 | ) -> Result { 90 | let result = as CRHSchemeGadget<_, _>>::evaluate( 91 | parameters, input, 92 | )?; 93 | IG::evaluate(&result) 94 | } 95 | } 96 | 97 | pub struct PedersenTwoToOneCRHCompressorGadget 98 | where 99 | C: CurveGroup, 100 | I: InjectiveMap, 101 | W: Window, 102 | GG: CurveVar>, 103 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 104 | IG: InjectiveMapGadget, 105 | { 106 | #[doc(hidden)] 107 | _compressor: PhantomData, 108 | #[doc(hidden)] 109 | _compressor_gadget: PhantomData, 110 | #[doc(hidden)] 111 | _crh: ped_constraints::CRHGadget, 112 | } 113 | 114 | impl 115 | constraints::TwoToOneCRHSchemeGadget, ConstraintF> 116 | for PedersenTwoToOneCRHCompressorGadget 117 | where 118 | C: CurveGroup, 119 | I: InjectiveMap, 120 | GG: CurveVar>, 121 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 122 | IG: InjectiveMapGadget, 123 | W: Window, 124 | { 125 | type InputVar = [UInt8>]; 126 | 127 | type OutputVar = IG::OutputVar; 128 | type ParametersVar = ped_constraints::CRHParametersVar; 129 | 130 | #[tracing::instrument(target = "r1cs", skip(parameters))] 131 | fn evaluate( 132 | parameters: &Self::ParametersVar, 133 | left_input: &Self::InputVar, 134 | right_input: &Self::InputVar, 135 | ) -> Result { 136 | // assume equality of left and right length 137 | assert_eq!(left_input.len(), right_input.len()); 138 | let result = ped_constraints::TwoToOneCRHGadget::::evaluate( 139 | parameters, 140 | left_input, 141 | right_input, 142 | )?; 143 | IG::evaluate(&result) 144 | } 145 | 146 | fn compress( 147 | parameters: &Self::ParametersVar, 148 | left_input: &Self::OutputVar, 149 | right_input: &Self::OutputVar, 150 | ) -> Result { 151 | let left_input_bytes = left_input.to_non_unique_bytes_le()?; 152 | let right_input_bytes = right_input.to_non_unique_bytes_le()?; 153 | >::evaluate( 154 | parameters, 155 | &left_input_bytes, 156 | &right_input_bytes, 157 | ) 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/injective_map/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{pedersen, CRHScheme, TwoToOneCRHScheme}, 3 | Error, 4 | }; 5 | use ark_ec::{ 6 | twisted_edwards::{Affine as TEAffine, Projective as TEProjective, TECurveConfig}, 7 | CurveConfig, 
CurveGroup, 8 | }; 9 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 10 | #[cfg(not(feature = "std"))] 11 | use ark_std::vec::Vec; 12 | use ark_std::{borrow::Borrow, fmt::Debug, hash::Hash, marker::PhantomData, rand::Rng}; 13 | #[cfg(feature = "r1cs")] 14 | pub mod constraints; 15 | 16 | pub trait InjectiveMap { 17 | type Output: Clone + Eq + Hash + Default + Debug + CanonicalSerialize + CanonicalDeserialize; 18 | 19 | fn injective_map(ge: &C::Affine) -> Result; 20 | } 21 | 22 | pub struct TECompressor; 23 | 24 | impl InjectiveMap> for TECompressor { 25 | type Output =
<P as CurveConfig>
::BaseField; 26 | 27 | fn injective_map(ge: &TEAffine
<P>
) -> Result { 28 | debug_assert!(ge.is_in_correct_subgroup_assuming_on_curve()); 29 | Ok(ge.x) 30 | } 31 | } 32 | 33 | pub struct PedersenCRHCompressor, W: pedersen::Window> { 34 | _group: PhantomData, 35 | _compressor: PhantomData, 36 | _window: PhantomData, 37 | } 38 | 39 | impl, W: pedersen::Window> CRHScheme 40 | for PedersenCRHCompressor 41 | { 42 | type Input = as CRHScheme>::Input; 43 | type Output = I::Output; 44 | type Parameters = pedersen::Parameters; 45 | 46 | fn setup(rng: &mut R) -> Result { 47 | let time = start_timer!(|| format!("PedersenCRHCompressor::Setup")); 48 | let params = pedersen::CRH::::setup(rng); 49 | end_timer!(time); 50 | params 51 | } 52 | 53 | fn evaluate>( 54 | parameters: &Self::Parameters, 55 | input: T, 56 | ) -> Result { 57 | let eval_time = start_timer!(|| "PedersenCRHCompressor::Eval"); 58 | let result = I::injective_map(&pedersen::CRH::::evaluate(parameters, input)?)?; 59 | end_timer!(eval_time); 60 | Ok(result) 61 | } 62 | } 63 | 64 | pub struct PedersenTwoToOneCRHCompressor, W: pedersen::Window> { 65 | _group: PhantomData, 66 | _compressor: PhantomData, 67 | _window: PhantomData, 68 | } 69 | 70 | impl, W: pedersen::Window> TwoToOneCRHScheme 71 | for PedersenTwoToOneCRHCompressor 72 | { 73 | type Input = as TwoToOneCRHScheme>::Input; 74 | type Output = I::Output; 75 | type Parameters = pedersen::Parameters; 76 | 77 | fn setup(r: &mut R) -> Result { 78 | pedersen::TwoToOneCRH::::setup(r) 79 | } 80 | 81 | fn evaluate>( 82 | parameters: &Self::Parameters, 83 | left_input: T, 84 | right_input: T, 85 | ) -> Result { 86 | let eval_time = start_timer!(|| "PedersenCRHCompressor::Eval"); 87 | let result = I::injective_map(&pedersen::TwoToOneCRH::::evaluate( 88 | parameters, 89 | left_input, 90 | right_input, 91 | )?)?; 92 | end_timer!(eval_time); 93 | Ok(result) 94 | } 95 | 96 | fn compress>( 97 | parameters: &Self::Parameters, 98 | left_input: T, 99 | right_input: T, 100 | ) -> Result { 101 | // convert output to input 102 | Self::evaluate( 103 | parameters, 104 | crate::to_uncompressed_bytes!(left_input)?, 105 | crate::to_uncompressed_bytes!(right_input)?, 106 | ) 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::upper_case_acronyms)] 2 | use crate::Error; 3 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 4 | use ark_std::{borrow::Borrow, fmt::Debug, hash::Hash, rand::Rng}; 5 | 6 | pub mod bowe_hopwood; 7 | #[cfg(feature = "r1cs")] 8 | pub mod constraints; 9 | pub mod injective_map; 10 | pub mod pedersen; 11 | pub mod poseidon; 12 | pub mod sha256; 13 | #[cfg(feature = "r1cs")] 14 | pub use constraints::*; 15 | 16 | /// Interface to CRH. Note that in this release, while all implementations of `CRH` have fixed length, 17 | /// variable length CRH may also implement this trait in future. 18 | pub trait CRHScheme { 19 | type Input: ?Sized + Send; 20 | type Output: Clone + Eq + Debug + Hash + Default + CanonicalSerialize + CanonicalDeserialize; 21 | type Parameters: Clone + CanonicalSerialize + CanonicalDeserialize + Sync; 22 | 23 | fn setup(r: &mut R) -> Result; 24 | fn evaluate>( 25 | parameters: &Self::Parameters, 26 | input: T, 27 | ) -> Result; 28 | } 29 | 30 | /// CRH used by merkle tree inner hash. Merkle tree will convert leaf output to bytes first. 
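///
/// A minimal usage sketch (editor's illustration, not from the original
/// source; `H` stands for any implementor, e.g. `pedersen::TwoToOneCRH<C, W>`,
/// and `rng` is assumed to be an `ark_std`-compatible RNG):
///
/// ```ignore
/// let params = H::setup(&mut rng)?;
/// // Hash two equal-length byte inputs, e.g. serialized child digests:
/// let parent = H::evaluate(&params, left_bytes, right_bytes)?;
/// // Or combine two previous outputs directly:
/// let parent = H::compress(&params, &left_digest, &right_digest)?;
/// ```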
31 | pub trait TwoToOneCRHScheme { 32 | /// Raw Input type of TwoToOneCRH 33 | type Input: ?Sized; 34 | /// Raw Output type of TwoToOneCRH 35 | type Output: Clone + Eq + Debug + Hash + Default + CanonicalSerialize + CanonicalDeserialize; 36 | type Parameters: Clone + CanonicalSerialize + CanonicalDeserialize + Sync; 37 | 38 | fn setup(r: &mut R) -> Result; 39 | 40 | fn evaluate>( 41 | parameters: &Self::Parameters, 42 | left_input: T, 43 | right_input: T, 44 | ) -> Result; 45 | 46 | fn compress>( 47 | parameters: &Self::Parameters, 48 | left_input: T, 49 | right_input: T, 50 | ) -> Result; 51 | } 52 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/pedersen/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::crh::{ 2 | pedersen::{Parameters, TwoToOneCRH, Window, CRH}, 3 | CRHSchemeGadget as CRHGadgetTrait, CRHSchemeGadget, TwoToOneCRHSchemeGadget, 4 | }; 5 | use ark_ec::CurveGroup; 6 | use ark_ff::Field; 7 | use ark_r1cs_std::prelude::*; 8 | use ark_relations::r1cs::{Namespace, SynthesisError}; 9 | #[cfg(not(feature = "std"))] 10 | use ark_std::vec::Vec; 11 | use ark_std::{borrow::Borrow, iter, marker::PhantomData}; 12 | 13 | #[derive(Derivative)] 14 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 15 | pub struct CRHParametersVar>> 16 | where 17 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 18 | { 19 | params: Parameters, 20 | #[doc(hidden)] 21 | _group_g: PhantomData, 22 | } 23 | 24 | type ConstraintF = <::BaseField as Field>::BasePrimeField; 25 | pub struct CRHGadget>, W: Window> 26 | where 27 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 28 | { 29 | #[doc(hidden)] 30 | _group: PhantomData<*const C>, 31 | #[doc(hidden)] 32 | _group_var: PhantomData<*const GG>, 33 | #[doc(hidden)] 34 | _window: PhantomData<*const W>, 35 | } 36 | 37 | impl CRHSchemeGadget, ConstraintF> for CRHGadget 38 | where 39 | C: CurveGroup, 40 | GG: CurveVar>, 41 | W: Window, 42 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 43 | { 44 | type InputVar = [UInt8>]; 45 | type OutputVar = GG; 46 | type ParametersVar = CRHParametersVar; 47 | 48 | #[tracing::instrument(target = "r1cs", skip(parameters, input))] 49 | fn evaluate( 50 | parameters: &Self::ParametersVar, 51 | input: &Self::InputVar, 52 | ) -> Result { 53 | assert!((input.len() * 8) <= (W::WINDOW_SIZE * W::NUM_WINDOWS)); 54 | 55 | // Convert input bytes to little-endian bits 56 | let mut input_in_bits: Vec> = input 57 | .iter() 58 | .flat_map(|byte| byte.to_bits_le().unwrap()) 59 | .collect(); 60 | 61 | // Pad input to `W::WINDOW_SIZE * W::NUM_WINDOWS`. 62 | let padding_size = (W::WINDOW_SIZE * W::NUM_WINDOWS) - input_in_bits.len(); 63 | input_in_bits.extend(iter::repeat(Boolean::FALSE).take(padding_size)); 64 | 65 | // Sanity checks 66 | assert_eq!(input_in_bits.len(), W::WINDOW_SIZE * W::NUM_WINDOWS); 67 | assert_eq!(parameters.params.generators.len(), W::NUM_WINDOWS); 68 | 69 | // Compute the Pedersen CRH. 
Chunk the input bits into correctly sized windows 70 | let input_in_bits = input_in_bits.chunks(W::WINDOW_SIZE); 71 | 72 | let result = 73 | GG::precomputed_base_multiscalar_mul_le(¶meters.params.generators, input_in_bits)?; 74 | 75 | Ok(result) 76 | } 77 | } 78 | 79 | pub struct TwoToOneCRHGadget>, W: Window> 80 | where 81 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 82 | { 83 | #[doc(hidden)] 84 | _group: PhantomData<*const C>, 85 | #[doc(hidden)] 86 | _group_var: PhantomData<*const GG>, 87 | #[doc(hidden)] 88 | _window: PhantomData<*const W>, 89 | } 90 | 91 | impl TwoToOneCRHSchemeGadget, ConstraintF> 92 | for TwoToOneCRHGadget 93 | where 94 | C: CurveGroup, 95 | GG: CurveVar>, 96 | W: Window, 97 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 98 | { 99 | type InputVar = [UInt8>]; 100 | type OutputVar = GG; 101 | type ParametersVar = CRHParametersVar; 102 | 103 | #[tracing::instrument(target = "r1cs", skip(parameters))] 104 | fn evaluate( 105 | parameters: &Self::ParametersVar, 106 | left_input: &Self::InputVar, 107 | right_input: &Self::InputVar, 108 | ) -> Result { 109 | // assume equality of left and right length 110 | assert_eq!(left_input.len(), right_input.len()); 111 | let chained_input: Vec<_> = left_input 112 | .to_vec() 113 | .into_iter() 114 | .chain(right_input.to_vec().into_iter()) 115 | .collect(); 116 | CRHGadget::::evaluate(parameters, &chained_input) 117 | } 118 | 119 | #[tracing::instrument(target = "r1cs", skip(parameters))] 120 | fn compress( 121 | parameters: &Self::ParametersVar, 122 | left_input: &Self::OutputVar, 123 | right_input: &Self::OutputVar, 124 | ) -> Result { 125 | // convert output to bytes 126 | let left_input = left_input.to_bytes_le()?; 127 | let right_input = right_input.to_bytes_le()?; 128 | Self::evaluate(parameters, &left_input, &right_input) 129 | } 130 | } 131 | 132 | impl AllocVar, ConstraintF> for CRHParametersVar 133 | where 134 | C: CurveGroup, 135 | GG: CurveVar>, 136 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 137 | { 138 | #[tracing::instrument(target = "r1cs", skip(_cs, f))] 139 | fn new_variable>>( 140 | _cs: impl Into>>, 141 | f: impl FnOnce() -> Result, 142 | _mode: AllocationMode, 143 | ) -> Result { 144 | let params = f()?.borrow().clone(); 145 | Ok(CRHParametersVar { 146 | params, 147 | _group_g: PhantomData, 148 | }) 149 | } 150 | } 151 | 152 | #[cfg(test)] 153 | mod test { 154 | use crate::crh::{ 155 | pedersen, CRHScheme, CRHSchemeGadget, TwoToOneCRHScheme, TwoToOneCRHSchemeGadget, 156 | }; 157 | use ark_ec::CurveGroup; 158 | use ark_ed_on_bls12_381::{constraints::EdwardsVar, EdwardsProjective as JubJub, Fq as Fr}; 159 | use ark_r1cs_std::prelude::*; 160 | use ark_relations::r1cs::{ConstraintSystem, ConstraintSystemRef}; 161 | use ark_std::rand::Rng; 162 | use ark_std::{test_rng, UniformRand}; 163 | 164 | type TestCRH = pedersen::CRH; 165 | type TestCRHGadget = pedersen::constraints::CRHGadget; 166 | 167 | type TestTwoToOneCRH = pedersen::TwoToOneCRH; 168 | type TestTwoToOneCRHGadget = 169 | pedersen::constraints::TwoToOneCRHGadget; 170 | 171 | #[derive(Clone, PartialEq, Eq, Hash)] 172 | pub(super) struct Window; 173 | 174 | impl pedersen::Window for Window { 175 | const WINDOW_SIZE: usize = 127; 176 | const NUM_WINDOWS: usize = 9; 177 | } 178 | 179 | fn generate_u8_input( 180 | cs: ConstraintSystemRef, 181 | size: usize, 182 | rng: &mut R, 183 | ) -> (Vec, Vec>) { 184 | let mut input = vec![1u8; size]; 185 | rng.fill_bytes(&mut input); 186 | 187 | let mut input_bytes = vec![]; 188 | for byte in input.iter() { 189 | 
input_bytes.push(UInt8::new_witness(cs.clone(), || Ok(byte)).unwrap()); 190 | } 191 | (input, input_bytes) 192 | } 193 | 194 | fn generate_affine( 195 | cs: ConstraintSystemRef, 196 | rng: &mut R, 197 | ) -> (::Affine, EdwardsVar) { 198 | let val = ::Affine::rand(rng); 199 | let val_var = EdwardsVar::new_witness(cs.clone(), || Ok(val.clone())).unwrap(); 200 | (val, val_var) 201 | } 202 | 203 | #[test] 204 | fn test_native_equality() { 205 | let rng = &mut test_rng(); 206 | let cs = ConstraintSystem::::new_ref(); 207 | 208 | let (input, input_var) = generate_u8_input(cs.clone(), 128, rng); 209 | 210 | let parameters = TestCRH::setup(rng).unwrap(); 211 | let primitive_result = TestCRH::evaluate(¶meters, input.as_slice()).unwrap(); 212 | 213 | let parameters_var = pedersen::constraints::CRHParametersVar::new_constant( 214 | ark_relations::ns!(cs, "CRH Parameters"), 215 | ¶meters, 216 | ) 217 | .unwrap(); 218 | 219 | let result_var = TestCRHGadget::evaluate(¶meters_var, &input_var).unwrap(); 220 | 221 | let primitive_result = primitive_result; 222 | assert_eq!(primitive_result, result_var.value().unwrap()); 223 | assert!(cs.is_satisfied().unwrap()); 224 | } 225 | 226 | #[test] 227 | fn test_naive_two_to_one_equality() { 228 | let rng = &mut test_rng(); 229 | let cs = ConstraintSystem::::new_ref(); 230 | 231 | let (left_input, left_input_var) = generate_affine(cs.clone(), rng); 232 | let (right_input, right_input_var) = generate_affine(cs.clone(), rng); 233 | let parameters = TestTwoToOneCRH::setup(rng).unwrap(); 234 | let primitive_result = 235 | TestTwoToOneCRH::compress(¶meters, left_input, right_input).unwrap(); 236 | 237 | let parameters_var = pedersen::constraints::CRHParametersVar::new_constant( 238 | ark_relations::ns!(cs, "CRH Parameters"), 239 | ¶meters, 240 | ) 241 | .unwrap(); 242 | 243 | let result_var = 244 | TestTwoToOneCRHGadget::compress(¶meters_var, &left_input_var, &right_input_var) 245 | .unwrap(); 246 | 247 | let primitive_result = primitive_result; 248 | assert_eq!(primitive_result, result_var.value().unwrap().into_affine()); 249 | assert!(cs.is_satisfied().unwrap()); 250 | } 251 | } 252 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/pedersen/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{CRHScheme, TwoToOneCRHScheme}, 3 | Error, 4 | }; 5 | use ark_ec::CurveGroup; 6 | use ark_ff::{Field, ToConstraintField}; 7 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 8 | #[cfg(not(feature = "std"))] 9 | use ark_std::vec::Vec; 10 | use ark_std::{ 11 | borrow::Borrow, 12 | cfg_chunks, 13 | fmt::{Debug, Formatter, Result as FmtResult}, 14 | marker::PhantomData, 15 | rand::Rng, 16 | }; 17 | #[cfg(feature = "parallel")] 18 | use rayon::prelude::*; 19 | 20 | #[cfg(feature = "r1cs")] 21 | pub mod constraints; 22 | 23 | pub trait Window: Clone { 24 | const WINDOW_SIZE: usize; 25 | const NUM_WINDOWS: usize; 26 | } 27 | 28 | #[derive(Clone, Default, CanonicalSerialize, CanonicalDeserialize)] 29 | pub struct Parameters { 30 | pub generators: Vec>, 31 | } 32 | 33 | pub struct CRH { 34 | group: PhantomData, 35 | window: PhantomData, 36 | } 37 | 38 | impl CRH { 39 | pub(crate) const INPUT_SIZE_BITS: usize = W::WINDOW_SIZE * W::NUM_WINDOWS; 40 | pub fn create_generators(rng: &mut R) -> Vec> { 41 | let mut generators_powers = Vec::new(); 42 | for _ in 0..W::NUM_WINDOWS { 43 | generators_powers.push(Self::generator_powers(W::WINDOW_SIZE, rng)); 44 | } 45 
| generators_powers 46 | } 47 | 48 | pub fn generator_powers(num_powers: usize, rng: &mut R) -> Vec { 49 | let mut cur_gen_powers = Vec::with_capacity(num_powers); 50 | let mut base = C::rand(rng); 51 | for _ in 0..num_powers { 52 | cur_gen_powers.push(base); 53 | base.double_in_place(); 54 | } 55 | cur_gen_powers 56 | } 57 | } 58 | 59 | impl CRHScheme for CRH { 60 | type Input = [u8]; 61 | type Output = C::Affine; 62 | type Parameters = Parameters; 63 | 64 | fn setup(rng: &mut R) -> Result { 65 | let time = start_timer!(|| format!( 66 | "PedersenCRH::Setup: {} {}-bit windows; {{0,1}}^{{{}}} -> C", 67 | W::NUM_WINDOWS, 68 | W::WINDOW_SIZE, 69 | W::NUM_WINDOWS * W::WINDOW_SIZE 70 | )); 71 | let generators = Self::create_generators(rng); 72 | end_timer!(time); 73 | Ok(Self::Parameters { generators }) 74 | } 75 | 76 | fn evaluate>( 77 | parameters: &Self::Parameters, 78 | input: T, 79 | ) -> Result { 80 | let eval_time = start_timer!(|| "PedersenCRH::Eval"); 81 | let input = input.borrow(); 82 | if (input.len() * 8) > W::WINDOW_SIZE * W::NUM_WINDOWS { 83 | panic!( 84 | "incorrect input length {:?} for window params {:?}✕{:?}", 85 | input.len(), 86 | W::WINDOW_SIZE, 87 | W::NUM_WINDOWS 88 | ); 89 | } 90 | 91 | let mut padded_input = Vec::with_capacity(input.len()); 92 | let mut input = input; 93 | // Pad the input if it is not the current length. 94 | if (input.len() * 8) < W::WINDOW_SIZE * W::NUM_WINDOWS { 95 | padded_input.extend_from_slice(input); 96 | let padded_length = (W::WINDOW_SIZE * W::NUM_WINDOWS) / 8; 97 | padded_input.resize(padded_length, 0u8); 98 | input = padded_input.as_slice(); 99 | } 100 | 101 | assert_eq!( 102 | parameters.generators.len(), 103 | W::NUM_WINDOWS, 104 | "Incorrect pp of size {:?}✕{:?} for window params {:?}✕{:?}", 105 | parameters.generators[0].len(), 106 | parameters.generators.len(), 107 | W::WINDOW_SIZE, 108 | W::NUM_WINDOWS 109 | ); 110 | 111 | // Compute sum of h_i^{m_i} for all i. 
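// (Editor's note: this comment is an illustrative addition, not part of the
// original source.) The padded input is split into NUM_WINDOWS chunks of
// WINDOW_SIZE bits; bit j of window i selects the precomputed point
// 2^j * g_i, so the hash is the group element sum_i sum_j m_{i,j} * 2^j * g_i.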
112 | let bits = bytes_to_bits(input); 113 | let result = cfg_chunks!(bits, W::WINDOW_SIZE) 114 | .zip(¶meters.generators) 115 | .map(|(bits, generator_powers)| { 116 | let mut encoded = C::zero(); 117 | for (bit, base) in bits.iter().zip(generator_powers.iter()) { 118 | if *bit { 119 | encoded += base; 120 | } 121 | } 122 | encoded 123 | }) 124 | .sum::(); 125 | 126 | end_timer!(eval_time); 127 | 128 | Ok(result.into()) 129 | } 130 | } 131 | 132 | pub struct TwoToOneCRH { 133 | group: PhantomData, 134 | window: PhantomData, 135 | } 136 | 137 | impl TwoToOneCRH { 138 | pub(crate) const INPUT_SIZE_BITS: usize = W::WINDOW_SIZE * W::NUM_WINDOWS; 139 | const HALF_INPUT_SIZE_BITS: usize = Self::INPUT_SIZE_BITS / 2; 140 | pub fn create_generators(rng: &mut R) -> Vec> { 141 | CRH::::create_generators(rng) 142 | } 143 | 144 | pub fn generator_powers(num_powers: usize, rng: &mut R) -> Vec { 145 | CRH::::generator_powers(num_powers, rng) 146 | } 147 | } 148 | 149 | impl TwoToOneCRHScheme for TwoToOneCRH { 150 | type Input = [u8]; 151 | type Output = C::Affine; 152 | type Parameters = Parameters; 153 | 154 | fn setup(r: &mut R) -> Result { 155 | CRH::::setup(r) 156 | } 157 | 158 | fn evaluate>( 159 | parameters: &Self::Parameters, 160 | left_input: T, 161 | right_input: T, 162 | ) -> Result { 163 | let left_input = left_input.borrow(); 164 | let right_input = right_input.borrow(); 165 | assert_eq!( 166 | left_input.len(), 167 | right_input.len(), 168 | "left and right input should be of equal length" 169 | ); 170 | // check overflow 171 | 172 | debug_assert!(left_input.len() * 8 <= Self::HALF_INPUT_SIZE_BITS); 173 | 174 | let mut buffer = vec![0u8; (Self::HALF_INPUT_SIZE_BITS + Self::HALF_INPUT_SIZE_BITS) / 8]; 175 | 176 | buffer 177 | .iter_mut() 178 | .zip(left_input.iter().chain(right_input.iter())) 179 | .for_each(|(b, l_b)| *b = *l_b); 180 | 181 | CRH::::evaluate(parameters, buffer.as_slice()) 182 | } 183 | 184 | /// A simple implementation method: just concat the left input and right input together 185 | /// 186 | /// `evaluate` requires that `left_input` and `right_input` are of equal length. 
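///
/// (Editor's note: this paragraph is an illustrative addition, not from the
/// original source.) `compress` below differs from `evaluate` only in that it
/// first serializes the two digests with `to_uncompressed_bytes!` and then
/// hashes the concatenation of the resulting equal-length byte strings.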
187 | fn compress>( 188 | parameters: &Self::Parameters, 189 | left_input: T, 190 | right_input: T, 191 | ) -> Result { 192 | Self::evaluate( 193 | parameters, 194 | crate::to_uncompressed_bytes!(left_input)?, 195 | crate::to_uncompressed_bytes!(right_input)?, 196 | ) 197 | } 198 | } 199 | 200 | pub fn bytes_to_bits(bytes: &[u8]) -> Vec { 201 | let mut bits = Vec::with_capacity(bytes.len() * 8); 202 | for byte in bytes { 203 | for i in 0..8 { 204 | let bit = (*byte >> i) & 1; 205 | bits.push(bit == 1) 206 | } 207 | } 208 | bits 209 | } 210 | 211 | impl Debug for Parameters { 212 | fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { 213 | writeln!(f, "Pedersen Hash Parameters {{")?; 214 | for (i, g) in self.generators.iter().enumerate() { 215 | writeln!(f, "\t Generator {}: {:?}", i, g)?; 216 | } 217 | writeln!(f, "}}") 218 | } 219 | } 220 | 221 | impl> 222 | ToConstraintField for Parameters 223 | { 224 | #[inline] 225 | fn to_field_elements(&self) -> Option> { 226 | Some(Vec::new()) 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/poseidon/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{ 3 | poseidon::{TwoToOneCRH, CRH}, 4 | CRHScheme, CRHSchemeGadget as CRHGadgetTrait, 5 | TwoToOneCRHSchemeGadget as TwoToOneCRHGadgetTrait, 6 | }, 7 | sponge::{ 8 | constraints::CryptographicSpongeVar, 9 | poseidon::{constraints::PoseidonSpongeVar, PoseidonConfig}, 10 | Absorb, 11 | }, 12 | }; 13 | use ark_ff::PrimeField; 14 | use ark_r1cs_std::{ 15 | alloc::{AllocVar, AllocationMode}, 16 | fields::fp::FpVar, 17 | R1CSVar, 18 | }; 19 | use ark_relations::r1cs::{Namespace, SynthesisError}; 20 | #[cfg(not(feature = "std"))] 21 | use ark_std::vec::Vec; 22 | use ark_std::{borrow::Borrow, marker::PhantomData}; 23 | 24 | #[derive(Clone)] 25 | pub struct CRHParametersVar { 26 | pub parameters: PoseidonConfig, 27 | } 28 | 29 | pub struct CRHGadget { 30 | field_phantom: PhantomData, 31 | } 32 | 33 | impl CRHGadgetTrait, F> for CRHGadget { 34 | type InputVar = [FpVar]; 35 | type OutputVar = FpVar; 36 | type ParametersVar = CRHParametersVar; 37 | 38 | fn evaluate( 39 | parameters: &Self::ParametersVar, 40 | input: &Self::InputVar, 41 | ) -> Result { 42 | let cs = input.cs(); 43 | 44 | if cs.is_none() { 45 | let mut constant_input = Vec::new(); 46 | for var in input.iter() { 47 | constant_input.push(var.value()?); 48 | } 49 | Ok(FpVar::Constant( 50 | CRH::::evaluate(¶meters.parameters, constant_input).unwrap(), 51 | )) 52 | } else { 53 | let mut sponge = PoseidonSpongeVar::new(cs, ¶meters.parameters); 54 | sponge.absorb(&input)?; 55 | let res = sponge.squeeze_field_elements(1)?; 56 | Ok(res[0].clone()) 57 | } 58 | } 59 | } 60 | 61 | pub struct TwoToOneCRHGadget { 62 | field_phantom: PhantomData, 63 | } 64 | 65 | impl TwoToOneCRHGadgetTrait, F> for TwoToOneCRHGadget { 66 | type InputVar = FpVar; 67 | type OutputVar = FpVar; 68 | type ParametersVar = CRHParametersVar; 69 | 70 | fn evaluate( 71 | parameters: &Self::ParametersVar, 72 | left_input: &Self::InputVar, 73 | right_input: &Self::InputVar, 74 | ) -> Result { 75 | Self::compress(parameters, left_input, right_input) 76 | } 77 | 78 | fn compress( 79 | parameters: &Self::ParametersVar, 80 | left_input: &Self::OutputVar, 81 | right_input: &Self::OutputVar, 82 | ) -> Result { 83 | let cs = left_input.cs().or(right_input.cs()); 84 | 85 | if cs.is_none() { 86 | Ok(FpVar::Constant( 87 | CRH::::evaluate( 88 | 
¶meters.parameters, 89 | vec![left_input.value()?, right_input.value()?], 90 | ) 91 | .unwrap(), 92 | )) 93 | } else { 94 | let mut sponge = PoseidonSpongeVar::new(cs, ¶meters.parameters); 95 | sponge.absorb(left_input)?; 96 | sponge.absorb(right_input)?; 97 | let res = sponge.squeeze_field_elements(1)?; 98 | Ok(res[0].clone()) 99 | } 100 | } 101 | } 102 | 103 | impl AllocVar, F> for CRHParametersVar { 104 | fn new_variable>>( 105 | _cs: impl Into>, 106 | f: impl FnOnce() -> Result, 107 | _mode: AllocationMode, 108 | ) -> Result { 109 | f().and_then(|param| { 110 | let parameters = param.borrow().clone(); 111 | 112 | Ok(Self { parameters }) 113 | }) 114 | } 115 | } 116 | 117 | #[cfg(test)] 118 | mod test { 119 | use crate::crh::poseidon::constraints::{CRHGadget, CRHParametersVar, TwoToOneCRHGadget}; 120 | use crate::crh::poseidon::{TwoToOneCRH, CRH}; 121 | use crate::crh::{CRHScheme, CRHSchemeGadget}; 122 | use crate::crh::{TwoToOneCRHScheme, TwoToOneCRHSchemeGadget}; 123 | use crate::sponge::poseidon::PoseidonConfig; 124 | use ark_bls12_377::Fr; 125 | use ark_r1cs_std::alloc::AllocVar; 126 | use ark_r1cs_std::{ 127 | fields::fp::{AllocatedFp, FpVar}, 128 | R1CSVar, 129 | }; 130 | use ark_relations::r1cs::ConstraintSystem; 131 | use ark_std::UniformRand; 132 | 133 | #[test] 134 | fn test_consistency() { 135 | let mut test_rng = ark_std::test_rng(); 136 | 137 | // The following way of generating the MDS matrix is incorrect 138 | // and is only for test purposes. 139 | 140 | let mut mds = vec![vec![]; 3]; 141 | for i in 0..3 { 142 | for _ in 0..3 { 143 | mds[i].push(Fr::rand(&mut test_rng)); 144 | } 145 | } 146 | 147 | let mut ark = vec![vec![]; 8 + 24]; 148 | for i in 0..8 + 24 { 149 | for _ in 0..3 { 150 | ark[i].push(Fr::rand(&mut test_rng)); 151 | } 152 | } 153 | 154 | let mut test_a = Vec::new(); 155 | let mut test_b = Vec::new(); 156 | for _ in 0..3 { 157 | test_a.push(Fr::rand(&mut test_rng)); 158 | test_b.push(Fr::rand(&mut test_rng)); 159 | } 160 | 161 | let params = PoseidonConfig::::new(8, 24, 31, mds, ark, 2, 1); 162 | let crh_a = CRH::::evaluate(¶ms, test_a.clone()).unwrap(); 163 | let crh_b = CRH::::evaluate(¶ms, test_b.clone()).unwrap(); 164 | let crh = TwoToOneCRH::::compress(¶ms, crh_a, crh_b).unwrap(); 165 | 166 | let cs = ConstraintSystem::::new_ref(); 167 | 168 | let mut test_a_g = Vec::new(); 169 | let mut test_b_g = Vec::new(); 170 | 171 | for elem in test_a.iter() { 172 | test_a_g.push(FpVar::Var( 173 | AllocatedFp::::new_witness(cs.clone(), || Ok(elem)).unwrap(), 174 | )); 175 | } 176 | for elem in test_b.iter() { 177 | test_b_g.push(FpVar::Var( 178 | AllocatedFp::::new_witness(cs.clone(), || Ok(elem)).unwrap(), 179 | )); 180 | } 181 | 182 | let params_g = CRHParametersVar::::new_witness(cs, || Ok(params)).unwrap(); 183 | let crh_a_g = CRHGadget::::evaluate(¶ms_g, &test_a_g).unwrap(); 184 | let crh_b_g = CRHGadget::::evaluate(¶ms_g, &test_b_g).unwrap(); 185 | let crh_g = TwoToOneCRHGadget::::compress(¶ms_g, &crh_a_g, &crh_b_g).unwrap(); 186 | 187 | assert_eq!(crh_a, crh_a_g.value().unwrap()); 188 | assert_eq!(crh_b, crh_b_g.value().unwrap()); 189 | assert_eq!(crh, crh_g.value().unwrap()); 190 | } 191 | } 192 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/poseidon/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{CRHScheme, TwoToOneCRHScheme}, 3 | sponge::{ 4 | poseidon::{PoseidonConfig, PoseidonSponge}, 5 | Absorb, CryptographicSponge, 6 | 
}, 7 | Error, 8 | }; 9 | use ark_ff::PrimeField; 10 | use ark_std::{borrow::Borrow, marker::PhantomData, rand::Rng}; 11 | 12 | #[cfg(feature = "r1cs")] 13 | pub mod constraints; 14 | 15 | pub struct CRH { 16 | field_phantom: PhantomData, 17 | } 18 | 19 | impl CRHScheme for CRH { 20 | type Input = [F]; 21 | type Output = F; 22 | type Parameters = PoseidonConfig; 23 | 24 | fn setup(_rng: &mut R) -> Result { 25 | // automatic generation of parameters are not implemented yet 26 | // therefore, the developers must specify the parameters themselves 27 | unimplemented!() 28 | } 29 | 30 | fn evaluate>( 31 | parameters: &Self::Parameters, 32 | input: T, 33 | ) -> Result { 34 | let input = input.borrow(); 35 | 36 | let mut sponge = PoseidonSponge::new(parameters); 37 | sponge.absorb(&input); 38 | let res = sponge.squeeze_field_elements::(1); 39 | Ok(res[0]) 40 | } 41 | } 42 | 43 | pub struct TwoToOneCRH { 44 | field_phantom: PhantomData, 45 | } 46 | 47 | impl TwoToOneCRHScheme for TwoToOneCRH { 48 | type Input = F; 49 | type Output = F; 50 | type Parameters = PoseidonConfig; 51 | 52 | fn setup(_rng: &mut R) -> Result { 53 | // automatic generation of parameters are not implemented yet 54 | // therefore, the developers must specify the parameters themselves 55 | unimplemented!() 56 | } 57 | 58 | fn evaluate>( 59 | parameters: &Self::Parameters, 60 | left_input: T, 61 | right_input: T, 62 | ) -> Result { 63 | Self::compress(parameters, left_input, right_input) 64 | } 65 | 66 | fn compress>( 67 | parameters: &Self::Parameters, 68 | left_input: T, 69 | right_input: T, 70 | ) -> Result { 71 | let left_input = left_input.borrow(); 72 | let right_input = right_input.borrow(); 73 | 74 | let mut sponge = PoseidonSponge::new(parameters); 75 | sponge.absorb(left_input); 76 | sponge.absorb(right_input); 77 | let res = sponge.squeeze_field_elements::(1); 78 | Ok(res[0]) 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /crypto-primitives/src/crh/sha256/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{CRHScheme, TwoToOneCRHScheme}, 3 | Error, 4 | }; 5 | #[cfg(not(feature = "std"))] 6 | use ark_std::vec::Vec; 7 | use ark_std::{borrow::Borrow, rand::Rng}; 8 | use sha2::digest::Digest; 9 | 10 | // Re-export the RustCrypto Sha256 type and its associated traits 11 | pub use sha2::{digest, Sha256}; 12 | 13 | #[cfg(feature = "r1cs")] 14 | pub mod constraints; 15 | 16 | // Implement the CRH traits for SHA-256 17 | impl CRHScheme for Sha256 { 18 | type Input = [u8]; 19 | // This is always 32 bytes. It has to be a Vec to impl CanonicalSerialize 20 | type Output = Vec; 21 | // There are no parameters for SHA256 22 | type Parameters = (); 23 | 24 | // There are no parameters for SHA256 25 | fn setup(_rng: &mut R) -> Result { 26 | Ok(()) 27 | } 28 | 29 | // Evaluates SHA256(input) 30 | fn evaluate>( 31 | _parameters: &Self::Parameters, 32 | input: T, 33 | ) -> Result { 34 | Ok(Sha256::digest(input.borrow()).to_vec()) 35 | } 36 | } 37 | 38 | impl TwoToOneCRHScheme for Sha256 { 39 | type Input = [u8]; 40 | // This is always 32 bytes. 
It has to be a Vec to impl CanonicalSerialize 41 | type Output = Vec; 42 | // There are no parameters for SHA256 43 | type Parameters = (); 44 | 45 | // There are no parameters for SHA256 46 | fn setup(_rng: &mut R) -> Result { 47 | Ok(()) 48 | } 49 | 50 | // Evaluates SHA256(left_input || right_input) 51 | fn evaluate>( 52 | _parameters: &Self::Parameters, 53 | left_input: T, 54 | right_input: T, 55 | ) -> Result { 56 | let left_input = left_input.borrow(); 57 | let right_input = right_input.borrow(); 58 | 59 | // Process the left input then the right input 60 | let mut h = Sha256::default(); 61 | h.update(left_input); 62 | h.update(right_input); 63 | Ok(h.finalize().to_vec()) 64 | } 65 | 66 | // Evaluates SHA256(left_input || right_input) 67 | fn compress>( 68 | parameters: &Self::Parameters, 69 | left_input: T, 70 | right_input: T, 71 | ) -> Result { 72 | ::evaluate( 73 | parameters, 74 | left_input.borrow().as_slice(), 75 | right_input.borrow().as_slice(), 76 | ) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /crypto-primitives/src/encryption/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::encryption::AsymmetricEncryptionScheme; 2 | use ark_ff::fields::Field; 3 | use ark_r1cs_std::prelude::*; 4 | use ark_relations::r1cs::SynthesisError; 5 | use ark_std::fmt::Debug; 6 | 7 | pub trait AsymmetricEncryptionGadget { 8 | type OutputVar: AllocVar 9 | + EqGadget 10 | + Clone 11 | + Sized 12 | + Debug; 13 | type ParametersVar: AllocVar + Clone; 14 | type PlaintextVar: AllocVar + Clone; 15 | type PublicKeyVar: AllocVar + Clone; 16 | type RandomnessVar: AllocVar + Clone; 17 | 18 | fn encrypt( 19 | parameters: &Self::ParametersVar, 20 | message: &Self::PlaintextVar, 21 | randomness: &Self::RandomnessVar, 22 | public_key: &Self::PublicKeyVar, 23 | ) -> Result; 24 | } 25 | -------------------------------------------------------------------------------- /crypto-primitives/src/encryption/elgamal/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::encryption::{ 2 | elgamal::{Ciphertext, ElGamal, Parameters, Plaintext, PublicKey, Randomness}, 3 | AsymmetricEncryptionGadget, 4 | }; 5 | use ark_ec::CurveGroup; 6 | use ark_ff::{ 7 | fields::{Field, PrimeField}, 8 | Zero, 9 | }; 10 | use ark_r1cs_std::prelude::*; 11 | use ark_relations::r1cs::{Namespace, SynthesisError}; 12 | use ark_serialize::CanonicalSerialize; 13 | #[cfg(not(feature = "std"))] 14 | use ark_std::vec::Vec; 15 | use ark_std::{borrow::Borrow, marker::PhantomData}; 16 | 17 | pub type ConstraintF = <::BaseField as Field>::BasePrimeField; 18 | 19 | #[derive(Clone, Debug)] 20 | pub struct RandomnessVar(Vec>); 21 | 22 | impl AllocVar, F> for RandomnessVar 23 | where 24 | C: CurveGroup, 25 | F: PrimeField, 26 | { 27 | fn new_variable>>( 28 | cs: impl Into>, 29 | f: impl FnOnce() -> Result, 30 | mode: AllocationMode, 31 | ) -> Result { 32 | let mut r = Vec::new(); 33 | let _ = &f() 34 | .map(|b| b.borrow().0) 35 | .unwrap_or(C::ScalarField::zero()) 36 | .serialize_compressed(&mut r) 37 | .unwrap(); 38 | match mode { 39 | AllocationMode::Constant => Ok(Self(UInt8::constant_vec(&r))), 40 | AllocationMode::Input => UInt8::new_input_vec(cs, &r).map(Self), 41 | AllocationMode::Witness => UInt8::new_witness_vec(cs, &r).map(Self), 42 | } 43 | } 44 | } 45 | 46 | #[derive(Derivative)] 47 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 48 | pub struct ParametersVar>> 
49 | where 50 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 51 | { 52 | generator: GG, 53 | #[doc(hidden)] 54 | _curve: PhantomData, 55 | } 56 | 57 | impl AllocVar, ConstraintF> for ParametersVar 58 | where 59 | C: CurveGroup, 60 | GG: CurveVar>, 61 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 62 | { 63 | fn new_variable>>( 64 | cs: impl Into>>, 65 | f: impl FnOnce() -> Result, 66 | mode: AllocationMode, 67 | ) -> Result { 68 | let generator = GG::new_variable(cs, || f().map(|g| g.borrow().generator), mode)?; 69 | Ok(Self { 70 | generator, 71 | _curve: PhantomData, 72 | }) 73 | } 74 | } 75 | 76 | #[derive(Derivative)] 77 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 78 | pub struct PlaintextVar>> 79 | where 80 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 81 | { 82 | pub plaintext: GG, 83 | #[doc(hidden)] 84 | _curve: PhantomData, 85 | } 86 | 87 | impl AllocVar, ConstraintF> for PlaintextVar 88 | where 89 | C: CurveGroup, 90 | GG: CurveVar>, 91 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 92 | { 93 | fn new_variable>>( 94 | cs: impl Into>>, 95 | f: impl FnOnce() -> Result, 96 | mode: AllocationMode, 97 | ) -> Result { 98 | let plaintext = GG::new_variable(cs, f, mode)?; 99 | Ok(Self { 100 | plaintext, 101 | _curve: PhantomData, 102 | }) 103 | } 104 | } 105 | 106 | #[derive(Derivative)] 107 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 108 | pub struct PublicKeyVar>> 109 | where 110 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 111 | { 112 | pub pk: GG, 113 | #[doc(hidden)] 114 | _curve: PhantomData, 115 | } 116 | 117 | impl AllocVar, ConstraintF> for PublicKeyVar 118 | where 119 | C: CurveGroup, 120 | GG: CurveVar>, 121 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 122 | { 123 | fn new_variable>>( 124 | cs: impl Into>>, 125 | f: impl FnOnce() -> Result, 126 | mode: AllocationMode, 127 | ) -> Result { 128 | let pk = GG::new_variable(cs, f, mode)?; 129 | Ok(Self { 130 | pk, 131 | _curve: PhantomData, 132 | }) 133 | } 134 | } 135 | 136 | #[derive(Derivative, Debug)] 137 | #[derivative(Clone(bound = "C: CurveGroup, GG: CurveVar>"))] 138 | pub struct OutputVar>> 139 | where 140 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 141 | { 142 | pub c1: GG, 143 | pub c2: GG, 144 | #[doc(hidden)] 145 | _curve: PhantomData, 146 | } 147 | 148 | impl AllocVar, ConstraintF> for OutputVar 149 | where 150 | C: CurveGroup, 151 | GG: CurveVar>, 152 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 153 | { 154 | fn new_variable>>( 155 | cs: impl Into>>, 156 | f: impl FnOnce() -> Result, 157 | mode: AllocationMode, 158 | ) -> Result { 159 | let ns = cs.into(); 160 | let cs = ns.cs(); 161 | let prep = f().map(|g| *g.borrow()); 162 | let c1 = GG::new_variable(cs.clone(), || prep.map(|g| g.0), mode)?; 163 | let c2 = GG::new_variable(cs.clone(), || prep.map(|g| g.1), mode)?; 164 | Ok(Self { 165 | c1, 166 | c2, 167 | _curve: PhantomData, 168 | }) 169 | } 170 | } 171 | 172 | impl EqGadget> for OutputVar 173 | where 174 | C: CurveGroup, 175 | GC: CurveVar>, 176 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 177 | { 178 | #[inline] 179 | fn is_eq(&self, other: &Self) -> Result>, SynthesisError> { 180 | Ok(self.c1.is_eq(&other.c1)? & &self.c2.is_eq(&other.c2)?) 
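// Both ciphertext components must match; the two equality booleans are combined with an in-circuit AND.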
181 | } 182 | } 183 | 184 | pub struct ElGamalEncGadget>> 185 | where 186 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 187 | { 188 | #[doc(hidden)] 189 | _curve: PhantomData<*const C>, 190 | _group_var: PhantomData<*const GG>, 191 | } 192 | 193 | impl AsymmetricEncryptionGadget, ConstraintF> for ElGamalEncGadget 194 | where 195 | C: CurveGroup, 196 | GG: CurveVar>, 197 | for<'a> &'a GG: GroupOpsBounds<'a, C, GG>, 198 | ConstraintF: PrimeField, 199 | { 200 | type OutputVar = OutputVar; 201 | type ParametersVar = ParametersVar; 202 | type PlaintextVar = PlaintextVar; 203 | type PublicKeyVar = PublicKeyVar; 204 | type RandomnessVar = RandomnessVar>; 205 | 206 | fn encrypt( 207 | parameters: &Self::ParametersVar, 208 | message: &Self::PlaintextVar, 209 | randomness: &Self::RandomnessVar, 210 | public_key: &Self::PublicKeyVar, 211 | ) -> Result { 212 | // flatten randomness to little-endian bit vector 213 | let randomness = randomness 214 | .0 215 | .iter() 216 | .flat_map(|b| b.to_bits_le().unwrap()) 217 | .collect::>(); 218 | 219 | // compute s = randomness*pk 220 | let s = public_key.pk.clone().scalar_mul_le(randomness.iter())?; 221 | 222 | // compute c1 = randomness*generator 223 | let c1 = parameters 224 | .generator 225 | .clone() 226 | .scalar_mul_le(randomness.iter())?; 227 | 228 | // compute c2 = m + s 229 | let c2 = message.plaintext.clone() + s; 230 | 231 | Ok(Self::OutputVar { 232 | c1, 233 | c2, 234 | _curve: PhantomData, 235 | }) 236 | } 237 | } 238 | 239 | #[cfg(test)] 240 | mod test { 241 | use crate::encryption::constraints::AsymmetricEncryptionGadget; 242 | use ark_std::{test_rng, UniformRand}; 243 | 244 | use ark_ed_on_bls12_381::{constraints::EdwardsVar, EdwardsProjective as JubJub, Fq}; 245 | 246 | use crate::encryption::elgamal::{constraints::ElGamalEncGadget, ElGamal, Randomness}; 247 | use crate::encryption::AsymmetricEncryptionScheme; 248 | use ark_r1cs_std::prelude::*; 249 | use ark_relations::r1cs::ConstraintSystem; 250 | 251 | #[test] 252 | fn test_elgamal_gadget() { 253 | let rng = &mut test_rng(); 254 | 255 | type MyEnc = ElGamal; 256 | type MyGadget = ElGamalEncGadget; 257 | 258 | // compute primitive result 259 | let parameters = MyEnc::setup(rng).unwrap(); 260 | let (pk, _) = MyEnc::keygen(¶meters, rng).unwrap(); 261 | let msg = JubJub::rand(rng).into(); 262 | let randomness = Randomness::rand(rng); 263 | let primitive_result = MyEnc::encrypt(¶meters, &pk, &msg, &randomness).unwrap(); 264 | 265 | // construct constraint system 266 | let cs = ConstraintSystem::::new_ref(); 267 | let randomness_var = 268 | >::RandomnessVar::new_witness( 269 | ark_relations::ns!(cs, "gadget_randomness"), 270 | || Ok(&randomness), 271 | ) 272 | .unwrap(); 273 | let parameters_var = 274 | >::ParametersVar::new_constant( 275 | ark_relations::ns!(cs, "gadget_parameters"), 276 | ¶meters, 277 | ) 278 | .unwrap(); 279 | let msg_var = 280 | >::PlaintextVar::new_witness( 281 | ark_relations::ns!(cs, "gadget_message"), 282 | || Ok(&msg), 283 | ) 284 | .unwrap(); 285 | let pk_var = 286 | >::PublicKeyVar::new_witness( 287 | ark_relations::ns!(cs, "gadget_public_key"), 288 | || Ok(&pk), 289 | ) 290 | .unwrap(); 291 | 292 | // use gadget 293 | let result_var = 294 | MyGadget::encrypt(¶meters_var, &msg_var, &randomness_var, &pk_var).unwrap(); 295 | 296 | // check that result equals expected ciphertext in the constraint system 297 | let expected_var = 298 | >::OutputVar::new_input( 299 | ark_relations::ns!(cs, "gadget_expected"), 300 | || Ok(&primitive_result), 301 | ) 302 | .unwrap(); 303 | 
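// Enforce in-circuit that the gadget's ciphertext equals the expected ciphertext allocated above as a public input.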
expected_var.enforce_equal(&result_var).unwrap(); 304 | 305 | assert_eq!(primitive_result.0, result_var.c1.value().unwrap()); 306 | assert_eq!(primitive_result.1, result_var.c2.value().unwrap()); 307 | assert!(cs.is_satisfied().unwrap()); 308 | } 309 | } 310 | -------------------------------------------------------------------------------- /crypto-primitives/src/encryption/elgamal/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{encryption::AsymmetricEncryptionScheme, Error}; 2 | use ark_ec::{AdditiveGroup, CurveGroup}; 3 | use ark_ff::{fields::PrimeField, UniformRand}; 4 | use ark_std::{marker::PhantomData, ops::Mul, rand::Rng}; 5 | 6 | #[cfg(feature = "r1cs")] 7 | pub mod constraints; 8 | 9 | pub struct ElGamal { 10 | _group: PhantomData, 11 | } 12 | 13 | pub struct Parameters { 14 | pub generator: C::Affine, 15 | } 16 | 17 | pub type PublicKey = ::Affine; 18 | 19 | pub struct SecretKey(pub C::ScalarField); 20 | 21 | pub struct Randomness(pub C::ScalarField); 22 | 23 | impl UniformRand for Randomness { 24 | #[inline] 25 | fn rand(rng: &mut R) -> Self { 26 | Randomness(::Scalar::rand(rng)) 27 | } 28 | } 29 | 30 | pub type Plaintext = ::Affine; 31 | 32 | pub type Ciphertext = (::Affine, ::Affine); 33 | 34 | impl AsymmetricEncryptionScheme for ElGamal 35 | where 36 | C::ScalarField: PrimeField, 37 | { 38 | type Parameters = Parameters; 39 | type PublicKey = PublicKey; 40 | type SecretKey = SecretKey; 41 | type Randomness = Randomness; 42 | type Plaintext = Plaintext; 43 | type Ciphertext = Ciphertext; 44 | 45 | fn setup(rng: &mut R) -> Result { 46 | // get a random generator 47 | let generator = C::rand(rng).into(); 48 | 49 | Ok(Parameters { generator }) 50 | } 51 | 52 | fn keygen( 53 | pp: &Self::Parameters, 54 | rng: &mut R, 55 | ) -> Result<(Self::PublicKey, Self::SecretKey), Error> { 56 | // get a random element from the scalar field 57 | let secret_key: ::Scalar = C::ScalarField::rand(rng); 58 | 59 | // compute secret_key*generator to derive the public key 60 | let public_key = pp.generator.mul(secret_key).into(); 61 | 62 | Ok((public_key, SecretKey(secret_key))) 63 | } 64 | 65 | fn encrypt( 66 | pp: &Self::Parameters, 67 | pk: &Self::PublicKey, 68 | message: &Self::Plaintext, 69 | r: &Self::Randomness, 70 | ) -> Result { 71 | // compute s = r*pk 72 | let s = pk.mul(r.0).into(); 73 | 74 | // compute c1 = r*generator 75 | let c1 = pp.generator.mul(r.0).into(); 76 | 77 | // compute c2 = m + s 78 | let c2 = *message + s; 79 | 80 | Ok((c1, c2.into_affine())) 81 | } 82 | 83 | fn decrypt( 84 | _pp: &Self::Parameters, 85 | sk: &Self::SecretKey, 86 | ciphertext: &Self::Ciphertext, 87 | ) -> Result { 88 | let c1: ::Affine = ciphertext.0; 89 | let c2: ::Affine = ciphertext.1; 90 | 91 | // compute s = secret_key * c1 92 | let s = c1.mul(sk.0); 93 | let s_inv = -s; 94 | 95 | // compute message = c2 - s 96 | let m = c2 + s_inv; 97 | 98 | Ok(m.into_affine()) 99 | } 100 | } 101 | 102 | #[cfg(test)] 103 | mod test { 104 | use ark_std::{test_rng, UniformRand}; 105 | 106 | use ark_ed_on_bls12_381::EdwardsProjective as JubJub; 107 | 108 | use crate::encryption::elgamal::{ElGamal, Randomness}; 109 | use crate::encryption::AsymmetricEncryptionScheme; 110 | 111 | #[test] 112 | fn test_elgamal_encryption() { 113 | let rng = &mut test_rng(); 114 | 115 | // setup and key generation 116 | let parameters = ElGamal::::setup(rng).unwrap(); 117 | let (pk, sk) = ElGamal::::keygen(¶meters, rng).unwrap(); 118 | 119 | // get a random msg and encryption randomness 
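// (the plaintext is a random JubJub point: this ElGamal instance encrypts group elements, not byte strings)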
120 | let msg = JubJub::rand(rng).into(); 121 | let r = Randomness::rand(rng); 122 | 123 | // encrypt and decrypt the message 124 | let cipher = ElGamal::::encrypt(¶meters, &pk, &msg, &r).unwrap(); 125 | let check_msg = ElGamal::::decrypt(¶meters, &sk, &cipher).unwrap(); 126 | 127 | assert_eq!(msg, check_msg); 128 | } 129 | } 130 | -------------------------------------------------------------------------------- /crypto-primitives/src/encryption/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::Error; 2 | use ark_std::rand::Rng; 3 | 4 | #[cfg(feature = "r1cs")] 5 | pub mod constraints; 6 | pub mod elgamal; 7 | #[cfg(feature = "r1cs")] 8 | pub use constraints::*; 9 | 10 | pub trait AsymmetricEncryptionScheme { 11 | type Parameters; 12 | type PublicKey; 13 | type SecretKey; 14 | type Randomness; 15 | type Plaintext; 16 | type Ciphertext; 17 | 18 | fn setup(rng: &mut R) -> Result; 19 | 20 | fn keygen( 21 | pp: &Self::Parameters, 22 | rng: &mut R, 23 | ) -> Result<(Self::PublicKey, Self::SecretKey), Error>; 24 | 25 | fn encrypt( 26 | pp: &Self::Parameters, 27 | pk: &Self::PublicKey, 28 | message: &Self::Plaintext, 29 | r: &Self::Randomness, 30 | ) -> Result; 31 | 32 | fn decrypt( 33 | pp: &Self::Parameters, 34 | sk: &Self::SecretKey, 35 | ciphertext: &Self::Ciphertext, 36 | ) -> Result; 37 | } 38 | -------------------------------------------------------------------------------- /crypto-primitives/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(feature = "std"), no_std)] 2 | #![deny( 3 | unused, 4 | future_incompatible, 5 | nonstandard_style, 6 | rust_2018_idioms, 7 | // missing_docs 8 | )] 9 | #![forbid(unsafe_code)] 10 | 11 | #[macro_use] 12 | extern crate ark_std; 13 | 14 | #[allow(unused_imports)] 15 | #[macro_use] 16 | extern crate derivative; 17 | 18 | #[allow(unused_imports)] 19 | pub(crate) use ark_std::{borrow::ToOwned, boxed::Box, vec::Vec}; 20 | mod macros; 21 | 22 | #[cfg(feature = "commitment")] 23 | pub mod commitment; 24 | 25 | #[cfg(feature = "crh")] 26 | pub mod crh; 27 | 28 | #[cfg(feature = "merkle_tree")] 29 | pub mod merkle_tree; 30 | 31 | #[cfg(feature = "encryption")] 32 | pub mod encryption; 33 | 34 | #[cfg(feature = "prf")] 35 | pub mod prf; 36 | 37 | #[cfg(feature = "signature")] 38 | pub mod signature; 39 | 40 | #[cfg(feature = "snark")] 41 | pub mod snark; 42 | 43 | #[cfg(feature = "sponge")] 44 | pub mod sponge; 45 | 46 | #[derive(Debug)] 47 | pub enum Error { 48 | IncorrectInputLength(usize), 49 | NotPrimeOrder, 50 | GenericError(Box), 51 | SerializationError(ark_serialize::SerializationError), 52 | } 53 | 54 | impl ark_std::fmt::Display for Error { 55 | fn fmt(&self, f: &mut ark_std::fmt::Formatter<'_>) -> ark_std::fmt::Result { 56 | match self { 57 | Self::IncorrectInputLength(len) => write!(f, "incorrect input length: {len}"), 58 | Self::NotPrimeOrder => write!(f, "element is not prime order"), 59 | Self::GenericError(e) => write!(f, "{e}"), 60 | Self::SerializationError(e) => write!(f, "{e}"), 61 | } 62 | } 63 | } 64 | 65 | impl ark_std::error::Error for Error {} 66 | 67 | impl From for Error { 68 | fn from(e: ark_serialize::SerializationError) -> Self { 69 | Self::SerializationError(e) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /crypto-primitives/src/macros.rs: -------------------------------------------------------------------------------- 1 | #[macro_export] 2 | /// Convert any 
serializable object to uncompressed bytes. 3 | macro_rules! to_uncompressed_bytes { 4 | ($v: expr) => {{ 5 | let mut bytes = Vec::new(); 6 | let result = $v.borrow().serialize_uncompressed(&mut bytes); 7 | if let Ok(()) = result { 8 | Ok(bytes) 9 | } else { 10 | Err(result.err().unwrap()) 11 | } 12 | }}; 13 | } 14 | -------------------------------------------------------------------------------- /crypto-primitives/src/merkle_tree/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | crh::{CRHSchemeGadget, TwoToOneCRHSchemeGadget}, 3 | merkle_tree::{Config, IdentityDigestConverter, Path}, 4 | }; 5 | use ark_ff::PrimeField; 6 | use ark_r1cs_std::prelude::*; 7 | use ark_relations::r1cs::{Namespace, SynthesisError}; 8 | #[cfg(not(feature = "std"))] 9 | use ark_std::vec::Vec; 10 | use ark_std::{borrow::Borrow, fmt::Debug}; 11 | 12 | pub trait DigestVarConverter { 13 | type TargetType: Borrow; 14 | fn convert(from: From) -> Result; 15 | } 16 | 17 | impl DigestVarConverter for IdentityDigestConverter { 18 | type TargetType = T; 19 | 20 | fn convert(from: T) -> Result { 21 | Ok(from) 22 | } 23 | } 24 | 25 | pub struct BytesVarDigestConverter, F: PrimeField> { 26 | _prev_layer_digest: T, 27 | _constraint_field: F, 28 | } 29 | 30 | impl, F: PrimeField> DigestVarConverter]> 31 | for BytesVarDigestConverter 32 | { 33 | type TargetType = Vec>; 34 | 35 | fn convert(from: T) -> Result { 36 | from.to_non_unique_bytes_le() 37 | } 38 | } 39 | 40 | pub trait ConfigGadget { 41 | type Leaf: Debug + ?Sized; 42 | type LeafDigest: AllocVar 43 | + EqGadget 44 | + ToBytesGadget 45 | + CondSelectGadget 46 | + R1CSVar 47 | + Debug 48 | + Clone 49 | + Sized; 50 | type LeafInnerConverter: DigestVarConverter< 51 | Self::LeafDigest, 52 | >::InputVar, 53 | >; 54 | type InnerDigest: AllocVar 55 | + EqGadget 56 | + ToBytesGadget 57 | + CondSelectGadget 58 | + R1CSVar 59 | + Debug 60 | + Clone 61 | + Sized; 62 | 63 | type LeafHash: CRHSchemeGadget< 64 | P::LeafHash, 65 | F, 66 | InputVar = Self::Leaf, 67 | OutputVar = Self::LeafDigest, 68 | >; 69 | type TwoToOneHash: TwoToOneCRHSchemeGadget; 70 | } 71 | 72 | type LeafParam = <>::LeafHash as CRHSchemeGadget< 73 |

<P as Config>::LeafHash, 74 | F, 75 | >>::ParametersVar; 76 | type TwoToOneParam<PG, P, F> = 77 | <<PG as ConfigGadget<P, F>>::TwoToOneHash as TwoToOneCRHSchemeGadget< 78 | <P as Config>
::TwoToOneHash, 79 | F, 80 | >>::ParametersVar; 81 | 82 | /// Represents a merkle tree path gadget. 83 | #[derive(Debug, Derivative)] 84 | #[derivative(Clone(bound = "P: Config, F: PrimeField, PG: ConfigGadget"))] 85 | pub struct PathVar> { 86 | /// `path[i]` is 0 (false) iff ith non-leaf node from top to bottom is left. 87 | path: Vec>, 88 | /// `auth_path[i]` is the entry of sibling of ith non-leaf node from top to bottom. 89 | auth_path: Vec, 90 | /// The sibling of leaf. 91 | leaf_sibling: PG::LeafDigest, 92 | /// Is this leaf the right child? 93 | leaf_is_right_child: Boolean, 94 | } 95 | 96 | impl> AllocVar, F> for PathVar 97 | where 98 | P: Config, 99 | F: PrimeField, 100 | { 101 | #[tracing::instrument(target = "r1cs", skip(cs, f))] 102 | fn new_variable>>( 103 | cs: impl Into>, 104 | f: impl FnOnce() -> Result, 105 | mode: AllocationMode, 106 | ) -> Result { 107 | let ns = cs.into(); 108 | let cs = ns.cs(); 109 | f().and_then(|val| { 110 | let leaf_sibling = PG::LeafDigest::new_variable( 111 | ark_relations::ns!(cs, "leaf_sibling"), 112 | || Ok(val.borrow().leaf_sibling_hash.clone()), 113 | mode, 114 | )?; 115 | let leaf_position_bit = Boolean::new_variable( 116 | ark_relations::ns!(cs, "leaf_position_bit"), 117 | || Ok(val.borrow().leaf_index & 1 == 1), 118 | mode, 119 | )?; 120 | let pos_list: Vec<_> = val.borrow().position_list().collect(); 121 | let path = Vec::new_variable( 122 | ark_relations::ns!(cs, "path_bits"), 123 | || Ok(&pos_list[..(pos_list.len() - 1)]), 124 | mode, 125 | )?; 126 | 127 | let auth_path = Vec::new_variable( 128 | ark_relations::ns!(cs, "auth_path_nodes"), 129 | || Ok(&val.borrow().auth_path[..]), 130 | mode, 131 | )?; 132 | Ok(PathVar { 133 | path, 134 | auth_path, 135 | leaf_sibling, 136 | leaf_is_right_child: leaf_position_bit, 137 | }) 138 | }) 139 | } 140 | } 141 | 142 | impl> PathVar { 143 | /// Set the leaf index of the path to a given value. Verifier can use function before calling `verify` 144 | /// to check the correctness leaf position. 145 | /// * `leaf_index`: leaf index encoded in little-endian format 146 | #[tracing::instrument(target = "r1cs", skip(self))] 147 | pub fn set_leaf_position(&mut self, leaf_index: Vec>) { 148 | // The path to a leaf is described by the branching 149 | // decisions taken at each node. This corresponds to the position 150 | // of the leaf. 151 | let mut path = leaf_index; 152 | 153 | // If leaves are numbered left-to-right starting from zero, 154 | // then all left children have odd positions (least significant bit is one), while all 155 | // right children have even positions (least significant bit is zero). 156 | let leaf_is_right_child = path.remove(0); 157 | 158 | // pad with zero if the length of `path` is too short 159 | if path.len() < self.auth_path.len() { 160 | path.extend((0..self.auth_path.len() - path.len()).map(|_| Boolean::constant(false))) 161 | } 162 | 163 | // truncate if the length of `path` is too long 164 | path.truncate(self.auth_path.len()); 165 | 166 | // branching decision starts from root, so we need to reverse it. 167 | path.reverse(); 168 | 169 | self.path = path; 170 | self.leaf_is_right_child = leaf_is_right_child; 171 | } 172 | 173 | /// Return the leaf position index in little-endian form. 
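/// The first bit says whether the leaf is the right child of its parent; the remaining
/// bits give the branching decisions from the bottom non-leaf layer up to the root.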
174 | pub fn get_leaf_position(&self) -> Vec> { 175 | ark_std::iter::once(self.leaf_is_right_child.clone()) 176 | .chain(self.path.clone().into_iter().rev()) 177 | .collect() 178 | } 179 | 180 | /// Calculate the root of the Merkle tree assuming that `leaf` is the leaf on the path defined by `self`. 181 | #[tracing::instrument(target = "r1cs", skip(self, leaf_params, two_to_one_params))] 182 | pub fn calculate_root( 183 | &self, 184 | leaf_params: &LeafParam, 185 | two_to_one_params: &TwoToOneParam, 186 | leaf: &PG::Leaf, 187 | ) -> Result { 188 | let claimed_leaf_hash = PG::LeafHash::evaluate(leaf_params, leaf)?; 189 | let leaf_sibling_hash = &self.leaf_sibling; 190 | 191 | // calculate hash for the bottom non_leaf_layer 192 | 193 | // We assume that when a bit is 0, it indicates that the currently hashed value H is the left child, 194 | // and when bit is 1, it indicates our H is the right child. 195 | // Thus `left_hash` is sibling if the bit `leaf_is_right_child` is 1, and is leaf otherwise. 196 | 197 | let left_hash = self 198 | .leaf_is_right_child 199 | .select(leaf_sibling_hash, &claimed_leaf_hash)?; 200 | let right_hash = self 201 | .leaf_is_right_child 202 | .select(&claimed_leaf_hash, leaf_sibling_hash)?; 203 | 204 | // convert leaf digest to inner digest 205 | let left_hash = PG::LeafInnerConverter::convert(left_hash)?; 206 | let right_hash = PG::LeafInnerConverter::convert(right_hash)?; 207 | 208 | let mut curr_hash = 209 | PG::TwoToOneHash::evaluate(two_to_one_params, left_hash.borrow(), right_hash.borrow())?; 210 | // To traverse up a MT, we iterate over the path from bottom to top (i.e. in reverse) 211 | 212 | // At any given bit, the bit being 0 indicates our currently hashed value is the left, 213 | // and the bit being 1 indicates our currently hashed value is on the right. 214 | // Thus `left_hash` is the sibling if bit is 1, and it's the computed hash if bit is 0 215 | for (bit, sibling) in self.path.iter().rev().zip(self.auth_path.iter().rev()) { 216 | let left_hash = bit.select(sibling, &curr_hash)?; 217 | let right_hash = bit.select(&curr_hash, sibling)?; 218 | 219 | curr_hash = PG::TwoToOneHash::compress(two_to_one_params, &left_hash, &right_hash)?; 220 | } 221 | 222 | Ok(curr_hash) 223 | } 224 | 225 | /// Check that hashing a Merkle tree path according to `self`, and 226 | /// with `leaf` as the leaf, leads to a Merkle tree root equalling `root`. 227 | #[tracing::instrument(target = "r1cs", skip(self, leaf_params, two_to_one_params))] 228 | pub fn verify_membership( 229 | &self, 230 | leaf_params: &LeafParam, 231 | two_to_one_params: &TwoToOneParam, 232 | root: &PG::InnerDigest, 233 | leaf: &PG::Leaf, 234 | ) -> Result, SynthesisError> { 235 | let expected_root = self.calculate_root(leaf_params, two_to_one_params, leaf)?; 236 | Ok(expected_root.is_eq(root)?) 237 | } 238 | 239 | /// Check that `old_leaf` is the leaf of the Merkle tree on the path defined by 240 | /// `self`, and then compute the new root when replacing `old_leaf` by `new_leaf`. 241 | #[tracing::instrument(target = "r1cs", skip(self, leaf_params, two_to_one_params))] 242 | pub fn update_leaf( 243 | &self, 244 | leaf_params: &LeafParam, 245 | two_to_one_params: &TwoToOneParam, 246 | old_root: &PG::InnerDigest, 247 | old_leaf: &PG::Leaf, 248 | new_leaf: &PG::Leaf, 249 | ) -> Result { 250 | self.verify_membership(leaf_params, two_to_one_params, old_root, old_leaf)? 251 | .enforce_equal(&Boolean::TRUE)?; 252 | Ok(self.calculate_root(leaf_params, two_to_one_params, new_leaf)?) 
253 | } 254 | 255 | /// Check that `old_leaf` is the leaf of the Merkle tree on the path defined by 256 | /// `self`, and then compute the expected new root when replacing `old_leaf` by `new_leaf`. 257 | /// Return a boolean indicating whether expected new root equals `new_root`. 258 | #[tracing::instrument(target = "r1cs", skip(self, leaf_params, two_to_one_params))] 259 | pub fn update_and_check( 260 | &self, 261 | leaf_params: &LeafParam, 262 | two_to_one_params: &TwoToOneParam, 263 | old_root: &PG::InnerDigest, 264 | new_root: &PG::InnerDigest, 265 | old_leaf: &PG::Leaf, 266 | new_leaf: &PG::Leaf, 267 | ) -> Result, SynthesisError> { 268 | let actual_new_root = 269 | self.update_leaf(leaf_params, two_to_one_params, old_root, old_leaf, new_leaf)?; 270 | Ok(actual_new_root.is_eq(&new_root)?) 271 | } 272 | } 273 | -------------------------------------------------------------------------------- /crypto-primitives/src/merkle_tree/tests/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "r1cs")] 2 | mod constraints; 3 | mod test_utils; 4 | 5 | mod bytes_mt_tests { 6 | 7 | use crate::{crh::*, merkle_tree::*}; 8 | use ark_ed_on_bls12_381::EdwardsProjective as JubJub; 9 | use ark_ff::BigInteger256; 10 | use ark_std::{iter::zip, test_rng, UniformRand}; 11 | 12 | #[derive(Clone)] 13 | pub(super) struct Window4x256; 14 | impl pedersen::Window for Window4x256 { 15 | const WINDOW_SIZE: usize = 4; 16 | const NUM_WINDOWS: usize = 256; 17 | } 18 | 19 | type LeafH = pedersen::CRH; 20 | type CompressH = pedersen::TwoToOneCRH; 21 | 22 | struct JubJubMerkleTreeParams; 23 | 24 | impl Config for JubJubMerkleTreeParams { 25 | type Leaf = [u8]; 26 | 27 | type LeafDigest = ::Output; 28 | type LeafInnerDigestConverter = ByteDigestConverter; 29 | type InnerDigest = ::Output; 30 | 31 | type LeafHash = LeafH; 32 | type TwoToOneHash = CompressH; 33 | } 34 | type JubJubMerkleTree = MerkleTree; 35 | 36 | /// Pedersen only takes bytes as leaf, so we use `ToBytes` trait. 
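/// Each leaf is first serialized to uncompressed bytes (via `to_uncompressed_bytes!`) before being inserted into the tree.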
37 | fn merkle_tree_test(leaves: &[L], update_query: &[(usize, L)]) -> () { 38 | let mut rng = ark_std::test_rng(); 39 | 40 | let mut leaves: Vec<_> = leaves 41 | .iter() 42 | .map(|leaf| crate::to_uncompressed_bytes!(leaf).unwrap()) 43 | .collect(); 44 | 45 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 46 | let two_to_one_params = ::setup(&mut rng).unwrap(); 47 | 48 | let mut tree = 49 | JubJubMerkleTree::new(&leaf_crh_params, &two_to_one_params, &leaves).unwrap(); 50 | 51 | let mut root = tree.root(); 52 | // test merkle tree functionality without update 53 | for (i, leaf) in leaves.iter().enumerate() { 54 | let proof = tree.generate_proof(i).unwrap(); 55 | assert!(proof 56 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaf.as_slice()) 57 | .unwrap()); 58 | } 59 | 60 | // test the merkle tree multi-proof functionality 61 | let mut multi_proof = tree 62 | .generate_multi_proof((0..leaves.len()).collect::>()) 63 | .unwrap(); 64 | 65 | assert!(multi_proof 66 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaves.clone()) 67 | .unwrap()); 68 | 69 | // test merkle tree update functionality 70 | for (i, v) in update_query { 71 | let v = crate::to_uncompressed_bytes!(v).unwrap(); 72 | tree.update(*i, &v).unwrap(); 73 | leaves[*i] = v.clone(); 74 | } 75 | // update the root 76 | root = tree.root(); 77 | // verify again 78 | for (i, leaf) in leaves.iter().enumerate() { 79 | let proof = tree.generate_proof(i).unwrap(); 80 | assert!(proof 81 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaf.as_slice()) 82 | .unwrap()); 83 | } 84 | 85 | // test the merkle tree multi-proof functionality again 86 | multi_proof = tree 87 | .generate_multi_proof((0..leaves.len()).collect::>()) 88 | .unwrap(); 89 | 90 | assert!(multi_proof 91 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaves.clone()) 92 | .unwrap()); 93 | } 94 | 95 | #[test] 96 | fn good_root_test() { 97 | let mut rng = test_rng(); 98 | 99 | let mut leaves = Vec::new(); 100 | for _ in 0..2u8 { 101 | leaves.push(BigInteger256::rand(&mut rng)); 102 | } 103 | merkle_tree_test( 104 | &leaves, 105 | &vec![ 106 | (0, BigInteger256::rand(&mut rng)), 107 | (1, BigInteger256::rand(&mut rng)), 108 | ], 109 | ); 110 | 111 | let mut leaves = Vec::new(); 112 | for _ in 0..4u8 { 113 | leaves.push(BigInteger256::rand(&mut rng)); 114 | } 115 | merkle_tree_test(&leaves, &vec![(3, BigInteger256::rand(&mut rng))]); 116 | 117 | let mut leaves = Vec::new(); 118 | for _ in 0..128u8 { 119 | leaves.push(BigInteger256::rand(&mut rng)); 120 | } 121 | merkle_tree_test( 122 | &leaves, 123 | &vec![ 124 | (2, BigInteger256::rand(&mut rng)), 125 | (3, BigInteger256::rand(&mut rng)), 126 | (5, BigInteger256::rand(&mut rng)), 127 | (111, BigInteger256::rand(&mut rng)), 128 | (127, BigInteger256::rand(&mut rng)), 129 | ], 130 | ); 131 | } 132 | 133 | #[test] 134 | fn multi_proof_dissection_test() { 135 | let mut rng = test_rng(); 136 | 137 | let mut leaves = Vec::new(); 138 | for _ in 0..8u8 { 139 | leaves.push(BigInteger256::rand(&mut rng)); 140 | } 141 | assert_eq!(leaves.len(), 8); 142 | 143 | let serialized_leaves: Vec<_> = leaves 144 | .iter() 145 | .map(|leaf| crate::to_uncompressed_bytes!(leaf).unwrap()) 146 | .collect(); 147 | 148 | let leaf_crh_params = ::setup(&mut rng).unwrap(); 149 | let two_to_one_params = ::setup(&mut rng).unwrap(); 150 | 151 | let tree = JubJubMerkleTree::new(&leaf_crh_params, &two_to_one_params, &serialized_leaves) 152 | .unwrap(); 153 | 154 | let mut proofs = Vec::with_capacity(leaves.len()); 155 | 156 | for 
(i, _) in leaves.iter().enumerate() { 157 | proofs.push(tree.generate_proof(i).unwrap()); 158 | } 159 | 160 | let multi_proof = tree 161 | .generate_multi_proof((0..leaves.len()).collect::>()) 162 | .unwrap(); 163 | 164 | // test compression theretical prefix lengths for size 8 Tree: 165 | // we should send 6 hashes instead of 2*8 = 16 166 | let theoretical_prefix_lengths = vec![0, 2, 1, 2, 0, 2, 1, 2]; 167 | 168 | for (comp_len, exp_len) in zip( 169 | &multi_proof.auth_paths_prefix_lenghts, 170 | &theoretical_prefix_lengths, 171 | ) { 172 | assert_eq!(comp_len, exp_len); 173 | } 174 | 175 | // test that the compressed paths can expand to expected len 176 | for (prefix_len, suffix) in zip( 177 | &multi_proof.auth_paths_prefix_lenghts, 178 | &multi_proof.auth_paths_suffixes, 179 | ) { 180 | assert_eq!(prefix_len + suffix.len(), proofs[0].auth_path.len()); 181 | } 182 | } 183 | } 184 | 185 | mod field_mt_tests { 186 | use crate::{ 187 | crh::poseidon, 188 | merkle_tree::{ 189 | tests::test_utils::poseidon_parameters, Config, IdentityDigestConverter, MerkleTree, 190 | }, 191 | }; 192 | use ark_std::{test_rng, One, UniformRand}; 193 | 194 | type F = ark_ed_on_bls12_381::Fr; 195 | type H = poseidon::CRH; 196 | type TwoToOneH = poseidon::TwoToOneCRH; 197 | 198 | struct FieldMTConfig; 199 | impl Config for FieldMTConfig { 200 | type Leaf = [F]; 201 | type LeafDigest = F; 202 | type LeafInnerDigestConverter = IdentityDigestConverter; 203 | type InnerDigest = F; 204 | type LeafHash = H; 205 | type TwoToOneHash = TwoToOneH; 206 | } 207 | 208 | type FieldMT = MerkleTree; 209 | 210 | fn merkle_tree_test(leaves: &[Vec], update_query: &[(usize, Vec)]) -> () { 211 | let mut leaves = leaves.to_vec(); 212 | let leaf_crh_params = poseidon_parameters(); 213 | let two_to_one_params = leaf_crh_params.clone(); 214 | 215 | let mut tree = FieldMT::new(&leaf_crh_params, &two_to_one_params, &leaves).unwrap(); 216 | 217 | let mut root = tree.root(); 218 | 219 | // test merkle tree functionality without update 220 | for (i, leaf) in leaves.iter().enumerate() { 221 | let proof = tree.generate_proof(i).unwrap(); 222 | assert!(proof 223 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaf.as_slice()) 224 | .unwrap()); 225 | } 226 | 227 | // test the merkle tree multi-proof functionality 228 | let mut multi_proof = tree 229 | .generate_multi_proof((0..leaves.len()).collect::>()) 230 | .unwrap(); 231 | 232 | assert!(multi_proof 233 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaves.clone()) 234 | .unwrap()); 235 | 236 | { 237 | // wrong root should lead to error but do not panic 238 | let wrong_root = root + F::one(); 239 | let proof = tree.generate_proof(0).unwrap(); 240 | assert!(!proof 241 | .verify( 242 | &leaf_crh_params, 243 | &two_to_one_params, 244 | &wrong_root, 245 | leaves[0].as_slice() 246 | ) 247 | .unwrap()); 248 | 249 | // test the merkle tree multi-proof functionality 250 | let multi_proof = tree 251 | .generate_multi_proof((0..leaves.len()).collect::>()) 252 | .unwrap(); 253 | 254 | assert!(!multi_proof 255 | .verify( 256 | &leaf_crh_params, 257 | &two_to_one_params, 258 | &wrong_root, 259 | leaves.clone() 260 | ) 261 | .unwrap()); 262 | } 263 | 264 | // test merkle tree update functionality 265 | for (i, v) in update_query { 266 | tree.update(*i, v).unwrap(); 267 | leaves[*i] = v.to_vec(); 268 | } 269 | 270 | // update the root 271 | root = tree.root(); 272 | 273 | // verify again 274 | for (i, leaf) in leaves.iter().enumerate() { 275 | let proof = tree.generate_proof(i).unwrap(); 276 | 
assert!(proof 277 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaf.as_slice()) 278 | .unwrap()); 279 | } 280 | 281 | multi_proof = tree 282 | .generate_multi_proof((0..leaves.len()).collect::>()) 283 | .unwrap(); 284 | 285 | assert!(multi_proof 286 | .verify(&leaf_crh_params, &two_to_one_params, &root, leaves.clone()) 287 | .unwrap()); 288 | } 289 | 290 | #[test] 291 | fn good_root_test() { 292 | let mut rng = test_rng(); 293 | let mut rand_leaves = || (0..3).map(|_| F::rand(&mut rng)).collect(); 294 | 295 | let mut leaves: Vec> = Vec::new(); 296 | for _ in 0..128u8 { 297 | leaves.push(rand_leaves()) 298 | } 299 | merkle_tree_test( 300 | &leaves, 301 | &vec![ 302 | (2, rand_leaves()), 303 | (3, rand_leaves()), 304 | (5, rand_leaves()), 305 | (111, rand_leaves()), 306 | (127, rand_leaves()), 307 | ], 308 | ) 309 | } 310 | } 311 | -------------------------------------------------------------------------------- /crypto-primitives/src/prf/blake2s/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{prf::PRF, Error}; 2 | #[cfg(not(feature = "std"))] 3 | use ark_std::vec::Vec; 4 | use blake2::{Blake2s256 as B2s, Blake2sMac}; 5 | use digest::Digest; 6 | 7 | #[cfg(feature = "r1cs")] 8 | pub mod constraints; 9 | 10 | #[derive(Clone)] 11 | pub struct Blake2s; 12 | 13 | impl PRF for Blake2s { 14 | type Input = [u8; 32]; 15 | type Output = [u8; 32]; 16 | type Seed = [u8; 32]; 17 | 18 | fn evaluate(seed: &Self::Seed, input: &Self::Input) -> Result { 19 | let eval_time = start_timer!(|| "Blake2s::Eval"); 20 | let mut h = B2s::new(); 21 | h.update(seed.as_ref()); 22 | h.update(input.as_ref()); 23 | let mut result = [0u8; 32]; 24 | result.copy_from_slice(&h.finalize()); 25 | end_timer!(eval_time); 26 | Ok(result) 27 | } 28 | } 29 | 30 | #[derive(Clone)] 31 | pub struct Blake2sWithParameterBlock { 32 | pub output_size: u8, 33 | pub key_size: u8, 34 | pub salt: [u8; 8], 35 | pub personalization: [u8; 8], 36 | } 37 | 38 | impl Blake2sWithParameterBlock { 39 | pub fn evaluate(&self, input: &[u8]) -> Vec { 40 | use digest::{typenum::U32, FixedOutput, Update}; 41 | let eval_time = start_timer!(|| "Blake2sWithParameterBlock::Eval"); 42 | let mut h = 43 | Blake2sMac::::new_with_salt_and_personal(&[], &self.salt, &self.personalization) 44 | .unwrap(); 45 | h.update(input.as_ref()); 46 | end_timer!(eval_time); 47 | h.finalize_fixed().into_iter().collect() 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /crypto-primitives/src/prf/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::prf::PRF; 2 | use ark_ff::Field; 3 | use ark_r1cs_std::prelude::*; 4 | use ark_relations::r1cs::{Namespace, SynthesisError}; 5 | use ark_std::fmt::Debug; 6 | #[cfg(not(feature = "std"))] 7 | use ark_std::vec::Vec; 8 | 9 | pub trait PRFGadget { 10 | type OutputVar: EqGadget 11 | + ToBytesGadget 12 | + AllocVar 13 | + R1CSVar 14 | + Clone 15 | + Debug; 16 | 17 | fn new_seed(cs: impl Into>, seed: &P::Seed) -> Vec>; 18 | 19 | fn evaluate(seed: &[UInt8], input: &[UInt8]) -> Result; 20 | } 21 | -------------------------------------------------------------------------------- /crypto-primitives/src/prf/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::upper_case_acronyms)] 2 | use crate::Error; 3 | use ark_serialize::{CanonicalDeserialize, CanonicalSerialize}; 4 | use ark_std::{fmt::Debug, hash::Hash}; 5 | 6 | 
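// A minimal usage sketch for the `PRF` trait below (assuming the crate is used as
// `ark_crypto_primitives` with the `prf` feature enabled):
//
//     use ark_crypto_primitives::prf::{Blake2s, PRF};
//     let seed = [1u8; 32];
//     let input = [2u8; 32];
//     // 32-byte output, computed as BLAKE2s-256(seed || input)
//     let output = Blake2s::evaluate(&seed, &input).unwrap();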
#[cfg(feature = "r1cs")] 7 | pub mod constraints; 8 | #[cfg(feature = "r1cs")] 9 | pub use constraints::*; 10 | 11 | pub mod blake2s; 12 | pub use self::blake2s::*; 13 | 14 | pub trait PRF { 15 | type Input: CanonicalDeserialize + Default; 16 | type Output: CanonicalSerialize + Eq + Clone + Debug + Default + Hash; 17 | type Seed: CanonicalDeserialize + CanonicalSerialize + Clone + Default + Debug; 18 | 19 | fn evaluate(seed: &Self::Seed, input: &Self::Input) -> Result; 20 | } 21 | -------------------------------------------------------------------------------- /crypto-primitives/src/signature/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::signature::SignatureScheme; 2 | use ark_ff::Field; 3 | use ark_r1cs_std::prelude::*; 4 | use ark_relations::r1cs::SynthesisError; 5 | 6 | pub trait SigVerifyGadget { 7 | type ParametersVar: AllocVar + Clone; 8 | 9 | type PublicKeyVar: ToBytesGadget + AllocVar + Clone; 10 | 11 | type SignatureVar: ToBytesGadget + AllocVar + Clone; 12 | 13 | fn verify( 14 | parameters: &Self::ParametersVar, 15 | public_key: &Self::PublicKeyVar, 16 | // TODO: Should we make this take in bytes or something different? 17 | message: &[UInt8], 18 | signature: &Self::SignatureVar, 19 | ) -> Result, SynthesisError>; 20 | } 21 | 22 | pub trait SigRandomizePkGadget { 23 | type ParametersVar: AllocVar + Clone; 24 | 25 | type PublicKeyVar: ToBytesGadget 26 | + EqGadget 27 | + AllocVar 28 | + Clone; 29 | 30 | fn randomize( 31 | parameters: &Self::ParametersVar, 32 | public_key: &Self::PublicKeyVar, 33 | randomness: &[UInt8], 34 | ) -> Result; 35 | } 36 | -------------------------------------------------------------------------------- /crypto-primitives/src/signature/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::Error; 2 | use ark_serialize::CanonicalSerialize; 3 | use ark_std::{hash::Hash, rand::Rng}; 4 | 5 | #[cfg(feature = "r1cs")] 6 | pub mod constraints; 7 | #[cfg(feature = "r1cs")] 8 | pub use constraints::*; 9 | 10 | pub mod schnorr; 11 | 12 | pub trait SignatureScheme { 13 | type Parameters: Clone + Send + Sync; 14 | type PublicKey: CanonicalSerialize + Hash + Eq + Clone + Default + Send + Sync; 15 | type SecretKey: CanonicalSerialize + Clone + Default; 16 | type Signature: Clone + Default + Send + Sync; 17 | 18 | fn setup(rng: &mut R) -> Result; 19 | 20 | fn keygen( 21 | pp: &Self::Parameters, 22 | rng: &mut R, 23 | ) -> Result<(Self::PublicKey, Self::SecretKey), Error>; 24 | 25 | fn sign( 26 | pp: &Self::Parameters, 27 | sk: &Self::SecretKey, 28 | message: &[u8], 29 | rng: &mut R, 30 | ) -> Result; 31 | 32 | fn verify( 33 | pp: &Self::Parameters, 34 | pk: &Self::PublicKey, 35 | message: &[u8], 36 | signature: &Self::Signature, 37 | ) -> Result; 38 | 39 | fn randomize_public_key( 40 | pp: &Self::Parameters, 41 | public_key: &Self::PublicKey, 42 | randomness: &[u8], 43 | ) -> Result; 44 | 45 | fn randomize_signature( 46 | pp: &Self::Parameters, 47 | signature: &Self::Signature, 48 | randomness: &[u8], 49 | ) -> Result; 50 | } 51 | 52 | #[cfg(test)] 53 | mod test { 54 | use crate::signature::*; 55 | use ark_ec::AdditiveGroup; 56 | use ark_ed_on_bls12_381::EdwardsProjective as JubJub; 57 | use ark_std::{test_rng, UniformRand}; 58 | use blake2::Blake2s256 as Blake2s; 59 | 60 | fn sign_and_verify(message: &[u8]) { 61 | let rng = &mut test_rng(); 62 | let parameters = S::setup::<_>(rng).unwrap(); 63 | let (pk, sk) = S::keygen(¶meters, rng).unwrap(); 64 | let sig 
= S::sign(¶meters, &sk, &message, rng).unwrap(); 65 | assert!(S::verify(¶meters, &pk, &message, &sig).unwrap()); 66 | } 67 | 68 | fn failed_verification(message: &[u8], bad_message: &[u8]) { 69 | let rng = &mut test_rng(); 70 | let parameters = S::setup::<_>(rng).unwrap(); 71 | let (pk, sk) = S::keygen(¶meters, rng).unwrap(); 72 | let sig = S::sign(¶meters, &sk, message, rng).unwrap(); 73 | assert!(!S::verify(¶meters, &pk, bad_message, &sig).unwrap()); 74 | } 75 | 76 | fn randomize_and_verify(message: &[u8], randomness: &[u8]) { 77 | let rng = &mut test_rng(); 78 | let parameters = S::setup::<_>(rng).unwrap(); 79 | let (pk, sk) = S::keygen(¶meters, rng).unwrap(); 80 | let sig = S::sign(¶meters, &sk, message, rng).unwrap(); 81 | assert!(S::verify(¶meters, &pk, message, &sig).unwrap()); 82 | let randomized_pk = S::randomize_public_key(¶meters, &pk, randomness).unwrap(); 83 | let randomized_sig = S::randomize_signature(¶meters, &sig, randomness).unwrap(); 84 | assert!(S::verify(¶meters, &randomized_pk, &message, &randomized_sig).unwrap()); 85 | } 86 | 87 | #[test] 88 | fn schnorr_signature_test() { 89 | let message = "Hi, I am a Schnorr signature!"; 90 | let rng = &mut test_rng(); 91 | sign_and_verify::>(message.as_bytes()); 92 | failed_verification::>( 93 | message.as_bytes(), 94 | "Bad message".as_bytes(), 95 | ); 96 | let mut random_scalar_bytes = Vec::new(); 97 | let random_scalar = ::Scalar::rand(rng); 98 | random_scalar 99 | .serialize_compressed(&mut random_scalar_bytes) 100 | .unwrap(); 101 | randomize_and_verify::>( 102 | message.as_bytes(), 103 | &random_scalar_bytes.as_slice(), 104 | ); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /crypto-primitives/src/signature/schnorr/constraints.rs: -------------------------------------------------------------------------------- 1 | use crate::signature::{ 2 | schnorr::{Parameters, PublicKey, Schnorr}, 3 | SigRandomizePkGadget, 4 | }; 5 | use ark_ec::CurveGroup; 6 | use ark_ff::Field; 7 | use ark_r1cs_std::prelude::*; 8 | use ark_relations::r1cs::{Namespace, SynthesisError}; 9 | #[cfg(not(feature = "std"))] 10 | use ark_std::vec::Vec; 11 | use ark_std::{borrow::Borrow, marker::PhantomData}; 12 | use digest::Digest; 13 | 14 | type ConstraintF = <::BaseField as Field>::BasePrimeField; 15 | 16 | #[derive(Clone)] 17 | pub struct ParametersVar>> 18 | where 19 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 20 | { 21 | generator: GC, 22 | _curve: PhantomData, 23 | } 24 | 25 | #[derive(Derivative)] 26 | #[derivative( 27 | Debug(bound = "C: CurveGroup, GC: CurveVar>"), 28 | Clone(bound = "C: CurveGroup, GC: CurveVar>") 29 | )] 30 | pub struct PublicKeyVar>> 31 | where 32 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 33 | { 34 | pub_key: GC, 35 | #[doc(hidden)] 36 | _group: PhantomData<*const C>, 37 | } 38 | 39 | pub struct SchnorrRandomizePkGadget>> 40 | where 41 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 42 | { 43 | #[doc(hidden)] 44 | _group: PhantomData<*const C>, 45 | #[doc(hidden)] 46 | _group_gadget: PhantomData<*const GC>, 47 | } 48 | 49 | impl SigRandomizePkGadget, ConstraintF> 50 | for SchnorrRandomizePkGadget 51 | where 52 | C: CurveGroup, 53 | GC: CurveVar>, 54 | D: Digest + Send + Sync, 55 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 56 | { 57 | type ParametersVar = ParametersVar; 58 | type PublicKeyVar = PublicKeyVar; 59 | 60 | #[tracing::instrument(target = "r1cs", skip(parameters, public_key, randomness))] 61 | fn randomize( 62 | parameters: &Self::ParametersVar, 63 | public_key: 
&Self::PublicKeyVar, 64 | randomness: &[UInt8>], 65 | ) -> Result { 66 | let base = parameters.generator.clone(); 67 | let randomness = randomness 68 | .iter() 69 | .flat_map(|b| b.to_bits_le().unwrap()) 70 | .collect::>(); 71 | let rand_pk = &public_key.pub_key + &base.scalar_mul_le(randomness.iter())?; 72 | Ok(PublicKeyVar { 73 | pub_key: rand_pk, 74 | _group: PhantomData, 75 | }) 76 | } 77 | } 78 | 79 | impl AllocVar, ConstraintF> for ParametersVar 80 | where 81 | C: CurveGroup, 82 | GC: CurveVar>, 83 | D: Digest, 84 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 85 | { 86 | fn new_variable>>( 87 | cs: impl Into>>, 88 | f: impl FnOnce() -> Result, 89 | mode: AllocationMode, 90 | ) -> Result { 91 | let generator = GC::new_variable(cs, || f().map(|g| g.borrow().generator), mode)?; 92 | Ok(Self { 93 | generator, 94 | _curve: PhantomData, 95 | }) 96 | } 97 | } 98 | 99 | impl AllocVar, ConstraintF> for PublicKeyVar 100 | where 101 | C: CurveGroup, 102 | GC: CurveVar>, 103 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 104 | { 105 | fn new_variable>>( 106 | cs: impl Into>>, 107 | f: impl FnOnce() -> Result, 108 | mode: AllocationMode, 109 | ) -> Result { 110 | let pub_key = GC::new_variable(cs, f, mode)?; 111 | Ok(Self { 112 | pub_key, 113 | _group: PhantomData, 114 | }) 115 | } 116 | } 117 | 118 | impl EqGadget> for PublicKeyVar 119 | where 120 | C: CurveGroup, 121 | GC: CurveVar>, 122 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 123 | { 124 | #[inline] 125 | fn is_eq(&self, other: &Self) -> Result>, SynthesisError> { 126 | self.pub_key.is_eq(&other.pub_key) 127 | } 128 | 129 | #[inline] 130 | fn conditional_enforce_equal( 131 | &self, 132 | other: &Self, 133 | condition: &Boolean>, 134 | ) -> Result<(), SynthesisError> { 135 | self.pub_key 136 | .conditional_enforce_equal(&other.pub_key, condition) 137 | } 138 | 139 | #[inline] 140 | fn conditional_enforce_not_equal( 141 | &self, 142 | other: &Self, 143 | condition: &Boolean>, 144 | ) -> Result<(), SynthesisError> { 145 | self.pub_key 146 | .conditional_enforce_not_equal(&other.pub_key, condition) 147 | } 148 | } 149 | 150 | impl ToBytesGadget> for PublicKeyVar 151 | where 152 | C: CurveGroup, 153 | GC: CurveVar>, 154 | for<'a> &'a GC: GroupOpsBounds<'a, C, GC>, 155 | { 156 | fn to_bytes_le(&self) -> Result>>, SynthesisError> { 157 | self.pub_key.to_bytes_le() 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /crypto-primitives/src/signature/schnorr/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::{signature::SignatureScheme, Error}; 2 | use ark_ec::{AffineRepr, CurveGroup}; 3 | use ark_ff::{ 4 | fields::{Field, PrimeField}, 5 | AdditiveGroup, One, ToConstraintField, UniformRand, Zero, 6 | }; 7 | use ark_serialize::CanonicalSerialize; 8 | #[cfg(not(feature = "std"))] 9 | use ark_std::vec::Vec; 10 | use ark_std::{hash::Hash, marker::PhantomData, ops::Mul, rand::Rng}; 11 | use digest::Digest; 12 | 13 | #[cfg(feature = "r1cs")] 14 | pub mod constraints; 15 | 16 | pub struct Schnorr { 17 | _group: PhantomData, 18 | _hash: PhantomData, 19 | } 20 | 21 | #[derive(Derivative)] 22 | #[derivative(Clone(bound = "C: CurveGroup, H: Digest"), Debug)] 23 | pub struct Parameters { 24 | _hash: PhantomData, 25 | pub generator: C::Affine, 26 | pub salt: [u8; 32], 27 | } 28 | 29 | pub type PublicKey = ::Affine; 30 | 31 | #[derive(Clone, Default, Debug, CanonicalSerialize)] 32 | pub struct SecretKey(pub C::ScalarField); 33 | 34 | #[derive(Clone, Default, 
Debug)] 35 | pub struct Signature { 36 | pub prover_response: C::ScalarField, 37 | pub verifier_challenge: C::ScalarField, 38 | } 39 | 40 | impl SignatureScheme for Schnorr 41 | where 42 | C::ScalarField: PrimeField, 43 | { 44 | type Parameters = Parameters; 45 | type PublicKey = PublicKey; 46 | type SecretKey = SecretKey; 47 | type Signature = Signature; 48 | 49 | fn setup(rng: &mut R) -> Result { 50 | let setup_time = start_timer!(|| "SchnorrSig::Setup"); 51 | 52 | let mut salt = [0u8; 32]; 53 | rng.fill_bytes(&mut salt); 54 | let generator = C::rand(rng).into(); 55 | 56 | end_timer!(setup_time); 57 | Ok(Parameters { 58 | _hash: PhantomData, 59 | generator, 60 | salt, 61 | }) 62 | } 63 | 64 | fn keygen( 65 | parameters: &Self::Parameters, 66 | rng: &mut R, 67 | ) -> Result<(Self::PublicKey, Self::SecretKey), Error> { 68 | let keygen_time = start_timer!(|| "SchnorrSig::KeyGen"); 69 | 70 | let secret_key = C::ScalarField::rand(rng); 71 | let public_key = parameters.generator.mul(secret_key).into(); 72 | 73 | end_timer!(keygen_time); 74 | Ok((public_key, SecretKey(secret_key))) 75 | } 76 | 77 | fn sign( 78 | parameters: &Self::Parameters, 79 | sk: &Self::SecretKey, 80 | message: &[u8], 81 | rng: &mut R, 82 | ) -> Result { 83 | let sign_time = start_timer!(|| "SchnorrSig::Sign"); 84 | // (k, e); 85 | let (random_scalar, verifier_challenge) = loop { 86 | // Sample a random scalar `k` from the prime scalar field. 87 | let random_scalar: C::ScalarField = C::ScalarField::rand(rng); 88 | // Commit to the random scalar via r := k · G. 89 | // This is the prover's first msg in the Sigma protocol. 90 | let prover_commitment = parameters.generator.mul(random_scalar).into_affine(); 91 | 92 | // Hash everything to get verifier challenge. 93 | let mut hash_input = Vec::new(); 94 | parameters.salt.serialize_compressed(&mut hash_input)?; 95 | prover_commitment.serialize_compressed(&mut hash_input)?; 96 | message.serialize_compressed(&mut hash_input)?; 97 | 98 | // Compute the supposed verifier response: e := H(salt || r || msg); 99 | if let Some(verifier_challenge) = 100 | C::ScalarField::from_random_bytes(&D::digest(&hash_input)) 101 | { 102 | break (random_scalar, verifier_challenge); 103 | }; 104 | }; 105 | 106 | // k - xe; 107 | let prover_response = random_scalar - (verifier_challenge * sk.0); 108 | let signature = Signature { 109 | prover_response, 110 | verifier_challenge, 111 | }; 112 | 113 | end_timer!(sign_time); 114 | Ok(signature) 115 | } 116 | 117 | fn verify( 118 | parameters: &Self::Parameters, 119 | pk: &Self::PublicKey, 120 | message: &[u8], 121 | signature: &Self::Signature, 122 | ) -> Result { 123 | let verify_time = start_timer!(|| "SchnorrSig::Verify"); 124 | 125 | let Signature { 126 | prover_response, 127 | verifier_challenge, 128 | } = signature; 129 | let mut claimed_prover_commitment = parameters.generator.mul(*prover_response); 130 | let public_key_times_verifier_challenge = pk.mul(*verifier_challenge); 131 | claimed_prover_commitment += &public_key_times_verifier_challenge; 132 | let claimed_prover_commitment = claimed_prover_commitment.into_affine(); 133 | 134 | let mut hash_input = Vec::new(); 135 | parameters.salt.serialize_compressed(&mut hash_input)?; 136 | claimed_prover_commitment.serialize_compressed(&mut hash_input)?; 137 | message.serialize_compressed(&mut hash_input)?; 138 | 139 | let obtained_verifier_challenge = if let Some(obtained_verifier_challenge) = 140 | C::ScalarField::from_random_bytes(&D::digest(&hash_input)) 141 | { 142 | obtained_verifier_challenge 143 | 
} else { 144 | return Ok(false); 145 | }; 146 | end_timer!(verify_time); 147 | Ok(verifier_challenge == &obtained_verifier_challenge) 148 | } 149 | 150 | fn randomize_public_key( 151 | parameters: &Self::Parameters, 152 | public_key: &Self::PublicKey, 153 | randomness: &[u8], 154 | ) -> Result { 155 | let rand_pk_time = start_timer!(|| "SchnorrSig::RandomizePubKey"); 156 | 157 | let randomized_pk = *public_key; 158 | let base = parameters.generator; 159 | let mut encoded = C::zero(); 160 | for bit in bytes_to_bits(randomness) 161 | .into_iter() 162 | .rev() 163 | .skip_while(|b| !b) 164 | { 165 | encoded.double_in_place(); 166 | if bit { 167 | encoded.add_assign(&base) 168 | } 169 | } 170 | encoded.add_assign(&randomized_pk); 171 | 172 | end_timer!(rand_pk_time); 173 | 174 | Ok(encoded.into()) 175 | } 176 | 177 | fn randomize_signature( 178 | _parameter: &Self::Parameters, 179 | signature: &Self::Signature, 180 | randomness: &[u8], 181 | ) -> Result { 182 | let rand_signature_time = start_timer!(|| "SchnorrSig::RandomizeSig"); 183 | let Signature { 184 | prover_response, 185 | verifier_challenge, 186 | } = signature; 187 | let mut base = C::ScalarField::one(); 188 | let mut multiplier = C::ScalarField::zero(); 189 | for bit in bytes_to_bits(randomness) { 190 | if bit { 191 | multiplier += &base; 192 | } 193 | base.double_in_place(); 194 | } 195 | 196 | let new_sig = Signature { 197 | prover_response: *prover_response - (*verifier_challenge * multiplier), 198 | verifier_challenge: *verifier_challenge, 199 | }; 200 | end_timer!(rand_signature_time); 201 | Ok(new_sig) 202 | } 203 | } 204 | 205 | pub fn bytes_to_bits(bytes: &[u8]) -> Vec { 206 | let mut bits = Vec::with_capacity(bytes.len() * 8); 207 | for byte in bytes { 208 | for i in 0..8 { 209 | let bit = (*byte >> (8 - i - 1)) & 1; 210 | bits.push(bit == 1); 211 | } 212 | } 213 | bits 214 | } 215 | 216 | impl, D: Digest> 217 | ToConstraintField for Parameters 218 | { 219 | #[inline] 220 | fn to_field_elements(&self) -> Option> { 221 | self.generator.into_group().to_field_elements() 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /crypto-primitives/src/snark/mod.rs: -------------------------------------------------------------------------------- 1 | #[cfg(feature = "r1cs")] 2 | pub mod constraints; 3 | #[cfg(feature = "r1cs")] 4 | pub use constraints::*; 5 | 6 | pub use ark_snark::*; 7 | -------------------------------------------------------------------------------- /crypto-primitives/src/sponge/constraints/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::sponge::{Absorb, CryptographicSponge, FieldElementSize}; 2 | use ark_ff::PrimeField; 3 | use ark_r1cs_std::{ 4 | alloc::AllocVar, 5 | boolean::Boolean, 6 | fields::{ 7 | emulated_fp::{ 8 | params::{get_params, OptimizationType}, 9 | AllocatedEmulatedFpVar, EmulatedFpVar, 10 | }, 11 | fp::{AllocatedFp, FpVar}, 12 | }, 13 | uint8::UInt8, 14 | R1CSVar, 15 | }; 16 | use ark_relations::{ 17 | lc, 18 | r1cs::{ConstraintSystemRef, LinearCombination, SynthesisError}, 19 | }; 20 | #[cfg(not(feature = "std"))] 21 | use ark_std::vec::Vec; 22 | 23 | mod absorb; 24 | pub use absorb::*; 25 | 26 | /// Converts little-endian bits to a list of emulated elements. 
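/// Each inner list of booleans is read as the little-endian bit decomposition of one
/// emulated `F`-element, and the result is allocated over the constraint field `CF`.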
27 | pub fn bits_le_to_emulated<'a, F: PrimeField, CF: PrimeField>( 28 | cs: ConstraintSystemRef, 29 | all_emulated_bits_le: impl IntoIterator>>, 30 | ) -> Result>, SynthesisError> { 31 | let all_emulated_bits_le = all_emulated_bits_le.into_iter().collect::>(); 32 | if all_emulated_bits_le.is_empty() { 33 | return Ok(Vec::new()); 34 | } 35 | 36 | let mut max_emulated_bits = 0usize; 37 | for bits in &all_emulated_bits_le { 38 | max_emulated_bits = max_emulated_bits.max(bits.len()); 39 | } 40 | 41 | let mut lookup_table = Vec::>::new(); 42 | let mut cur = F::one(); 43 | for _ in 0..max_emulated_bits { 44 | let repr = AllocatedEmulatedFpVar::::get_limbs_representations( 45 | &cur, 46 | OptimizationType::Constraints, 47 | )?; 48 | lookup_table.push(repr); 49 | cur.double_in_place(); 50 | } 51 | 52 | let params = get_params( 53 | F::MODULUS_BIT_SIZE as usize, 54 | CF::MODULUS_BIT_SIZE as usize, 55 | OptimizationType::Constraints, 56 | ); 57 | 58 | let mut output = Vec::with_capacity(all_emulated_bits_le.len()); 59 | for emulated_bits_le in all_emulated_bits_le { 60 | let mut val = vec![CF::zero(); params.num_limbs]; 61 | let mut lc = vec![LinearCombination::::zero(); params.num_limbs]; 62 | 63 | for (j, bit) in emulated_bits_le.iter().enumerate() { 64 | if bit.value().unwrap_or_default() { 65 | for (k, val) in val.iter_mut().enumerate().take(params.num_limbs) { 66 | *val += &lookup_table[j][k]; 67 | } 68 | } 69 | 70 | #[allow(clippy::needless_range_loop)] 71 | for k in 0..params.num_limbs { 72 | lc[k] = &lc[k] + bit.lc() * lookup_table[j][k]; 73 | } 74 | } 75 | 76 | let mut limbs = Vec::new(); 77 | for k in 0..params.num_limbs { 78 | let gadget = 79 | AllocatedFp::new_witness(ark_relations::ns!(cs, "alloc"), || Ok(val[k])).unwrap(); 80 | lc[k] = lc[k].clone() - (CF::one(), gadget.variable); 81 | cs.enforce_constraint(lc!(), lc!(), lc[k].clone()).unwrap(); 82 | limbs.push(FpVar::::from(gadget)); 83 | } 84 | 85 | output.push(EmulatedFpVar::::Var( 86 | AllocatedEmulatedFpVar:: { 87 | cs: cs.clone(), 88 | limbs, 89 | num_of_additions_over_normal_form: CF::zero(), 90 | is_in_the_normal_form: true, 91 | target_phantom: Default::default(), 92 | }, 93 | )); 94 | } 95 | 96 | Ok(output) 97 | } 98 | 99 | /// Enables simple access to the "gadget" version of the sponge. 100 | /// Simplifies trait bounds in downstream generic code. 101 | pub trait SpongeWithGadget: CryptographicSponge { 102 | /// The gadget version of `Self`. 103 | type Var: CryptographicSpongeVar; 104 | } 105 | 106 | /// The interface for a cryptographic sponge constraints on field `CF`. 107 | /// A sponge can `absorb` or take in inputs and later `squeeze` or output bytes or field elements. 108 | /// The outputs are dependent on previous `absorb` and `squeeze` calls. 109 | pub trait CryptographicSpongeVar: Clone { 110 | /// Parameters used by the sponge. 111 | type Parameters; 112 | 113 | /// Initialize a new instance of the sponge. 114 | fn new(cs: ConstraintSystemRef, params: &Self::Parameters) -> Self; 115 | 116 | /// Returns a ref to the underlying constraint system the sponge is operating in. 117 | fn cs(&self) -> ConstraintSystemRef; 118 | 119 | /// Absorb an input into the sponge. 120 | fn absorb(&mut self, input: &impl AbsorbGadget) -> Result<(), SynthesisError>; 121 | 122 | /// Squeeze `num_bytes` bytes from the sponge. 123 | fn squeeze_bytes(&mut self, num_bytes: usize) -> Result>, SynthesisError>; 124 | 125 | /// Squeeze `num_bit` bits from the sponge. 
126 | fn squeeze_bits(&mut self, num_bits: usize) -> Result>, SynthesisError>; 127 | 128 | /// Squeeze `sizes.len()` emulated field elements from the sponge, where the `i`-th element of 129 | /// the output has size `sizes[i]`. 130 | fn squeeze_emulated_field_elements_with_sizes( 131 | &mut self, 132 | sizes: &[FieldElementSize], 133 | ) -> Result<(Vec>, Vec>>), SynthesisError> { 134 | if sizes.len() == 0 { 135 | return Ok((Vec::new(), Vec::new())); 136 | } 137 | 138 | let cs = self.cs(); 139 | 140 | let mut total_bits = 0usize; 141 | for size in sizes { 142 | total_bits += size.num_bits::(); 143 | } 144 | 145 | let bits = self.squeeze_bits(total_bits)?; 146 | 147 | let mut dest_bits = Vec::>>::with_capacity(sizes.len()); 148 | 149 | let mut bits_window = bits.as_slice(); 150 | for size in sizes { 151 | let num_bits = size.num_bits::(); 152 | let emulated_bits_le = bits_window[..num_bits].to_vec(); 153 | bits_window = &bits_window[num_bits..]; 154 | 155 | dest_bits.push(emulated_bits_le); 156 | } 157 | 158 | let dest_gadgets = bits_le_to_emulated(cs, dest_bits.iter())?; 159 | 160 | Ok((dest_gadgets, dest_bits)) 161 | } 162 | 163 | /// Squeeze `num_elements` emulated field elements from the sponge. 164 | fn squeeze_emulated_field_elements( 165 | &mut self, 166 | num_elements: usize, 167 | ) -> Result<(Vec>, Vec>>), SynthesisError> { 168 | self.squeeze_emulated_field_elements_with_sizes::( 169 | vec![FieldElementSize::Full; num_elements].as_slice(), 170 | ) 171 | } 172 | 173 | /// Creates a new sponge with applied domain separation. 174 | fn fork(&self, domain: &[u8]) -> Result { 175 | let mut new_sponge = self.clone(); 176 | 177 | let mut input = Absorb::to_sponge_bytes_as_vec(&domain.len()); 178 | input.extend_from_slice(domain); 179 | 180 | let elems: Vec = input.to_sponge_field_elements_as_vec(); 181 | let elem_vars = elems 182 | .into_iter() 183 | .map(|elem| FpVar::Constant(elem)) 184 | .collect::>(); 185 | 186 | new_sponge.absorb(&elem_vars)?; 187 | 188 | Ok(new_sponge) 189 | } 190 | 191 | /// Squeeze `num_elements` field elements from the sponge. 
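/// These are native elements of the constraint field `CF`, in contrast to the emulated elements squeezed above.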
191 |     /// Squeeze `num_elements` field elements from the sponge.
192 |     fn squeeze_field_elements(
193 |         &mut self,
194 |         num_elements: usize,
195 |     ) -> Result<Vec<FpVar<CF>>, SynthesisError>;
196 | }
197 | 
--------------------------------------------------------------------------------
/crypto-primitives/src/sponge/merlin/mod.rs:
--------------------------------------------------------------------------------
1 | use crate::sponge::{Absorb, CryptographicSponge};
2 | #[cfg(not(feature = "std"))]
3 | use ark_std::vec::Vec;
4 | pub use merlin::Transcript;
5 | 
6 | impl CryptographicSponge for Transcript {
7 |     type Config = &'static [u8];
8 | 
9 |     fn new(params: &Self::Config) -> Self {
10 |         Transcript::new(*params)
11 |     }
12 | 
13 |     fn absorb(&mut self, input: &impl Absorb) {
14 |         self.append_message(b"", &input.to_sponge_bytes_as_vec());
15 |     }
16 | 
17 |     fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec<u8> {
18 |         let mut dest = vec![0; num_bytes];
19 |         self.challenge_bytes(b"", &mut dest);
20 |         dest
21 |     }
22 | 
23 |     fn squeeze_bits(&mut self, num_bits: usize) -> Vec<bool> {
24 |         let num_bytes = (num_bits + 7) / 8;
25 |         let mut tmp = vec![0; num_bytes];
26 |         self.challenge_bytes(b"", &mut tmp);
27 |         let dest = tmp
28 |             .iter()
29 |             .flat_map(|byte| (0..8u32).rev().map(move |i| (byte >> i) & 1 == 1))
30 |             .collect::<Vec<_>>();
31 |         dest[..num_bits].to_vec()
32 |     }
33 | }
34 | 
--------------------------------------------------------------------------------
/crypto-primitives/src/sponge/mod.rs:
--------------------------------------------------------------------------------
1 | use ark_ff::PrimeField;
2 | #[cfg(not(feature = "std"))]
3 | use ark_std::vec::Vec;
4 | 
5 | /// Infrastructure for the constraints counterparts.
6 | #[cfg(feature = "r1cs")]
7 | pub mod constraints;
8 | 
9 | mod absorb;
10 | pub use absorb::*;
11 | 
12 | /// The sponge for Poseidon.
13 | ///
14 | /// This implementation of Poseidon is entirely from Fractal's implementation in [COS20][cos]
15 | /// with small syntax changes.
16 | ///
17 | /// [cos]: https://eprint.iacr.org/2019/1076
18 | pub mod poseidon;
19 | 
20 | /// The sponge for [Merlin][merlin].
21 | ///
22 | /// [merlin]: https://merlin.cool/
23 | pub mod merlin;
24 | 
25 | #[cfg(test)]
26 | mod test;
27 | 
28 | /// An enum for specifying the output field element size.
29 | #[derive(Clone, Copy, Eq, PartialEq)]
30 | pub enum FieldElementSize {
31 |     /// Sample field elements from the entire field.
32 |     Full,
33 | 
34 |     /// Sample field elements from a subset of the field, specified by the maximum number of bits.
35 |     Truncated(usize),
36 | }
37 | 
38 | impl FieldElementSize {
39 |     pub(crate) fn num_bits<F: PrimeField>(&self) -> usize {
40 |         if let FieldElementSize::Truncated(num_bits) = self {
41 |             if *num_bits > (F::MODULUS_BIT_SIZE as usize) {
42 |                 panic!("num_bits is greater than the capacity of the field.")
43 |             }
44 |             *num_bits
45 |         } else {
46 |             (F::MODULUS_BIT_SIZE - 1) as usize
47 |         }
48 |     }
49 | 
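    // Illustrative note on the two variants (assumes the crate-internal test field `Fr`):
    //
    //     assert_eq!(FieldElementSize::Truncated(128).num_bits::<Fr>(), 128);
    //     assert_eq!(
    //         FieldElementSize::Full.num_bits::<Fr>(),
    //         (Fr::MODULUS_BIT_SIZE - 1) as usize
    //     );
    //
    // `Full` deliberately uses one bit fewer than the modulus size so the sampled value always
    // fits in the field; asking `Truncated` for more bits than the field holds panics. `sum`,
    // defined next, simply adds these per-element budgets up.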
50 |     /// Calculate the sum of field element sizes in `elements`.
51 |     pub fn sum<F: PrimeField>(elements: &[Self]) -> usize {
52 |         elements.iter().map(|item| item.num_bits::<F>()).sum()
53 |     }
54 | }
55 | 
56 | /// Default implementation of `CryptographicSponge::squeeze_field_elements_with_sizes`.
57 | pub(crate) fn squeeze_field_elements_with_sizes_default_impl<F: PrimeField>(
58 |     sponge: &mut impl CryptographicSponge,
59 |     sizes: &[FieldElementSize],
60 | ) -> Vec<F> {
61 |     if sizes.is_empty() {
62 |         return Vec::new();
63 |     }
64 | 
65 |     let mut total_bits = 0usize;
66 |     for size in sizes {
67 |         total_bits += size.num_bits::<F>();
68 |     }
69 | 
70 |     let bits = sponge.squeeze_bits(total_bits);
71 |     let mut bits_window = bits.as_slice();
72 | 
73 |     let mut output = Vec::with_capacity(sizes.len());
74 |     for size in sizes {
75 |         let num_bits = size.num_bits::<F>();
76 |         let emulated_bits_le: Vec<bool> = bits_window[..num_bits].to_vec();
77 |         bits_window = &bits_window[num_bits..];
78 | 
79 |         let emulated_bytes = emulated_bits_le
80 |             .chunks(8)
81 |             .map(|bits| {
82 |                 let mut byte = 0u8;
83 |                 for (i, &bit) in bits.iter().enumerate() {
84 |                     if bit {
85 |                         byte += 1 << i;
86 |                     }
87 |                 }
88 |                 byte
89 |             })
90 |             .collect::<Vec<_>>();
91 | 
92 |         output.push(F::from_le_bytes_mod_order(emulated_bytes.as_slice()));
93 |     }
94 | 
95 |     output
96 | }
97 | 
98 | /// The interface for a cryptographic sponge.
99 | /// A sponge can `absorb` (take in) inputs and later `squeeze` (output) bytes or field elements.
100 | /// The outputs depend on all previous `absorb` and `squeeze` calls.
101 | pub trait CryptographicSponge: Clone {
102 |     /// The configuration of the sponge.
103 |     type Config;
104 | 
105 |     /// Initialize a new instance of the sponge.
106 |     fn new(params: &Self::Config) -> Self;
107 | 
108 |     /// Absorb an input into the sponge.
109 |     fn absorb(&mut self, input: &impl Absorb);
110 | 
111 |     /// Squeeze `num_bytes` bytes from the sponge.
112 |     fn squeeze_bytes(&mut self, num_bytes: usize) -> Vec<u8>;
113 | 
114 |     /// Squeeze `num_bits` bits from the sponge.
115 |     fn squeeze_bits(&mut self, num_bits: usize) -> Vec<bool>;
116 | 
117 |     /// Squeeze `sizes.len()` field elements from the sponge, where the `i`-th element of
118 |     /// the output has size `sizes[i]`.
119 |     ///
120 |     /// If the implementation is field-based, to squeeze native field elements,
121 |     /// call `self.squeeze_native_field_elements` instead.
122 |     ///
123 |     /// TODO: Support general Field.
124 |     ///
125 |     /// Note that when `FieldElementSize` is `Full`, the output is not strictly uniform over the
126 |     /// field: it is uniform over the range \[0, 2^{F::MODULUS_BIT_SIZE - 1}\).
127 |     fn squeeze_field_elements_with_sizes<F: PrimeField>(
128 |         &mut self,
129 |         sizes: &[FieldElementSize],
130 |     ) -> Vec<F> {
131 |         squeeze_field_elements_with_sizes_default_impl(self, sizes)
132 |     }
133 | 
134 |     /// Squeeze `num_elements` field elements from the sponge.
135 |     ///
136 |     /// Because of a Rust limitation, for field-based implementations, using this method to squeeze
137 |     /// native field elements incurs a runtime casting cost. For better efficiency, use `squeeze_native_field_elements`.
138 |     fn squeeze_field_elements<F: PrimeField>(&mut self, num_elements: usize) -> Vec<F> {
139 |         self.squeeze_field_elements_with_sizes::<F>(
140 |             vec![FieldElementSize::Full; num_elements].as_slice(),
141 |         )
142 |     }
143 | 
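    // Illustrative driver for this trait (assumes the Poseidon sponge from this crate, a
    // previously built `cfg: PoseidonConfig<Fr>`, and some `input: impl Absorb`):
    //
    //     let mut sponge = PoseidonSponge::<Fr>::new(&cfg);
    //     sponge.absorb(&input);
    //     let bytes = sponge.squeeze_bytes(32);              // Vec<u8>
    //     let elems = sponge.squeeze_field_elements::<Fr>(1); // Vec<Fr>
    //
    // Field-based sponges additionally implement `FieldBasedCryptographicSponge` (defined below),
    // whose `squeeze_native_field_elements` skips the bit-repacking done by the default impls here.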
144 |     /// Creates a new sponge with applied domain separation.
145 |     fn fork(&self, domain: &[u8]) -> Self {
146 |         let mut new_sponge = self.clone();
147 | 
148 |         let mut input = Absorb::to_sponge_bytes_as_vec(&domain.len());
149 |         input.extend_from_slice(domain);
150 |         new_sponge.absorb(&input);
151 | 
152 |         new_sponge
153 |     }
154 | }
155 | 
156 | /// The interface for a field-based cryptographic sponge.
157 | /// `CF` is the native field used by the cryptographic sponge implementation.
158 | pub trait FieldBasedCryptographicSponge<CF: PrimeField>: CryptographicSponge {
159 |     /// Squeeze `num_elements` field elements from the sponge.
160 |     fn squeeze_native_field_elements(&mut self, num_elements: usize) -> Vec<CF>;
161 | 
162 |     /// Squeeze `sizes.len()` field elements from the sponge, where the `i`-th element of
163 |     /// the output has size `sizes[i]`.
164 |     fn squeeze_native_field_elements_with_sizes(&mut self, sizes: &[FieldElementSize]) -> Vec<CF> {
165 |         let mut all_full_sizes = true;
166 |         for size in sizes {
167 |             if *size != FieldElementSize::Full {
168 |                 all_full_sizes = false;
169 |                 break;
170 |             }
171 |         }
172 | 
173 |         if all_full_sizes {
174 |             self.squeeze_native_field_elements(sizes.len())
175 |         } else {
176 |             squeeze_field_elements_with_sizes_default_impl(self, sizes)
177 |         }
178 |     }
179 | }
180 | 
181 | /// An extension for the interface of a cryptographic sponge.
182 | /// In addition to operations defined in `CryptographicSponge`, `SpongeExt` can convert itself to
183 | /// a state, and instantiate itself from a state.
184 | pub trait SpongeExt: CryptographicSponge {
185 |     /// The full state of the cryptographic sponge.
186 |     type State: Clone;
187 |     /// Returns a sponge that uses `state`.
188 |     fn from_state(state: Self::State, params: &Self::Config) -> Self;
189 |     /// Consumes `self` and returns the state.
190 |     fn into_state(self) -> Self::State;
191 | }
192 | 
193 | /// The mode structure for duplex sponges.
194 | #[derive(Clone, Debug)]
195 | pub enum DuplexSpongeMode {
196 |     /// The sponge is currently absorbing data.
197 |     Absorbing {
198 |         /// Next position of the state to be XOR-ed when absorbing.
199 |         next_absorb_index: usize,
200 |     },
201 |     /// The sponge is currently squeezing data out.
202 |     Squeezing {
203 |         /// Next position of the state to be outputted when squeezing.
204 |         next_squeeze_index: usize,
205 |     },
206 | }
207 | 
--------------------------------------------------------------------------------
/crypto-primitives/src/sponge/poseidon/grain_lfsr.rs:
--------------------------------------------------------------------------------
1 | #![allow(dead_code)]
2 | 
3 | use ark_ff::{BigInteger, PrimeField};
4 | #[cfg(not(feature = "std"))]
5 | use ark_std::vec::Vec;
6 | 
7 | pub struct PoseidonGrainLFSR {
8 |     pub prime_num_bits: u64,
9 | 
10 |     pub state: [bool; 80],
11 |     pub head: usize,
12 | }
13 | 
14 | #[allow(unused_variables)]
15 | impl PoseidonGrainLFSR {
16 |     pub fn new(
17 |         is_sbox_an_inverse: bool,
18 |         prime_num_bits: u64,
19 |         state_len: u64,
20 |         num_full_rounds: u64,
21 |         num_partial_rounds: u64,
22 |     ) -> Self {
23 |         let mut state = [false; 80];
24 | 
25 |         // b0, b1 describe the field
26 |         state[1] = true;
27 | 
28 |         // b2, ..., b5 describe the S-box
29 |         if is_sbox_an_inverse {
30 |             state[5] = true;
31 |         } else {
32 |             state[5] = false;
33 |         }
34 | 
35 |         // b6, ..., b17 are the binary representation of n (prime_num_bits)
36 |         {
37 |             let mut cur = prime_num_bits;
38 |             for i in (6..=17).rev() {
39 |                 state[i] = cur & 1 == 1;
40 |                 cur >>= 1;
41 |             }
42 |         }
43 | 
44 |         // b18, ..., b29 are the binary representation of t (state_len, rate + capacity)
45 |         {
46 |             let mut cur = state_len;
47 |             for i in (18..=29).rev() {
48 |                 state[i] = cur & 1 == 1;
49 |                 cur >>= 1;
50 |             }
51 |         }
52 | 
53 |         // b30, ..., b39 are the binary representation of R_F (the number of full rounds)
54 |         {
55 |             let mut cur = num_full_rounds;
56 |             for i in (30..=39).rev() {
57 |                 state[i] = cur & 1 == 1;
58 |                 cur >>= 1;
59 |             }
60 |         }
61 | 
62 |         // b40, ..., b49 are the binary representation of R_P (the number of partial rounds)
63 |         {
64 |             let mut cur = num_partial_rounds;
65 |             for i in (40..=49).rev() {
66 |                 state[i] = cur & 1 == 1;
67 |                 cur >>= 1;
68 |             }
69 |         }
70 | 
71 |         // b50, ..., b79 are set to 1
72 |         for i in 50..=79 {
73 |             state[i] = true;
74 |         }
75 | 
76 |         let head = 0;
77 | 
78 |         let mut res = Self {
79 |             prime_num_bits,
80 |             state,
81 |             head,
82 |         };
83 |         res.init();
84 |         res
85 |     }
86 | 
87 |     pub fn get_bits(&mut self, num_bits: usize) -> Vec<bool> {
88 |         let mut res = Vec::new();
89 | 
90 |         for _ in 0..num_bits {
91 |             // Obtain the first bit
92 |             let mut new_bit = self.update();
93 | 
94 |             // Loop until the first bit is true
95 |             while !new_bit {
96 |                 // Discard the second bit
97 |                 let _ = self.update();
98 |                 // Obtain another first bit
99 |                 new_bit = self.update();
100 |             }
101 | 
102 |             // Obtain the second bit
103 |             res.push(self.update());
104 |         }
105 | 
106 |         res
107 |     }
108 | 
109 |     pub fn get_field_elements_rejection_sampling<F: PrimeField>(
110 |         &mut self,
111 |         num_elems: usize,
112 |     ) -> Vec<F> {
113 |         assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
114 | 
115 |         let mut res = Vec::new();
116 |         for _ in 0..num_elems {
117 |             // Perform rejection sampling
118 |             loop {
119 |                 // Obtain n bits and make it most-significant-bit first
120 |                 let mut bits = self.get_bits(self.prime_num_bits as usize);
121 |                 bits.reverse();
122 | 
123 |                 // Construct the number
124 |                 let bigint = F::BigInt::from_bits_le(&bits);
125 | 
126 |                 if let Some(f) = F::from_bigint(bigint) {
127 |                     res.push(f);
128 |                     break;
129 |                 }
130 |             }
131 |         }
132 | 
133 |         res
134 |     }
135 | 
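    // Clarifying note (not from the source file): the two samplers adjacent to this point differ
    // only in how LFSR output becomes a field element. `get_field_elements_rejection_sampling`
    // above keeps drawing fresh `prime_num_bits`-bit strings until one is already a canonical
    // element, so its output is uniform over the field; `get_field_elements_mod_p` below packs
    // the bits into little-endian bytes and reduces modulo p, which never re-draws but is very
    // slightly biased. Both consume bits from `get_bits`, which inspects the raw LFSR stream in
    // pairs and only emits the second bit of a pair whose first bit is 1.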
136 |     pub fn get_field_elements_mod_p<F: PrimeField>(&mut self, num_elems: usize) -> Vec<F> {
137 |         assert_eq!(F::MODULUS_BIT_SIZE as u64, self.prime_num_bits);
138 | 
139 |         let mut res = Vec::new();
140 |         for _ in 0..num_elems {
141 |             // Obtain n bits and make it most-significant-bit first
142 |             let mut bits = self.get_bits(self.prime_num_bits as usize);
143 |             bits.reverse();
144 | 
145 |             let bytes = bits
146 |                 .chunks(8)
147 |                 .map(|chunk| {
148 |                     let mut result = 0u8;
149 |                     for (i, bit) in chunk.iter().enumerate() {
150 |                         result |= u8::from(*bit) << i
151 |                     }
152 |                     result
153 |                 })
154 |                 .collect::<Vec<u8>>();
155 | 
156 |             res.push(F::from_le_bytes_mod_order(&bytes));
157 |         }
158 | 
159 |         res
160 |     }
161 | 
162 |     #[inline]
163 |     fn update(&mut self) -> bool {
164 |         let new_bit = self.state[(self.head + 62) % 80]
165 |             ^ self.state[(self.head + 51) % 80]
166 |             ^ self.state[(self.head + 38) % 80]
167 |             ^ self.state[(self.head + 23) % 80]
168 |             ^ self.state[(self.head + 13) % 80]
169 |             ^ self.state[self.head];
170 |         self.state[self.head] = new_bit;
171 |         self.head += 1;
172 |         self.head %= 80;
173 | 
174 |         new_bit
175 |     }
176 | 
177 |     fn init(&mut self) {
178 |         for _ in 0..160 {
179 |             let _ = self.update();
180 |         }
181 |     }
182 | }
183 | 
184 | #[cfg(test)]
185 | mod test {
186 |     use crate::sponge::poseidon::grain_lfsr::PoseidonGrainLFSR;
187 |     use crate::sponge::test::Fr;
188 |     use ark_ff::MontFp;
189 | 
190 |     #[test]
191 |     fn test_grain_lfsr_consistency() {
192 |         let mut lfsr = PoseidonGrainLFSR::new(false, 255, 3, 8, 31);
193 | 
194 |         assert_eq!(
195 |             lfsr.get_field_elements_rejection_sampling::<Fr>(1)[0],
196 |             MontFp!(
197 |                 "27117311055620256798560880810000042840428971800021819916023577129547249660720"
198 |             )
199 |         );
200 |         assert_eq!(
201 |             lfsr.get_field_elements_rejection_sampling::<Fr>(1)[0],
202 |             MontFp!(
203 |                 "51641662388546346858987925410984003801092143452466182801674685248597955169158"
204 |             )
205 |         );
206 |         assert_eq!(
207 |             lfsr.get_field_elements_mod_p::<Fr>(1)[0],
208 |             MontFp!(
209 |                 "30468495022634911716522728179277518871747767531215914044579216845399211650580"
210 |             )
211 |         );
212 |         assert_eq!(
213 |             lfsr.get_field_elements_mod_p::<Fr>(1)[0],
214 |             MontFp!(
215 |                 "17250718238509906485015112994867732544602358855445377986727968022920517907825"
216 |             )
217 |         );
218 |     }
219 | }
220 | 
--------------------------------------------------------------------------------
/crypto-primitives/src/sponge/test.rs:
--------------------------------------------------------------------------------
1 | use crate::sponge::poseidon::{PoseidonDefaultConfig, PoseidonDefaultConfigEntry};
2 | use ark_ff::fields::Fp256;
3 | use ark_ff::{MontBackend, MontConfig};
4 | 
5 | #[derive(MontConfig)]
6 | #[modulus = "52435875175126190479447740508185965837690552500527637822603658699938581184513"]
7 | #[generator = "7"]
8 | pub struct FrBackend;
9 | 
10 | type FrConfig = MontBackend<FrBackend, 4>;
11 | pub type Fr = Fp256<FrConfig>;
12 | 
13 | impl PoseidonDefaultConfig<4> for FrConfig {
14 |     const PARAMS_OPT_FOR_CONSTRAINTS: [PoseidonDefaultConfigEntry; 7] = [
15 |         PoseidonDefaultConfigEntry::new(2, 17, 8, 31, 0),
16 |         PoseidonDefaultConfigEntry::new(3, 5, 8, 56, 0),
17 |         PoseidonDefaultConfigEntry::new(4, 5, 8, 56, 0),
18 |         PoseidonDefaultConfigEntry::new(5, 5, 8, 57, 0),
19 |         PoseidonDefaultConfigEntry::new(6, 5, 8, 57, 0),
20 |         PoseidonDefaultConfigEntry::new(7, 5, 8, 57, 0),
21 |         PoseidonDefaultConfigEntry::new(8, 5, 8, 57, 0),
22 |     ];
23 |     const PARAMS_OPT_FOR_WEIGHTS: [PoseidonDefaultConfigEntry; 7] = [
24 |         PoseidonDefaultConfigEntry::new(2, 257, 8, 13, 0),
25 |         PoseidonDefaultConfigEntry::new(3, 257, 8, 13, 0),
26 |         PoseidonDefaultConfigEntry::new(4, 257, 8, 13, 0),
27 |         PoseidonDefaultConfigEntry::new(5, 257, 8, 13, 0),
28 |         PoseidonDefaultConfigEntry::new(6, 257, 8, 13, 0),
29 |         PoseidonDefaultConfigEntry::new(7, 257, 8, 13, 0),
30 |         PoseidonDefaultConfigEntry::new(8, 257, 8, 13, 0),
31 |     ];
32 | }
33 | 
--------------------------------------------------------------------------------
/macros/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "ark-crypto-primitives-macros"
3 | description.workspace = true
4 | documentation.workspace = true
5 | version.workspace = true
6 | authors.workspace = true
7 | homepage.workspace = true
8 | repository.workspace = true
9 | categories.workspace = true
10 | include.workspace = true
11 | license.workspace = true
12 | edition.workspace = true
13 | 
14 | [dependencies]
15 | quote = { version = "1.0" }
16 | syn = { version = "2.0" }
17 | proc-macro2 = { version = "1.0" }
18 | 
19 | [lib]
20 | proc-macro = true
21 | 
--------------------------------------------------------------------------------
/macros/LICENSE-APACHE:
--------------------------------------------------------------------------------
1 | ../LICENSE-APACHE
--------------------------------------------------------------------------------
/macros/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | ../LICENSE-MIT
--------------------------------------------------------------------------------
/macros/src/lib.rs:
--------------------------------------------------------------------------------
1 | extern crate proc_macro;
2 | use proc_macro::TokenStream;
3 | use proc_macro2::TokenStream as TokenStream2;
4 | use quote::quote;
5 | use syn::{parse_macro_input, Data, DeriveInput, Fields, Index};
6 | 
7 | #[proc_macro_derive(Absorb)]
8 | pub fn derive_absorb(input: TokenStream) -> TokenStream {
9 |     let input = parse_macro_input!(input as DeriveInput);
10 | 
11 |     let name = input.ident;
12 |     let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
13 | 
14 |     let len = if let Data::Struct(ref data_struct) = input.data {
15 |         data_struct.fields.len()
16 |     } else {
17 |         panic!(
18 |             "`Absorb` can only be derived for structs, {} is not a struct",
19 |             name
20 |         );
21 |     };
22 | 
23 |     let mut to_sponge_bytes = Vec::<TokenStream2>::with_capacity(len);
24 |     let mut to_sponge_field_elements = Vec::<TokenStream2>::with_capacity(len);
25 | 
26 |     match input.data {
27 |         Data::Struct(ref data_struct) => match data_struct.fields {
28 |             Fields::Named(ref fields) => {
29 |                 let _: Vec<_> = fields
30 |                     .named
31 |                     .iter()
32 |                     .map(|f| {
33 |                         let name = &f.ident;
34 |                         to_sponge_bytes.push(quote! {
35 |                             Absorb::to_sponge_bytes(&self.#name, dest);
36 |                         });
37 |                     })
38 |                     .collect();
39 | 
40 |                 let _: Vec<_> = fields
41 |                     .named
42 |                     .iter()
43 |                     .map(|f| {
44 |                         let name = &f.ident;
45 |                         to_sponge_field_elements.push(quote! {
46 |                             Absorb::to_sponge_field_elements(&self.#name, dest);
47 |                         });
48 |                     })
49 |                     .collect();
50 |             }
51 |             Fields::Unnamed(ref fields) => {
52 |                 let _: Vec<_> = fields
53 |                     .unnamed
54 |                     .iter()
55 |                     .enumerate()
56 |                     .map(|(i, _)| {
57 |                         let index = Index::from(i);
58 |                         to_sponge_bytes.push(quote! {
59 |                             Absorb::to_sponge_bytes(&self.#index, dest);
60 |                         });
61 |                     })
62 |                     .collect();
63 | 
64 |                 let _: Vec<_> = fields
65 |                     .unnamed
66 |                     .iter()
67 |                     .enumerate()
68 |                     .map(|(i, _)| {
69 |                         let index = Index::from(i);
70 |                         to_sponge_field_elements.push(quote! {
71 |                             Absorb::to_sponge_field_elements(&self.#index, dest);
72 |                         });
73 |                     })
74 |                     .collect();
75 |             }
76 |             _ => panic!("Absorb only supports named and unnamed fields"),
77 |         },
78 |         // this should be unreachable, we already checked
79 |         _ => panic!("Absorb only supports structs"),
80 |     }
81 | 
82 |     quote! {
83 |         impl #impl_generics Absorb for #name #ty_generics #where_clause {
84 |             fn to_sponge_bytes(&self, dest: &mut Vec<u8>) {
85 |                 #( #to_sponge_bytes )*
86 |             }
87 | 
88 |             fn to_sponge_field_elements<F: ark_ff::PrimeField>(&self, dest: &mut Vec<F>) {
89 |                 #( #to_sponge_field_elements )*
90 |             }
91 |         }
92 |     }
93 |     .into()
94 | }
95 | 
--------------------------------------------------------------------------------
/scripts/linkify_changelog.py:
--------------------------------------------------------------------------------
1 | import fileinput
2 | import os
3 | import re
4 | import sys
5 | 
6 | # Set this to the name of the repo, if you don't want it to be read from the filesystem.
7 | # It assumes the changelog file is in the root of the repo.
8 | repo_name = ""
9 | 
10 | # This script goes through the provided file, and replaces any "- \#<number>"
11 | # with the valid markdown-formatted link to it, e.g.
12 | # "- [\#<number>](https://github.com/arkworks-rs/template/pull/<number>)".
13 | # Note that if the number is for an issue, GitHub will auto-redirect you when you click the link.
14 | # It is safe to run the script multiple times in succession.
15 | #
16 | # Example usage: $ python3 linkify_changelog.py ../CHANGELOG.md
17 | changelog_path = sys.argv[1]
18 | if repo_name == "":
19 |     path = os.path.abspath(changelog_path)
20 |     components = path.split(os.path.sep)
21 |     repo_name = components[-2]
22 | 
23 | for line in fileinput.input(inplace=True):
24 |     line = re.sub(
25 |         r"\- #([0-9]*)",
26 |         r"- [\#\1](https://github.com/arkworks-rs/" + repo_name + r"/pull/\1)",
27 |         line.rstrip(),
28 |     )
29 |     # edits the current file
30 |     print(line)
--------------------------------------------------------------------------------
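# Hypothetical before/after illustration for the script above (the PR number and entry text are
# invented; `repo_name` is inferred from the directory containing the changelog):
#
#   before:  - #123 Add Poseidon sponge gadget
#   after:   - [\#123](https://github.com/arkworks-rs/crypto-primitives/pull/123) Add Poseidon sponge gadget
#
# Running it twice is safe: an already-linkified line no longer matches the `- #<number>` pattern.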