├── .cargo-rdme.toml ├── .cargo └── config.toml ├── .github └── workflows │ ├── pr.yml │ ├── release-pr.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── assets └── audit.pdf ├── light-poseidon ├── Cargo.toml ├── benches │ └── bn254_x5.rs ├── src │ ├── lib.rs │ └── parameters │ │ ├── bn254_x5.rs │ │ └── mod.rs └── tests │ └── bn254_fq_x5.rs └── xtask ├── Cargo.toml └── src ├── generate_parameters.rs └── main.rs /.cargo-rdme.toml: -------------------------------------------------------------------------------- 1 | workspace-project = "light-poseidon" 2 | heading-base-level = 1 3 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [alias] 2 | xtask = "run --package xtask --" -------------------------------------------------------------------------------- /.github/workflows/pr.yml: -------------------------------------------------------------------------------- 1 | name: "Lint PR" 2 | 3 | on: 4 | pull_request: 5 | branch: 6 | - main 7 | types: 8 | - opened 9 | - edited 10 | - synchronize 11 | 12 | permissions: 13 | pull-requests: read 14 | 15 | jobs: 16 | main: 17 | name: Validate PR title 18 | runs-on: ubuntu-latest 19 | steps: 20 | - uses: amannn/action-semantic-pull-request@v5 21 | env: 22 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 23 | with: 24 | scopes: | 25 | core 26 | params 27 | bench 28 | xtask 29 | -------------------------------------------------------------------------------- /.github/workflows/release-pr.yml: -------------------------------------------------------------------------------- 1 | name: Open a Rust release PR 2 | on: 3 | workflow_dispatch: 4 | inputs: 5 | version: 6 | description: Version to release 7 | required: true 8 | type: string 9 | 10 | jobs: 11 | make-release-pr: 12 | permissions: 13 | id-token: write 14 | pull-requests: write 15 | contents: write 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: actions/checkout@v3 19 | 20 | - uses: chainguard-dev/actions/setup-gitsign@main 21 | 22 | - name: Install Rust 23 | uses: dtolnay/rust-toolchain@master 24 | with: 25 | toolchain: stable 26 | 27 | - name: Install cargo-release 28 | run: | 29 | VERSION=$(curl --silent "https://api.github.com/repos/crate-ci/cargo-release/releases/latest" | jq -r .tag_name) 30 | wget https://github.com/crate-ci/cargo-release/releases/download/$VERSION/cargo-release-$VERSION-x86_64-unknown-linux-gnu.tar.gz 31 | tar -xzvf cargo-release-$VERSION-x86_64-unknown-linux-gnu.tar.gz --wildcards '*cargo-release' --strip-components=1 32 | cp cargo-release $HOME/.cargo/bin 33 | 34 | - uses: cargo-bins/release-pr@v2 35 | with: 36 | github-token: ${{ secrets.GITHUB_TOKEN }} 37 | version: ${{ inputs.version }} 38 | crate-name: light-poseidon 39 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - "*" 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v3 18 | 19 | - name: Release 20 | uses: softprops/action-gh-release@v1 21 | if: startsWith(github.ref, 'refs/tags/') 22 | with: 23 | token: ${{ secrets.GITHUB_TOKEN }} 24 | 25 | - name: Run cargo publish 26 | env: 27 | CARGO_REGISTRY_TOKEN: ${{ 
secrets.CRATES_IO_TOKEN }} 28 | run: | 29 | cargo publish -p light-poseidon --token $CARGO_REGISTRY_TOKEN 30 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | branches: 7 | - main 8 | 9 | name: test 10 | 11 | jobs: 12 | test: 13 | name: test 14 | runs-on: ubuntu-latest 15 | steps: 16 | - name: Checkout sources 17 | uses: actions/checkout@v2 18 | 19 | - name: Install stable toolchain 20 | uses: actions-rs/toolchain@v1 21 | with: 22 | profile: minimal 23 | toolchain: stable 24 | override: true 25 | components: rustfmt, clippy 26 | 27 | - name: Run cargo check 28 | uses: actions-rs/cargo@v1 29 | with: 30 | command: check 31 | 32 | - name: Run cargo test 33 | uses: actions-rs/cargo@v1 34 | with: 35 | command: test 36 | 37 | - name: Run cargo fmt 38 | uses: actions-rs/cargo@v1 39 | with: 40 | command: fmt 41 | args: --all -- --check 42 | 43 | - name: Run cargo clippy 44 | uses: actions-rs/cargo@v1 45 | with: 46 | command: clippy 47 | args: -- -D warnings 48 | 49 | - name: Check README 50 | run: | 51 | cargo install cargo-rdme 52 | cargo rdme --check 53 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | 13 | # Added by cargo 14 | 15 | /target 16 | /Cargo.lock 17 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "light-poseidon", 4 | "xtask", 5 | ] 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | 179 | Copyright 2023 Light Protocol Labs 180 | 181 | Licensed under the Apache License, Version 2.0 (the "License"); 182 | you may not use this file except in compliance with the License. 183 | You may obtain a copy of the License at 184 | 185 | http://www.apache.org/licenses/LICENSE-2.0 186 | 187 | Unless required by applicable law or agreed to in writing, software 188 | distributed under the License is distributed on an "AS IS" BASIS, 189 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 190 | See the License for the specific language governing permissions and 191 | limitations under the License. 192 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Crates.io](https://img.shields.io/crates/v/light-poseidon.svg)](https://crates.io/crates/light-poseidon) 2 | [![Workflow Status](https://github.com/Lightprotocol/light-poseidon/workflows/main/badge.svg)](https://github.com/Lightprotocol/light-poseidon/actions?query=workflow) 3 | 4 | # light-poseidon 5 | 6 | 7 | 8 | **light-poseidon** is a [Poseidon](https://eprint.iacr.org/2019/458) hash 9 | implementation in Rust created for [Light Protocol](https://www.lightprotocol.com/). 
10 | 11 | ## Parameters 12 | 13 | The library provides pre-generated parameters over the BN254 curve, however 14 | it can work with any parameters provided as long as developers take care 15 | of generating the round constants. 16 | 17 | Parameters provided by the library are: 18 | 19 | * *x^5* S-boxes 20 | * width - *2 ≤ t ≤ 13* 21 | * inputs - *1 ≤ n ≤ 12* 22 | * 8 full rounds and partial rounds depending on *t*: *[56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65]* 23 | 24 | The parameters can be generated with: 25 | 26 | ```bash 27 | cargo xtask generate-poseidon-parameters 28 | ```` 29 | 30 | ## Output type 31 | 32 | [`Poseidon`](https://docs.rs/light-poseidon/latest/light_poseidon/struct.Poseidon.html) type implements two traits which serve the purpose 33 | of returning the calculated hash in different representations: 34 | 35 | * [`PoseidonBytesHasher`](https://docs.rs/light-poseidon/latest/light_poseidon/trait.PoseidonBytesHasher.html) with the 36 | `hash_bytes_be` and `hash_bytes_le` methods which returns a byte array. 37 | * [`PoseidonHasher`](https://docs.rs/light-poseidon/latest/light_poseidon/trait.PoseidonHasher.html) with the `hash` method which returns 38 | [`ark_ff::PrimeField`](ark_ff::PrimeField). Might be useful if you want 39 | to immediately process the result with an another library which works with 40 | [`ark_ff::PrimeField`](ark_ff::PrimeField) types. 41 | 42 | ## Examples 43 | 44 | Example with two simple big-endian byte inputs (converted to field elements) 45 | and BN254-based parameters provided by the library, with 46 | [`PoseidonBytesHasher`](https://docs.rs/light-poseidon/latest/light_poseidon/trait.PoseidonHasher.html) trait and a byte array 47 | result: 48 | 49 | ```rust 50 | use light_poseidon::{Poseidon, PoseidonBytesHasher, parameters::bn254_x5}; 51 | use ark_bn254::Fr; 52 | use ark_ff::{BigInteger, PrimeField}; 53 | 54 | let mut poseidon = Poseidon::::new_circom(2).unwrap(); 55 | 56 | let hash = poseidon.hash_bytes_be(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 57 | 58 | println!("{:?}", hash); 59 | // Should print: 60 | // [ 61 | // 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 62 | // 254, 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144 63 | // ] 64 | ``` 65 | 66 | With [`PoseidonHasher`](https://docs.rs/light-poseidon/latest/light_poseidon/trait.PoseidonHasher.html) trait and 67 | [`ark_ff::PrimeField`](ark_ff::PrimeField) result: 68 | 69 | ```rust 70 | use light_poseidon::{Poseidon, PoseidonHasher, parameters::bn254_x5}; 71 | use ark_bn254::Fr; 72 | use ark_ff::{BigInteger, PrimeField}; 73 | 74 | let mut poseidon = Poseidon::::new_circom(2).unwrap(); 75 | 76 | let input1 = Fr::from_be_bytes_mod_order(&[1u8; 32]); 77 | let input2 = Fr::from_be_bytes_mod_order(&[2u8; 32]); 78 | 79 | let hash = poseidon.hash(&[input1, input2]).unwrap(); 80 | 81 | // Do something with `hash`. 82 | ``` 83 | 84 | ## Implementation 85 | 86 | The implementation is compatible with the 87 | [original SageMath implementation](https://extgit.iaik.tugraz.at/krypto/hadeshash/-/tree/master/), 88 | but it was also inspired by the following ones: 89 | 90 | * [circomlibjs](https://github.com/iden3/circomlibjs) 91 | * [zero-knowledge-gadgets](https://github.com/webb-tools/zero-knowledge-gadgets) 92 | 93 | ## Performance 94 | 95 | This repository contains a benchmark measuring the performance of this 96 | Poseidon implementation for given 1 - 12 random 32 bytes inputs. 
97 | 98 | To run them, simply use: 99 | 100 | ```bash 101 | cargo bench 102 | ``` 103 | 104 | This is the result from a host with the following hardware: 105 | 106 | * AMD Ryzen™ 9 7945HX with Radeon™ Graphics × 32 107 | 108 | ```norust 109 | poseidon_bn254_x5_1 time: [12.710 µs 12.735 µs 12.754 µs] 110 | 111 | poseidon_bn254_x5_2 time: [18.948 µs 18.963 µs 18.990 µs] 112 | 113 | poseidon_bn254_x5_3 time: [26.607 µs 26.611 µs 26.615 µs] 114 | 115 | poseidon_bn254_x5_4 time: [38.507 µs 38.513 µs 38.519 µs] 116 | 117 | poseidon_bn254_x5_5 time: [51.024 µs 51.031 µs 51.039 µs] 118 | 119 | poseidon_bn254_x5_6 time: [68.368 µs 68.375 µs 68.385 µs] 120 | 121 | poseidon_bn254_x5_7 time: [86.819 µs 86.886 µs 86.968 µs] 122 | 123 | poseidon_bn254_x5_8 time: [105.38 µs 105.49 µs 105.61 µs] 124 | 125 | poseidon_bn254_x5_9 time: [121.99 µs 122.00 µs 122.01 µs] 126 | 127 | poseidon_bn254_x5_10 time: [157.00 µs 157.02 µs 157.05 µs] 128 | 129 | poseidon_bn254_x5_11 time: [170.01 µs 170.04 µs 170.07 µs] 130 | 131 | poseidon_bn254_x5_12 time: [210.78 µs 210.81 µs 210.84 µs] 132 | ``` 133 | 134 | ## Security 135 | 136 | This library has been audited by [Veridise](https://veridise.com/). You can 137 | read the audit report [here](https://github.com/Lightprotocol/light-poseidon/blob/main/assets/audit.pdf). 138 | 139 | 140 | -------------------------------------------------------------------------------- /assets/audit.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Lightprotocol/light-poseidon/92be0d45df64796ba5ce5db68c5b5dbc214b7bbd/assets/audit.pdf -------------------------------------------------------------------------------- /light-poseidon/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "light-poseidon" 3 | version = "0.3.0" 4 | authors = ["Mike Rostecki "] 5 | description = "Poseidon hash implementation in Rust" 6 | repository = "https://github.com/Lightprotocol/light-poseidon" 7 | readme = "../README.md" 8 | keywords = ["cryptography", "hash", "poseidon", "zero-knowledge", "zkSNARK"] 9 | license = "Apache-2.0" 10 | edition = "2021" 11 | 12 | [dependencies] 13 | ark-bn254 = "0.5.0" 14 | ark-ff = "0.5.0" 15 | num-bigint = "0.4.4" 16 | thiserror = "1.0" 17 | 18 | [dev-dependencies] 19 | criterion = "0.5" 20 | rand = "0.8" 21 | hex = "0.4.3" 22 | 23 | [[bench]] 24 | name = "bn254_x5" 25 | harness = false 26 | -------------------------------------------------------------------------------- /light-poseidon/benches/bn254_x5.rs: -------------------------------------------------------------------------------- 1 | use ark_bn254::Fr; 2 | use ark_ff::PrimeField; 3 | use criterion::{criterion_group, criterion_main, Criterion}; 4 | use rand::Rng; 5 | 6 | use light_poseidon::{Poseidon, PoseidonHasher}; 7 | 8 | pub fn bench_poseidon_bn254_x5(c: &mut Criterion) { 9 | let mut inputs = Vec::new(); 10 | for i in 1..13 { 11 | let name = [String::from("poseidon_bn254_x5_"), i.to_string()].concat(); 12 | let random_bytes1 = Fr::from_be_bytes_mod_order(&rand::thread_rng().gen::<[u8; 32]>()); 13 | inputs.push(random_bytes1); 14 | let mut hasher = Poseidon::::new_circom(i).unwrap(); 15 | 16 | c.bench_function(&name, |b| b.iter(|| hasher.hash(&inputs[..]))); 17 | } 18 | } 19 | 20 | criterion_group!(benches, bench_poseidon_bn254_x5); 21 | criterion_main!(benches); 22 | -------------------------------------------------------------------------------- /light-poseidon/src/lib.rs: 
-------------------------------------------------------------------------------- 1 | //! **light-poseidon** is a [Poseidon](https://eprint.iacr.org/2019/458) hash 2 | //! implementation in Rust created for [Light Protocol](https://www.lightprotocol.com/). 3 | //! 4 | //! # Parameters 5 | //! 6 | //! The library provides pre-generated parameters over the BN254 curve, however 7 | //! it can work with any parameters provided as long as developers take care 8 | //! of generating the round constants. 9 | //! 10 | //! Parameters provided by the library are: 11 | //! 12 | //! * *x^5* S-boxes 13 | //! * width - *2 ≤ t ≤ 13* 14 | //! * inputs - *1 ≤ n ≤ 12* 15 | //! * 8 full rounds and partial rounds depending on *t*: *[56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65]* 16 | //! 17 | //! The parameters can be generated with: 18 | //! 19 | //! ```bash 20 | //! cargo xtask generate-poseidon-parameters 21 | //! ```` 22 | //! 23 | //! # Output type 24 | //! 25 | //! [`Poseidon`](crate::Poseidon) type implements two traits which serve the purpose 26 | //! of returning the calculated hash in different representations: 27 | //! 28 | //! * [`PoseidonBytesHasher`](crate::PoseidonBytesHasher) with the 29 | //! `hash_bytes_be` and `hash_bytes_le` methods which returns a byte array. 30 | //! * [`PoseidonHasher`](crate::PoseidonHasher) with the `hash` method which returns 31 | //! [`ark_ff::PrimeField`](ark_ff::PrimeField). Might be useful if you want 32 | //! to immediately process the result with an another library which works with 33 | //! [`ark_ff::PrimeField`](ark_ff::PrimeField) types. 34 | //! 35 | //! # Examples 36 | //! 37 | //! Example with two simple big-endian byte inputs (converted to field elements) 38 | //! and BN254-based parameters provided by the library, with 39 | //! [`PoseidonBytesHasher`](crate::PoseidonHasher) trait and a byte array 40 | //! result: 41 | //! 42 | //! ```rust 43 | //! use light_poseidon::{Poseidon, PoseidonBytesHasher, parameters::bn254_x5}; 44 | //! use ark_bn254::Fr; 45 | //! use ark_ff::{BigInteger, PrimeField}; 46 | //! 47 | //! let mut poseidon = Poseidon::::new_circom(2).unwrap(); 48 | //! 49 | //! let hash = poseidon.hash_bytes_be(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 50 | //! 51 | //! println!("{:?}", hash); 52 | //! // Should print: 53 | //! // [ 54 | //! // 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 55 | //! // 254, 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144 56 | //! // ] 57 | //! ``` 58 | //! 59 | //! With [`PoseidonHasher`](crate::PoseidonHasher) trait and 60 | //! [`ark_ff::PrimeField`](ark_ff::PrimeField) result: 61 | //! 62 | //! ```rust 63 | //! use light_poseidon::{Poseidon, PoseidonHasher, parameters::bn254_x5}; 64 | //! use ark_bn254::Fr; 65 | //! use ark_ff::{BigInteger, PrimeField}; 66 | //! 67 | //! let mut poseidon = Poseidon::::new_circom(2).unwrap(); 68 | //! 69 | //! let input1 = Fr::from_be_bytes_mod_order(&[1u8; 32]); 70 | //! let input2 = Fr::from_be_bytes_mod_order(&[2u8; 32]); 71 | //! 72 | //! let hash = poseidon.hash(&[input1, input2]).unwrap(); 73 | //! 74 | //! // Do something with `hash`. 75 | //! ``` 76 | //! 77 | //! # Implementation 78 | //! 79 | //! The implementation is compatible with the 80 | //! [original SageMath implementation](https://extgit.iaik.tugraz.at/krypto/hadeshash/-/tree/master/), 81 | //! but it was also inspired by the following ones: 82 | //! 83 | //! * [circomlibjs](https://github.com/iden3/circomlibjs) 84 | //! 
* [zero-knowledge-gadgets](https://github.com/webb-tools/zero-knowledge-gadgets) 85 | //! 86 | //! # Performance 87 | //! 88 | //! This repository contains a benchmark measuring the performance of this 89 | //! Poseidon implementation for given 1 - 12 random 32 bytes inputs. 90 | //! 91 | //! To run them, simply use: 92 | //! 93 | //! ```bash 94 | //! cargo bench 95 | //! ``` 96 | //! 97 | //! This is the result from a host with the following hardware: 98 | //! 99 | //! * AMD Ryzen™ 9 7945HX with Radeon™ Graphics × 32 100 | //! 101 | //! ```norust 102 | //! poseidon_bn254_x5_1 time: [12.710 µs 12.735 µs 12.754 µs] 103 | //! 104 | //! poseidon_bn254_x5_2 time: [18.948 µs 18.963 µs 18.990 µs] 105 | //! 106 | //! poseidon_bn254_x5_3 time: [26.607 µs 26.611 µs 26.615 µs] 107 | //! 108 | //! poseidon_bn254_x5_4 time: [38.507 µs 38.513 µs 38.519 µs] 109 | //! 110 | //! poseidon_bn254_x5_5 time: [51.024 µs 51.031 µs 51.039 µs] 111 | //! 112 | //! poseidon_bn254_x5_6 time: [68.368 µs 68.375 µs 68.385 µs] 113 | //! 114 | //! poseidon_bn254_x5_7 time: [86.819 µs 86.886 µs 86.968 µs] 115 | //! 116 | //! poseidon_bn254_x5_8 time: [105.38 µs 105.49 µs 105.61 µs] 117 | //! 118 | //! poseidon_bn254_x5_9 time: [121.99 µs 122.00 µs 122.01 µs] 119 | //! 120 | //! poseidon_bn254_x5_10 time: [157.00 µs 157.02 µs 157.05 µs] 121 | //! 122 | //! poseidon_bn254_x5_11 time: [170.01 µs 170.04 µs 170.07 µs] 123 | //! 124 | //! poseidon_bn254_x5_12 time: [210.78 µs 210.81 µs 210.84 µs] 125 | //! ``` 126 | //! 127 | //! # Security 128 | //! 129 | //! This library has been audited by [Veridise](https://veridise.com/). You can 130 | //! read the audit report [here](https://github.com/Lightprotocol/light-poseidon/blob/main/assets/audit.pdf). 131 | use ark_bn254::Fr; 132 | use ark_ff::{BigInteger, PrimeField, Zero}; 133 | use thiserror::Error; 134 | 135 | pub mod parameters; 136 | 137 | pub const HASH_LEN: usize = 32; 138 | pub const MAX_X5_LEN: usize = 13; 139 | 140 | #[derive(Error, Debug, PartialEq)] 141 | pub enum PoseidonError { 142 | #[error("Invalid number of inputs: {inputs}. Maximum allowed is {max_limit} ({width} - 1).")] 143 | InvalidNumberOfInputs { 144 | inputs: usize, 145 | max_limit: usize, 146 | width: usize, 147 | }, 148 | #[error("Input is an empty slice.")] 149 | EmptyInput, 150 | #[error("Invalid length of the input: {len}. The length matching the modulus of the prime field is: {modulus_bytes_len}.")] 151 | InvalidInputLength { 152 | len: usize, 153 | modulus_bytes_len: usize, 154 | }, 155 | #[error("Failed to convert bytes {bytes:?} into a prime field element")] 156 | BytesToPrimeFieldElement { bytes: Vec }, 157 | #[error("Input is larger than the modulus of the prime field.")] 158 | InputLargerThanModulus, 159 | #[error("Failed to convert a vector of bytes into an array.")] 160 | VecToArray, 161 | #[error("Failed to convert the number of inputs from u64 to u8.")] 162 | U64Tou8, 163 | #[error("Failed to convert bytes to BigInt")] 164 | BytesToBigInt, 165 | #[error("Invalid width: {width}. Choose a width between 2 and 16 for 1 to 15 inputs.")] 166 | InvalidWidthCircom { width: usize, max_limit: usize }, 167 | } 168 | 169 | /// Parameters for the Poseidon hash algorithm. 170 | pub struct PoseidonParameters { 171 | /// Round constants. 172 | pub ark: Vec, 173 | /// MDS matrix. 174 | pub mds: Vec>, 175 | /// Number of full rounds (where S-box is applied to all elements of the 176 | /// state). 
177 | pub full_rounds: usize, 178 | /// Number of partial rounds (where S-box is applied only to the first 179 | /// element of the state). 180 | pub partial_rounds: usize, 181 | /// Number of prime fields in the state. 182 | pub width: usize, 183 | /// Exponential used in S-box to power elements of the state. 184 | pub alpha: u64, 185 | } 186 | 187 | impl PoseidonParameters { 188 | pub fn new( 189 | ark: Vec, 190 | mds: Vec>, 191 | full_rounds: usize, 192 | partial_rounds: usize, 193 | width: usize, 194 | alpha: u64, 195 | ) -> Self { 196 | Self { 197 | ark, 198 | mds, 199 | full_rounds, 200 | partial_rounds, 201 | width, 202 | alpha, 203 | } 204 | } 205 | } 206 | 207 | pub trait PoseidonHasher { 208 | /// Calculates a Poseidon hash for the given input of prime fields and 209 | /// returns the result as a prime field. 210 | /// 211 | /// # Examples 212 | /// 213 | /// Example with two simple big-endian byte inputs (converted to prime 214 | /// fields) and BN254-based parameters provided by the library. 215 | /// 216 | /// ```rust 217 | /// use light_poseidon::{Poseidon, PoseidonHasher, parameters::bn254_x5}; 218 | /// use ark_bn254::Fr; 219 | /// use ark_ff::{BigInteger, PrimeField}; 220 | /// 221 | /// let mut poseidon = Poseidon::::new_circom(2).unwrap(); 222 | /// 223 | /// let input1 = Fr::from_be_bytes_mod_order(&[1u8; 32]); 224 | /// let input2 = Fr::from_be_bytes_mod_order(&[2u8; 32]); 225 | /// 226 | /// let hash = poseidon.hash(&[input1, input2]).unwrap(); 227 | /// 228 | /// // Do something with `hash`. 229 | fn hash(&mut self, inputs: &[F]) -> Result; 230 | } 231 | 232 | pub trait PoseidonBytesHasher { 233 | /// Calculates a Poseidon hash for the given input of big-endian byte slices 234 | /// and returns the result as a byte array. 235 | /// 236 | /// # Examples 237 | /// 238 | /// Example with two simple big-endian byte inputs and BN254-based 239 | /// parameters provided by the library. 240 | /// 241 | /// ```rust 242 | /// use light_poseidon::{Poseidon, PoseidonBytesHasher, parameters::bn254_x5}; 243 | /// use ark_bn254::Fr; 244 | /// use ark_ff::{BigInteger, PrimeField}; 245 | /// 246 | /// let mut poseidon = Poseidon::::new_circom(2).unwrap(); 247 | /// 248 | /// let hash = poseidon.hash_bytes_be(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 249 | /// 250 | /// println!("{:?}", hash); 251 | /// // Should print: 252 | /// // [ 253 | /// // 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 254 | /// // 254, 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144 255 | /// // ] 256 | /// ``` 257 | /// 258 | /// # Safety 259 | /// 260 | /// Unlike the 261 | /// [`PrimeField::from_be_bytes_mod_order`](ark_ff::PrimeField::from_be_bytes_mod_order) 262 | /// and [`Field::from_random_bytes`](ark_ff::Field::from_random_bytes) 263 | /// methods, this function ensures that the input byte slice's length exactly matches 264 | /// the modulus size of the prime field. If the size doesn't match, an error is returned. 265 | /// 266 | /// This strict check is designed to prevent unexpected behaviors and collisions 267 | /// that might occur when using `from_be_bytes_mod_order` or `from_random_bytes`, 268 | /// which simply take a subslice of the input if it's too large, potentially 269 | /// leading to collisions. 270 | fn hash_bytes_be(&mut self, inputs: &[&[u8]]) -> Result<[u8; HASH_LEN], PoseidonError>; 271 | /// Calculates a Poseidon hash for the given input of little-endian byte 272 | /// slices and returns the result as a byte array. 
273 | /// 274 | /// # Examples 275 | /// 276 | /// Example with two simple little-endian byte inputs and BN254-based 277 | /// parameters provided by the library. 278 | /// 279 | /// ```rust 280 | /// use light_poseidon::{Poseidon, PoseidonBytesHasher, parameters::bn254_x5}; 281 | /// use ark_bn254::Fr; 282 | /// use ark_ff::{BigInteger, PrimeField}; 283 | /// 284 | /// let mut poseidon = Poseidon::::new_circom(2).unwrap(); 285 | /// 286 | /// let hash = poseidon.hash_bytes_le(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 287 | /// 288 | /// println!("{:?}", hash); 289 | /// // Should print: 290 | /// // [ 291 | /// // 144, 25, 130, 41, 200, 53, 231, 38, 27, 206, 162, 156, 254, 132, 123, 32, 25, 99, 242, 292 | /// // 85, 3, 94, 235, 125, 28, 140, 138, 143, 147, 225, 84, 13 293 | /// // ] 294 | /// ``` 295 | /// 296 | /// # Safety 297 | /// 298 | /// Unlike the 299 | /// [`PrimeField::from_le_bytes_mod_order`](ark_ff::PrimeField::from_le_bytes_mod_order) 300 | /// and [`Field::from_random_bytes`](ark_ff::Field::from_random_bytes) 301 | /// methods, this function ensures that the input byte slice's length exactly matches 302 | /// the modulus size of the prime field. If the size doesn't match, an error is returned. 303 | /// 304 | /// This strict check is designed to prevent unexpected behaviors and collisions 305 | /// that might occur when using `from_be_bytes_mod_order` or `from_random_bytes`, 306 | /// which simply take a subslice of the input if it's too large, potentially 307 | /// leading to collisions. 308 | fn hash_bytes_le(&mut self, inputs: &[&[u8]]) -> Result<[u8; HASH_LEN], PoseidonError>; 309 | } 310 | 311 | /// A stateful sponge performing Poseidon hash computation. 312 | pub struct Poseidon { 313 | params: PoseidonParameters, 314 | domain_tag: F, 315 | state: Vec, 316 | } 317 | 318 | impl Poseidon { 319 | /// Returns a new Poseidon hasher based on the given parameters. 320 | /// 321 | /// Optionally, a domain tag can be provided. If it is not provided, it 322 | /// will be set to zero. 
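///
/// A minimal sketch (not part of the original documentation), assuming the
/// `get_poseidon_parameters` helper in [`parameters::bn254_x5`] is public;
/// it takes the state width, i.e. the number of inputs plus one:
///
/// ```rust
/// use light_poseidon::{Poseidon, PoseidonHasher, parameters::bn254_x5};
/// use ark_bn254::Fr;
///
/// // Width 3 = 2 inputs + 1 slot for the domain tag.
/// // `get_poseidon_parameters` is assumed to be exposed publicly; its
/// // definition lives in `parameters/bn254_x5.rs` (not shown here).
/// let params = bn254_x5::get_poseidon_parameters::<Fr>(3).unwrap();
/// let mut poseidon = Poseidon::new(params);
///
/// let hash = poseidon.hash(&[Fr::from(1u64), Fr::from(2u64)]).unwrap();
/// ```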
323 | pub fn new(params: PoseidonParameters) -> Self { 324 | Self::with_domain_tag(params, F::zero()) 325 | } 326 | 327 | fn with_domain_tag(params: PoseidonParameters, domain_tag: F) -> Self { 328 | let width = params.width; 329 | Self { 330 | domain_tag, 331 | params, 332 | state: Vec::with_capacity(width), 333 | } 334 | } 335 | 336 | #[inline(always)] 337 | fn apply_ark(&mut self, round: usize) { 338 | self.state.iter_mut().enumerate().for_each(|(i, a)| { 339 | let c = self.params.ark[round * self.params.width + i]; 340 | *a += c; 341 | }); 342 | } 343 | 344 | #[inline(always)] 345 | fn apply_sbox_full(&mut self) { 346 | self.state.iter_mut().for_each(|a| { 347 | *a = a.pow([self.params.alpha]); 348 | }); 349 | } 350 | 351 | #[inline(always)] 352 | fn apply_sbox_partial(&mut self) { 353 | self.state[0] = self.state[0].pow([self.params.alpha]); 354 | } 355 | 356 | #[inline(always)] 357 | fn apply_mds(&mut self) { 358 | self.state = self 359 | .state 360 | .iter() 361 | .enumerate() 362 | .map(|(i, _)| { 363 | self.state 364 | .iter() 365 | .enumerate() 366 | .fold(F::zero(), |acc, (j, a)| acc + *a * self.params.mds[i][j]) 367 | }) 368 | .collect(); 369 | } 370 | } 371 | 372 | impl PoseidonHasher for Poseidon { 373 | fn hash(&mut self, inputs: &[F]) -> Result { 374 | if inputs.len() != self.params.width - 1 { 375 | return Err(PoseidonError::InvalidNumberOfInputs { 376 | inputs: inputs.len(), 377 | max_limit: self.params.width - 1, 378 | width: self.params.width, 379 | }); 380 | } 381 | 382 | self.state.push(self.domain_tag); 383 | 384 | for input in inputs { 385 | self.state.push(*input); 386 | } 387 | 388 | let all_rounds = self.params.full_rounds + self.params.partial_rounds; 389 | let half_rounds = self.params.full_rounds / 2; 390 | 391 | // full rounds + partial rounds 392 | for round in 0..half_rounds { 393 | self.apply_ark(round); 394 | self.apply_sbox_full(); 395 | self.apply_mds(); 396 | } 397 | 398 | for round in half_rounds..half_rounds + self.params.partial_rounds { 399 | self.apply_ark(round); 400 | self.apply_sbox_partial(); 401 | self.apply_mds(); 402 | } 403 | 404 | for round in half_rounds + self.params.partial_rounds..all_rounds { 405 | self.apply_ark(round); 406 | self.apply_sbox_full(); 407 | self.apply_mds(); 408 | } 409 | 410 | let result = self.state[0]; 411 | self.state.clear(); 412 | Ok(result) 413 | } 414 | } 415 | 416 | macro_rules! impl_hash_bytes { 417 | ($fn_name:ident, $bytes_to_prime_field_element_fn:ident, $to_bytes_fn:ident) => { 418 | fn $fn_name(&mut self, inputs: &[&[u8]]) -> Result<[u8; HASH_LEN], PoseidonError> { 419 | let inputs: Result, _> = inputs 420 | .iter() 421 | .map(|input| validate_bytes_length::(input)) 422 | .collect(); 423 | let inputs = inputs?; 424 | let inputs: Result, _> = inputs 425 | .iter() 426 | .map(|input| $bytes_to_prime_field_element_fn(input)) 427 | .collect(); 428 | let inputs = inputs?; 429 | let hash = self.hash(&inputs)?; 430 | 431 | hash.into_bigint() 432 | .$to_bytes_fn() 433 | .try_into() 434 | .map_err(|_| PoseidonError::VecToArray) 435 | } 436 | }; 437 | } 438 | 439 | impl PoseidonBytesHasher for Poseidon { 440 | impl_hash_bytes!(hash_bytes_le, bytes_to_prime_field_element_le, to_bytes_le); 441 | impl_hash_bytes!(hash_bytes_be, bytes_to_prime_field_element_be, to_bytes_be); 442 | } 443 | 444 | /// Checks whether a slice of bytes is not empty or its length does not exceed 445 | /// the modulus size od the prime field. If it does, an error is returned. 
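///
/// A quick sketch (not from the original docs): BN254 `Fr` has a 254-bit
/// modulus, which occupies 32 bytes, so slices of up to 32 bytes pass and
/// longer ones are rejected.
///
/// ```rust
/// use light_poseidon::validate_bytes_length;
/// use ark_bn254::Fr;
///
/// // 32 bytes is exactly the modulus size of `Fr`; 33 exceeds it.
/// assert!(validate_bytes_length::<Fr>(&[1u8; 31]).is_ok());
/// assert!(validate_bytes_length::<Fr>(&[1u8; 32]).is_ok());
/// assert!(validate_bytes_length::<Fr>(&[1u8; 33]).is_err());
/// ```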
446 | /// 447 | /// # Safety 448 | /// 449 | /// [`PrimeField::from_be_bytes_mod_order`](ark_ff::PrimeField::from_be_bytes_mod_order) 450 | /// just takes a subslice of the input if it's too large, potentially leading 451 | /// to collisions. The purpose of this function is to prevent them by returning 452 | /// and error. It should be always used before converting byte slices to 453 | /// prime field elements. 454 | pub fn validate_bytes_length(input: &[u8]) -> Result<&[u8], PoseidonError> 455 | where 456 | F: PrimeField, 457 | { 458 | let modulus_bytes_len = ((F::MODULUS_BIT_SIZE + 7) / 8) as usize; 459 | if input.is_empty() { 460 | return Err(PoseidonError::EmptyInput); 461 | } 462 | if input.len() > modulus_bytes_len { 463 | return Err(PoseidonError::InvalidInputLength { 464 | len: input.len(), 465 | modulus_bytes_len, 466 | }); 467 | } 468 | Ok(input) 469 | } 470 | 471 | macro_rules! impl_bytes_to_prime_field_element { 472 | ($name:ident, $from_bytes_method:ident, $endianess:expr) => { 473 | #[doc = "Converts a slice of "] 474 | #[doc = $endianess] 475 | #[doc = "-endian bytes into a prime field element, \ 476 | represented by the [`ark_ff::PrimeField`](ark_ff::PrimeField) trait."] 477 | pub fn $name(input: &[u8]) -> Result 478 | where 479 | F: PrimeField, 480 | { 481 | let element = num_bigint::BigUint::$from_bytes_method(input); 482 | let element = F::BigInt::try_from(element).map_err(|_| PoseidonError::BytesToBigInt)?; 483 | 484 | // In theory, `F::from_bigint` should also perform a check whether input is 485 | // larger than modulus (and return `None` if it is), but it's not reliable... 486 | // To be sure, we check it ourselves. 487 | if element >= F::MODULUS { 488 | return Err(PoseidonError::InputLargerThanModulus); 489 | } 490 | let element = F::from_bigint(element).ok_or(PoseidonError::InputLargerThanModulus)?; 491 | 492 | Ok(element) 493 | } 494 | }; 495 | } 496 | 497 | impl_bytes_to_prime_field_element!(bytes_to_prime_field_element_le, from_bytes_le, "little"); 498 | impl_bytes_to_prime_field_element!(bytes_to_prime_field_element_be, from_bytes_be, "big"); 499 | 500 | impl Poseidon { 501 | pub fn new_circom(nr_inputs: usize) -> Result, PoseidonError> { 502 | Self::with_domain_tag_circom(nr_inputs, Fr::zero()) 503 | } 504 | 505 | pub fn with_domain_tag_circom( 506 | nr_inputs: usize, 507 | domain_tag: Fr, 508 | ) -> Result, PoseidonError> { 509 | let width = nr_inputs + 1; 510 | if width > MAX_X5_LEN { 511 | return Err(PoseidonError::InvalidWidthCircom { 512 | width, 513 | max_limit: MAX_X5_LEN, 514 | }); 515 | } 516 | 517 | let params = crate::parameters::bn254_x5::get_poseidon_parameters::( 518 | (width).try_into().map_err(|_| PoseidonError::U64Tou8)?, 519 | )?; 520 | Ok(Poseidon::::with_domain_tag(params, domain_tag)) 521 | } 522 | } 523 | -------------------------------------------------------------------------------- /light-poseidon/src/parameters/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod bn254_x5; 2 | -------------------------------------------------------------------------------- /light-poseidon/tests/bn254_fq_x5.rs: -------------------------------------------------------------------------------- 1 | use ark_bn254::Fr; 2 | use ark_ff::{BigInteger, BigInteger256, One, PrimeField, UniformRand, Zero}; 3 | use light_poseidon::{ 4 | bytes_to_prime_field_element_be, bytes_to_prime_field_element_le, validate_bytes_length, 5 | Poseidon, PoseidonError, 6 | }; 7 | use light_poseidon::{PoseidonBytesHasher, 
PoseidonHasher}; 8 | use rand::Rng; 9 | 10 | /// Checks the hash of `1` as a prime field element. 11 | #[test] 12 | fn test_poseidon_one() { 13 | let mut hasher = Poseidon::::new_circom(2).unwrap(); 14 | 15 | let expected = [ 16 | 0, 122, 243, 70, 226, 211, 4, 39, 158, 121, 224, 169, 243, 2, 63, 119, 18, 148, 167, 138, 17 | 203, 112, 231, 63, 144, 175, 226, 124, 173, 64, 30, 129, 18 | ]; 19 | 20 | let input = Fr::from_be_bytes_mod_order(&[1u8]); 21 | let hash = hasher.hash(&[input, input]).unwrap(); 22 | 23 | assert_eq!(hash.into_bigint().to_bytes_be(), expected,); 24 | 25 | let input = Fr::from_be_bytes_mod_order(&[0u8, 1u8]); 26 | let hash = hasher.hash(&[input, input]).unwrap(); 27 | 28 | assert_eq!(hash.into_bigint().to_bytes_be(), expected); 29 | 30 | let input = Fr::from_be_bytes_mod_order(&[0u8, 0u8, 1u8]); 31 | let hash = hasher.hash(&[input, input]).unwrap(); 32 | 33 | assert_eq!(hash.into_bigint().to_bytes_be(), expected); 34 | } 35 | 36 | /// Checks the hash of byte slices consistng of ones and twos. 37 | #[test] 38 | fn test_poseidon_bn254_x5_fq_input_ones_twos() { 39 | let input1 = Fr::from_be_bytes_mod_order(&[1u8; 32]); 40 | let input2 = Fr::from_be_bytes_mod_order(&[2u8; 32]); 41 | let mut hasher = Poseidon::::new_circom(2).unwrap(); 42 | let hash = hasher.hash(&[input1, input2]).unwrap(); 43 | assert_eq!( 44 | hash.into_bigint().to_bytes_be(), 45 | [ 46 | 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 47 | 254, 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144 48 | ] 49 | ); 50 | 51 | let hash = hasher.hash_bytes_be(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 52 | assert_eq!( 53 | hash, 54 | [ 55 | 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 56 | 254, 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144 57 | ] 58 | ); 59 | 60 | let hash = hasher.hash_bytes_le(&[&[1u8; 32], &[2u8; 32]]).unwrap(); 61 | assert_eq!( 62 | hash, 63 | [ 64 | 144, 25, 130, 41, 200, 53, 231, 38, 27, 206, 162, 156, 254, 132, 123, 32, 25, 99, 242, 65 | 85, 3, 94, 235, 125, 28, 140, 138, 143, 147, 225, 84, 13 66 | ] 67 | ) 68 | } 69 | 70 | /// Checks thebash of bytes slices consisting of ones and twos, with a custom 71 | /// domain tag. 72 | #[test] 73 | fn test_poseidon_bn254_x5_fq_with_domain_tag() { 74 | let input1 = Fr::from_be_bytes_mod_order(&[1u8; 32]); 75 | let input2 = Fr::from_be_bytes_mod_order(&[2u8; 32]); 76 | let mut hasher = Poseidon::::with_domain_tag_circom(2, Fr::zero()).unwrap(); 77 | let hash = hasher.hash(&[input1, input2]).unwrap(); 78 | 79 | let expected_tag_zero = [ 80 | 13, 84, 225, 147, 143, 138, 140, 28, 125, 235, 94, 3, 85, 242, 99, 25, 32, 123, 132, 254, 81 | 156, 162, 206, 27, 38, 231, 53, 200, 41, 130, 25, 144, 82 | ]; 83 | 84 | assert_eq!(hash.into_bigint().to_bytes_be(), expected_tag_zero); 85 | 86 | let mut hasher = Poseidon::::with_domain_tag_circom(2, Fr::one()).unwrap(); 87 | let hash = hasher.hash(&[input1, input2]).unwrap(); 88 | 89 | assert_ne!(hash.into_bigint().to_bytes_be(), expected_tag_zero); 90 | } 91 | 92 | /// Checks the hash of one and two. 
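/// The inputs are decoded from the single big-endian bytes `[1]` and `[2]`
/// with `from_be_bytes_mod_order`, and the expected digest is asserted in
/// little-endian byte order.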
93 | #[test] 94 | fn test_poseidon_bn254_x5_fq_input_one_two() { 95 | let input1 = Fr::from_be_bytes_mod_order(&[1]); 96 | let input2 = Fr::from_be_bytes_mod_order(&[2]); 97 | 98 | let mut hasher = Poseidon::::new_circom(2).unwrap(); 99 | let hash = hasher.hash(&[input1, input2]).unwrap(); 100 | 101 | assert_eq!( 102 | hash.into_bigint().to_bytes_le(), 103 | [ 104 | 154, 24, 23, 68, 122, 96, 25, 158, 81, 69, 50, 116, 242, 23, 54, 42, 207, 233, 98, 150, 105 | 107, 76, 246, 61, 65, 144, 214, 231, 245, 192, 92, 17 106 | ] 107 | ); 108 | } 109 | 110 | #[test] 111 | fn test_poseidon_bn254_x5_fq_input_random() { 112 | let input1 = Fr::from_be_bytes_mod_order(&[ 113 | 0x06, 0x9c, 0x63, 0x81, 0xac, 0x0b, 0x96, 0x8e, 0x88, 0x1c, 0x91, 0x3c, 0x17, 0xd8, 0x36, 114 | 0x06, 0x7f, 0xd1, 0x5f, 0x2c, 0xc7, 0x9f, 0x90, 0x2c, 0x80, 0x70, 0xb3, 0x6d, 0x28, 0x66, 115 | 0x17, 0xdd, 116 | ]); 117 | let input2 = Fr::from_be_bytes_mod_order(&[ 118 | 0xc3, 0x3b, 0x60, 0x04, 0x2f, 0x76, 0xc7, 0xfb, 0xd0, 0x5d, 0xb7, 0x76, 0x23, 0xcb, 0x17, 119 | 0xb8, 0x1d, 0x49, 0x41, 0x4b, 0x82, 0xe5, 0x6a, 0x2e, 0xc0, 0x18, 0xf7, 0xa5, 0x5c, 0x3f, 120 | 0x30, 0x0b, 121 | ]); 122 | 123 | let mut hasher = Poseidon::::new_circom(2).unwrap(); 124 | let hash = hasher.hash(&[input1, input2]).unwrap(); 125 | assert_eq!( 126 | hash.into_bigint().to_bytes_le(), 127 | [ 128 | 75, 85, 249, 42, 66, 238, 230, 151, 158, 90, 250, 51, 131, 212, 131, 18, 151, 235, 96, 129 | 103, 135, 243, 186, 61, 173, 135, 52, 77, 132, 173, 19, 10 130 | ] 131 | ) 132 | } 133 | 134 | /// Check whther providing different number of inputs than supported by the 135 | /// hasher results in an error. 136 | #[test] 137 | fn test_poseidon_bn254_x5_fq_too_many_inputs() { 138 | let mut rng = rand::thread_rng(); 139 | 140 | for i in 1..13 { 141 | let mut hasher = Poseidon::::new_circom(i).unwrap(); 142 | 143 | for j in 1..13 { 144 | if i != j { 145 | let inputs: Vec<_> = (0..j).map(|_| Fr::rand(&mut rng)).collect(); 146 | let res = hasher.hash(&inputs); 147 | assert!(res.is_err()); 148 | 149 | let inputs_bytes_be: Vec<_> = inputs 150 | .iter() 151 | .map(|i| i.into_bigint().to_bytes_be()) 152 | .collect(); 153 | let inputs_bytes_be: Vec<&[u8]> = inputs_bytes_be.iter().map(|v| &v[..]).collect(); 154 | let res_bytes_be = hasher.hash_bytes_be(&inputs_bytes_be); 155 | assert!(res_bytes_be.is_err()); 156 | 157 | let inputs_bytes_le: Vec<_> = inputs 158 | .iter() 159 | .map(|i| i.into_bigint().to_bytes_le()) 160 | .collect(); 161 | let inputs_bytes_le: Vec<&[u8]> = inputs_bytes_le.iter().map(|v| &v[..]).collect(); 162 | let res_bytes_le = hasher.hash_bytes_le(&inputs_bytes_le); 163 | assert!(res_bytes_le.is_err()); 164 | } 165 | } 166 | } 167 | } 168 | 169 | /// Check whether byte inputs with length lower than the byte limit indicated 170 | /// by the modulus produce the same hashes as equivalent byte inputs padded with 171 | /// zeros. They should be serialized as the same prime field elements. 
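/// For example, in little-endian form the inputs `[1]` and `[1, 0]` decode to
/// the same `Fr` element (one) and must therefore produce the same hash.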
172 | #[test] 173 | fn test_poseidon_bn254_x5_fq_smaller_arrays() { 174 | let mut hasher = Poseidon::::new_circom(1).unwrap(); 175 | 176 | let input1 = vec![1; 1]; 177 | let hash1 = hasher.hash_bytes_le(&[input1.as_slice()]).unwrap(); 178 | 179 | for len in 2..32 { 180 | let input = [vec![1u8], vec![0; len - 1]].concat(); 181 | let hash = hasher.hash_bytes_le(&[input.as_slice()]).unwrap(); 182 | 183 | assert_eq!(hash, hash1); 184 | } 185 | 186 | let input1 = vec![1; 1]; 187 | let hash1 = hasher.hash_bytes_be(&[input1.as_slice()]).unwrap(); 188 | 189 | for len in 2..32 { 190 | let input = [vec![0; len - 1], vec![1u8]].concat(); 191 | let hash = hasher.hash_bytes_be(&[input.as_slice()]).unwrap(); 192 | 193 | assert_eq!(hash, hash1); 194 | } 195 | } 196 | 197 | /// Check whether big-endian byte inputs with length lower than the byte limit 198 | /// indicated by the modulus produce the same hashes as equivalent byte inputs 199 | /// padded with zeros. Randomize the byte slices and try all the possible 200 | /// lengths. They should be serialized as the same prime field elements. 201 | #[test] 202 | fn test_poseidon_bn254_x5_fq_hash_bytes_be_smaller_arrays_random() { 203 | for nr_inputs in 1..12 { 204 | let mut hasher = Poseidon::::new_circom(nr_inputs).unwrap(); 205 | for smaller_arr_len in 1..31 { 206 | let inputs: Vec> = (0..nr_inputs) 207 | .map(|_| { 208 | let rng = rand::thread_rng(); 209 | rng.sample_iter(rand::distributions::Standard) 210 | .take(smaller_arr_len) 211 | .collect() 212 | }) 213 | .collect(); 214 | let inputs: Vec<&[u8]> = inputs.iter().map(|v| &v[..]).collect(); 215 | let hash1 = hasher.hash_bytes_be(inputs.as_slice()).unwrap(); 216 | 217 | for greater_arr_len in smaller_arr_len + 1..32 { 218 | let inputs: Vec> = inputs 219 | .iter() 220 | .map(|input| { 221 | [vec![0u8; greater_arr_len - smaller_arr_len], input.to_vec()].concat() 222 | }) 223 | .collect(); 224 | let inputs: Vec<&[u8]> = inputs.iter().map(|v| &v[..]).collect(); 225 | let hash = hasher.hash_bytes_be(inputs.as_slice()).unwrap(); 226 | 227 | assert_eq!( 228 | hash, hash1, 229 | "inputs: {nr_inputs}, smaller array length: {smaller_arr_len}, greater array length: {greater_arr_len}" 230 | ); 231 | } 232 | } 233 | } 234 | } 235 | 236 | /// Check whether little-endian byte inputs with length lower than the byte limit 237 | /// indicated by the modulus produce the same hashes as equivalent byte inputs 238 | /// padded with zeros. Randomize the byte slices and try all the possible 239 | /// lengths. They should be serialized as the same prime field elements. 
240 | #[test] 241 | fn test_poseidon_bn254_x5_fq_hash_bytes_le_smaller_arrays_random() { 242 | for nr_inputs in 1..12 { 243 | let mut hasher = Poseidon::::new_circom(nr_inputs).unwrap(); 244 | for smaller_arr_len in 1..31 { 245 | let inputs: Vec> = (0..nr_inputs) 246 | .map(|_| { 247 | let rng = rand::thread_rng(); 248 | rng.sample_iter(rand::distributions::Standard) 249 | .take(smaller_arr_len) 250 | .collect() 251 | }) 252 | .collect(); 253 | let inputs: Vec<&[u8]> = inputs.iter().map(|v| &v[..]).collect(); 254 | let hash1 = hasher.hash_bytes_le(inputs.as_slice()).unwrap(); 255 | 256 | for greater_arr_len in smaller_arr_len + 1..32 { 257 | let inputs: Vec> = inputs 258 | .iter() 259 | .map(|input| { 260 | [input.to_vec(), vec![0u8; greater_arr_len - smaller_arr_len]].concat() 261 | }) 262 | .collect(); 263 | let inputs: Vec<&[u8]> = inputs.iter().map(|v| &v[..]).collect(); 264 | let hash = hasher.hash_bytes_le(inputs.as_slice()).unwrap(); 265 | 266 | assert_eq!( 267 | hash, hash1, 268 | "inputs: {nr_inputs}, smaller array length: {smaller_arr_len}, greater array length: {greater_arr_len}" 269 | ); 270 | } 271 | } 272 | } 273 | } 274 | 275 | /// Check whether `validate_bytes_length` returns an error when an input is a 276 | /// byte slice with greater number of elements than indicated by the modulus. 277 | #[test] 278 | fn test_poseidon_bn254_x5_fq_validate_bytes_length() { 279 | for i in 1..32 { 280 | let input = vec![1u8; i]; 281 | let res = validate_bytes_length::(&input).unwrap(); 282 | assert_eq!(res, &input); 283 | } 284 | 285 | for i in 33..64 { 286 | let input = vec![1u8; i]; 287 | let res = validate_bytes_length::(&input); 288 | assert!(res.is_err()); 289 | } 290 | } 291 | 292 | /// Check whether `validate_bytes_length` returns an error when an input is a 293 | /// byte slice with greater number of elements than indicated by the modulus. 294 | /// Randomize the length. 295 | #[test] 296 | fn test_poseidon_bn254_x5_fq_validate_bytes_length_fuzz() { 297 | let mut rng = rand::thread_rng(); 298 | 299 | for _ in 0..100 { 300 | let len = rng.gen_range(33..524_288_000); // Maximum 500 MB. 301 | let input = vec![1u8; len]; 302 | let res = validate_bytes_length::(&input); 303 | 304 | assert!(res.is_err()); 305 | } 306 | } 307 | 308 | /// Checks whether hashes generated by [`PoseidonHasher::hash`], 309 | /// [`PoseidonBytesHasher::hash_bytes_be`] and [`PoseidonBytesHasher::hash_bytes_le`] 310 | /// are the same. 311 | #[test] 312 | fn test_poseidon_bn254_x5_fq_bytes() { 313 | let mut rng = rand::thread_rng(); 314 | 315 | for _ in 0..100 { 316 | for nr_inputs in 1..12 { 317 | let mut hasher = Poseidon::::new_circom(nr_inputs).unwrap(); 318 | 319 | // Hash prime field elements. 320 | let mut inputs = Vec::with_capacity(nr_inputs); 321 | for _ in 0..nr_inputs { 322 | inputs.push(Fr::rand(&mut rng)); 323 | } 324 | let res = hasher.hash(&inputs).unwrap(); 325 | 326 | // Hash big-endian bytes. Ensure that the result is the same. 327 | let inputs_bytes_be: Vec<_> = inputs 328 | .iter() 329 | .map(|i| i.into_bigint().to_bytes_be()) 330 | .collect(); 331 | let inputs_bytes_be: Vec<&[u8]> = inputs_bytes_be.iter().map(|v| &v[..]).collect(); 332 | let res_bytes_be = hasher.hash_bytes_be(&inputs_bytes_be).unwrap(); 333 | assert_eq!(res.into_bigint().to_bytes_be(), res_bytes_be); 334 | 335 | // Hash little-endian bytes. Ensure that the result is the same. 
336 | let inputs_bytes_le: Vec<_> = inputs 337 | .iter() 338 | .map(|i| i.into_bigint().to_bytes_le()) 339 | .collect(); 340 | let inputs_bytes_le: Vec<&[u8]> = inputs_bytes_le.iter().map(|v| &v[..]).collect(); 341 | let res_bytes_le = hasher.hash_bytes_le(&inputs_bytes_le).unwrap(); 342 | assert_eq!(res.into_bigint().to_bytes_le(), res_bytes_le); 343 | } 344 | } 345 | } 346 | 347 | macro_rules! test_bytes_to_prime_field_element { 348 | ($name:ident, $to_bytes_method:ident, $fn:ident) => { 349 | /// Checks whether `bytes_to_prime_field_element_*` functions: 350 | /// 351 | /// * Are converting the valid byte slices appropiately. 352 | /// * Are throwing an error if the input is greater or equal to the 353 | /// modulus. 354 | #[test] 355 | fn $name() { 356 | // Test conversion of random prime field elements from bytes to `F`. 357 | let mut rng = rand::thread_rng(); 358 | for _ in 0..100 { 359 | let f = Fr::rand(&mut rng); 360 | let f = f.into_bigint().$to_bytes_method(); 361 | let res = $fn::(&f); 362 | assert!(res.is_ok()); 363 | } 364 | 365 | let mut lt = Fr::MODULUS; 366 | lt.sub_with_borrow(&BigInteger256::from(1u64)); 367 | let lt = lt.$to_bytes_method(); 368 | let res = $fn::(<); 369 | 370 | assert!(res.is_ok()); 371 | 372 | let eq = Fr::MODULUS; 373 | let eq = eq.$to_bytes_method(); 374 | let res = $fn::(&eq); 375 | 376 | assert!(res.is_err()); 377 | 378 | let mut gt = Fr::MODULUS; 379 | gt.add_with_carry(&BigInteger256::from(1u64)); 380 | let gt = gt.$to_bytes_method(); 381 | let res = $fn::(>); 382 | 383 | assert!(res.is_err()); 384 | } 385 | }; 386 | } 387 | 388 | test_bytes_to_prime_field_element!( 389 | test_poseidon_bn254_x5_fq_to_prime_field_element_be, 390 | to_bytes_be, 391 | bytes_to_prime_field_element_be 392 | ); 393 | 394 | test_bytes_to_prime_field_element!( 395 | test_poseidon_bn254_x5_fq_to_prime_field_element_le, 396 | to_bytes_le, 397 | bytes_to_prime_field_element_le 398 | ); 399 | 400 | macro_rules! test_random_input_same_results { 401 | ($name:ident, $method:ident) => { 402 | /// Check whether hashing the same input twice, separately, produces the 403 | /// same results. 404 | #[test] 405 | fn $name() { 406 | let input = [1u8; 32]; 407 | 408 | for nr_inputs in 1..12 { 409 | let mut hasher = Poseidon::::new_circom(nr_inputs).unwrap(); 410 | 411 | let mut inputs = Vec::with_capacity(nr_inputs); 412 | for _ in 0..nr_inputs { 413 | inputs.push(input.as_slice()); 414 | } 415 | 416 | let hash1 = hasher.$method(inputs.as_slice()).unwrap(); 417 | let hash2 = hasher.$method(inputs.as_slice()).unwrap(); 418 | 419 | assert_eq!(hash1, hash2); 420 | } 421 | } 422 | }; 423 | } 424 | 425 | test_random_input_same_results!( 426 | test_poseidon_bn254_x5_fq_hash_bytes_be_random_input_same_results, 427 | hash_bytes_be 428 | ); 429 | 430 | test_random_input_same_results!( 431 | test_poseidon_bn254_x5_fq_hash_bytes_le_random_input_same_results, 432 | hash_bytes_le 433 | ); 434 | 435 | macro_rules! test_invalid_input_length { 436 | ($name:ident, $method:ident) => { 437 | /// Checks whether hashing byte slices with number of elements larger 438 | /// than indicated by modulus returns an error. 439 | #[test] 440 | fn $name() { 441 | let mut rng = rand::thread_rng(); 442 | 443 | for _ in 0..100 { 444 | let len = rng.gen_range(33..524_288_000); // Maximum 500 MB. 
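                // Every sampled length is at least 33 bytes, i.e. larger than
                // the 32-byte modulus of BN254 `Fr`, so each hash call below
                // must return `InvalidInputLength`.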
445 | let input = vec![1u8; len]; 446 | 447 | for nr_inputs in 1..12 { 448 | let mut hasher = Poseidon::<Fr>::new_circom(nr_inputs).unwrap(); 449 | 450 | let mut inputs = Vec::with_capacity(nr_inputs); 451 | for _ in 0..nr_inputs { 452 | inputs.push(input.as_slice()); 453 | } 454 | 455 | let hash = hasher.$method(inputs.as_slice()); 456 | assert_eq!( 457 | hash, 458 | Err(PoseidonError::InvalidInputLength { 459 | len, 460 | modulus_bytes_len: 32, 461 | }) 462 | ); 463 | } 464 | } 465 | } 466 | }; 467 | } 468 | 469 | test_invalid_input_length!( 470 | test_poseidon_bn254_x5_fq_hash_bytes_be_invalid_input_length, 471 | hash_bytes_be 472 | ); 473 | 474 | test_invalid_input_length!( 475 | test_poseidon_bn254_x5_fq_hash_bytes_le_invalid_input_length, 476 | hash_bytes_le 477 | ); 478 | 479 | macro_rules! test_fuzz_input_gte_field_size { 480 | ($name:ident, $method:ident, $to_bytes_method:ident) => { 481 | /// Checks whether hashing a byte slice representing an element larger 482 | /// than the modulus returns an error. 483 | #[test] 484 | fn $name() { 485 | let mut greater_than_field_size = Fr::MODULUS; 486 | let mut rng = rand::thread_rng(); 487 | let random_number = rng.gen_range(0u64..1_000_000u64); 488 | greater_than_field_size.add_with_carry(&BigInteger256::from(random_number)); 489 | let greater_than_field_size = greater_than_field_size.$to_bytes_method(); 490 | 491 | assert_eq!(greater_than_field_size.len(), 32); 492 | 493 | for nr_inputs in 1..12 { 494 | let mut hasher = Poseidon::<Fr>::new_circom(nr_inputs).unwrap(); 495 | 496 | let mut inputs = Vec::with_capacity(nr_inputs); 497 | for _ in 0..nr_inputs { 498 | inputs.push(&greater_than_field_size[..]); 499 | } 500 | 501 | let hash = hasher.$method(inputs.as_slice()); 502 | assert_eq!(hash, Err(PoseidonError::InputLargerThanModulus)); 503 | } 504 | } 505 | }; 506 | } 507 | 508 | test_fuzz_input_gte_field_size!( 509 | test_fuzz_poseidon_bn254_fq_hash_bytes_be_input_gt_field_size, 510 | hash_bytes_be, 511 | to_bytes_be 512 | ); 513 | 514 | test_fuzz_input_gte_field_size!( 515 | test_fuzz_poseidon_bn254_fq_hash_bytes_le_input_gt_field_size, 516 | hash_bytes_le, 517 | to_bytes_le 518 | ); 519 | 520 | macro_rules! test_input_gte_field_size { 521 | ($name:ident, $method:ident, $greater_than_field_size:expr) => { 522 | /// Checks whether hashing a byte slice representing an element larger 523 | /// than the modulus returns an error.
524 | #[test] 525 | fn $name() { 526 | for nr_inputs in 1..12 { 527 | let mut hasher = Poseidon::<Fr>::new_circom(nr_inputs).unwrap(); 528 | 529 | let mut inputs = Vec::with_capacity(nr_inputs); 530 | for _ in 0..nr_inputs { 531 | inputs.push(&$greater_than_field_size[..]); 532 | } 533 | 534 | let hash = hasher.$method(inputs.as_slice()); 535 | assert_eq!(hash, Err(PoseidonError::InputLargerThanModulus)); 536 | } 537 | } 538 | }; 539 | } 540 | 541 | test_input_gte_field_size!( 542 | test_poseidon_bn254_fq_hash_bytes_be_input_gt_field_size_our_check, 543 | hash_bytes_be, 544 | [ 545 | 216, 137, 85, 159, 239, 194, 107, 138, 254, 68, 21, 16, 165, 41, 64, 148, 208, 198, 201, 546 | 59, 220, 102, 142, 81, 49, 251, 174, 183, 183, 182, 4, 32, 547 | ] 548 | ); 549 | 550 | test_input_gte_field_size!( 551 | test_poseidon_bn254_fq_hash_bytes_le_input_gt_field_size_our_check, 552 | hash_bytes_le, 553 | [ 554 | 32, 4, 182, 183, 183, 174, 251, 49, 81, 142, 102, 220, 59, 201, 198, 208, 148, 64, 41, 165, 555 | 16, 21, 68, 254, 138, 107, 194, 239, 159, 85, 137, 216, 556 | ] 557 | ); 558 | 559 | test_input_gte_field_size!( 560 | test_poseidon_bn254_fq_hash_bytes_be_input_gt_field_size, 561 | hash_bytes_be, 562 | [ 563 | 48, 100, 78, 114, 225, 49, 160, 41, 184, 80, 69, 182, 129, 129, 88, 93, 40, 51, 232, 72, 564 | 121, 185, 112, 145, 67, 225, 245, 147, 240, 0, 0, 2 565 | ] 566 | ); 567 | 568 | test_input_gte_field_size!( 569 | test_poseidon_bn254_fq_hash_bytes_le_input_gt_field_size, 570 | hash_bytes_le, 571 | [ 572 | 2, 0, 0, 240, 147, 245, 225, 67, 145, 112, 185, 121, 72, 232, 51, 40, 93, 88, 129, 129, 573 | 182, 69, 80, 184, 41, 160, 49, 225, 114, 78, 100, 48 574 | ] 575 | ); 576 | 577 | macro_rules! test_input_eq_field_size { 578 | ($name:ident, $method:ident, $to_bytes_method:ident) => { 579 | /// Checks whether hashing a byte slice representing the modulus returns 580 | /// an error. 581 | #[test] 582 | fn $name() { 583 | let mut hasher = Poseidon::<Fr>::new_circom(1).unwrap(); 584 | let input = Fr::MODULUS.$to_bytes_method(); 585 | let hash = hasher.$method(&[&input]); 586 | assert_eq!(hash, Err(PoseidonError::InputLargerThanModulus)); 587 | } 588 | }; 589 | } 590 | 591 | test_input_eq_field_size!(test_input_eq_field_size_be, hash_bytes_be, to_bytes_be); 592 | test_input_eq_field_size!(test_input_eq_field_size_le, hash_bytes_le, to_bytes_le); 593 | 594 | /// Checks that endianness is honored correctly and produces expected hashes. 595 | #[test] 596 | fn test_endianness() { 597 | let mut hasher = Poseidon::<Fr>::new_circom(2).unwrap(); 598 | let le_input: &[u8] = &[0, 0, 0, 1]; 599 | let be_input: &[u8] = &[1, 0, 0, 0]; 600 | 601 | let hash1 = hasher.hash_bytes_le(&[le_input, le_input]).unwrap(); 602 | let mut hash2 = hasher.hash_bytes_be(&[be_input, be_input]).unwrap(); 603 | 604 | assert_ne!(hash1, hash2); 605 | 606 | // Make it little-endian. 607 | hash2.reverse(); 608 | 609 | assert_eq!(hash1, hash2); 610 | 611 | let le_input: &[u8] = &[ 612 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 613 | ]; 614 | let be_input: &[u8] = &[ 615 | 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 616 | ]; 617 | 618 | let hash3 = hasher.hash_bytes_le(&[le_input, le_input]).unwrap(); 619 | let mut hash4 = hasher.hash_bytes_be(&[be_input, be_input]).unwrap(); 620 | 621 | assert_ne!(hash3, hash4); 622 | 623 | // Make it little-endian. 624 | hash4.reverse(); 625 | 626 | // Compare the latest hashes.
627 | assert_eq!(hash3, hash4); 628 | 629 | let one = 1u64; 630 | let le_input = one.to_le_bytes(); 631 | let be_input = one.to_be_bytes(); 632 | 633 | let hash5 = hasher.hash_bytes_le(&[&le_input, &le_input]).unwrap(); 634 | let mut hash6 = hasher.hash_bytes_be(&[&be_input, &be_input]).unwrap(); 635 | 636 | assert_ne!(hash5, hash6); 637 | 638 | // Make it little-endian. 639 | hash6.reverse(); 640 | 641 | // Compare the latest hashes. 642 | assert_eq!(hash5, hash6); 643 | } 644 | 645 | /// Checks whether providing an empty input results in an error. 646 | #[test] 647 | fn test_empty_input() { 648 | let empty: &[u8] = &[]; 649 | let non_empty = &[1u8; 32]; 650 | 651 | // All inputs empty. 652 | for nr_inputs in 1..12 { 653 | let mut hasher = Poseidon::<Fr>::new_circom(nr_inputs).unwrap(); 654 | 655 | let mut inputs = Vec::with_capacity(nr_inputs); 656 | for _ in 0..nr_inputs { 657 | inputs.push(empty); 658 | } 659 | 660 | let hash = hasher.hash_bytes_be(inputs.as_slice()); 661 | assert_eq!(hash, Err(PoseidonError::EmptyInput)); 662 | 663 | let hash = hasher.hash_bytes_le(inputs.as_slice()); 664 | assert_eq!(hash, Err(PoseidonError::EmptyInput)); 665 | } 666 | 667 | // One empty input. 668 | for nr_inputs in 1..12 { 669 | let mut hasher = Poseidon::<Fr>::new_circom(nr_inputs).unwrap(); 670 | 671 | let mut inputs = Vec::with_capacity(nr_inputs); 672 | for _ in 0..(nr_inputs - 1) { 673 | inputs.push(non_empty.as_slice()); 674 | } 675 | inputs.push(empty); 676 | 677 | let hash = hasher.hash_bytes_be(inputs.as_slice()); 678 | assert_eq!(hash, Err(PoseidonError::EmptyInput)); 679 | 680 | let hash = hasher.hash_bytes_le(inputs.as_slice()); 681 | assert_eq!(hash, Err(PoseidonError::EmptyInput)); 682 | } 683 | } 684 | 685 | // Test cases were created with circomlibjs poseidon([1, ...]) for 1 to 12 inputs. 686 | const TEST_CASES: [[u8; 32]; 12] = [ 687 | [ 688 | 41, 23, 97, 0, 234, 169, 98, 189, 193, 254, 108, 101, 77, 106, 60, 19, 14, 150, 164, 209, 689 | 22, 139, 51, 132, 139, 137, 125, 197, 2, 130, 1, 51, 690 | ], 691 | [ 692 | 0, 122, 243, 70, 226, 211, 4, 39, 158, 121, 224, 169, 243, 2, 63, 119, 18, 148, 167, 138, 693 | 203, 112, 231, 63, 144, 175, 226, 124, 173, 64, 30, 129, 694 | ], 695 | [ 696 | 2, 192, 6, 110, 16, 167, 42, 189, 43, 51, 195, 178, 20, 203, 62, 129, 188, 177, 182, 227, 697 | 9, 97, 205, 35, 194, 2, 177, 134, 115, 191, 37, 67, 698 | ], 699 | [ 700 | 8, 44, 156, 55, 10, 13, 36, 244, 65, 111, 188, 65, 74, 55, 104, 31, 120, 68, 45, 39, 216, 701 | 99, 133, 153, 28, 23, 214, 252, 12, 75, 125, 113, 702 | ], 703 | [ 704 | 16, 56, 150, 5, 174, 104, 141, 79, 20, 219, 133, 49, 34, 196, 125, 102, 168, 3, 199, 43, 705 | 65, 88, 156, 177, 191, 134, 135, 65, 178, 6, 185, 187, 706 | ], 707 | [ 708 | 42, 115, 246, 121, 50, 140, 62, 171, 114, 74, 163, 229, 189, 191, 80, 179, 144, 53, 215, 709 | 114, 159, 19, 91, 151, 9, 137, 15, 133, 197, 220, 94, 118, 710 | ], 711 | [ 712 | 34, 118, 49, 10, 167, 243, 52, 58, 40, 66, 20, 19, 157, 157, 169, 89, 190, 42, 49, 178, 713 | 199, 8, 165, 248, 25, 84, 178, 101, 229, 58, 48, 184, 714 | ], 715 | [ 716 | 23, 126, 20, 83, 196, 70, 225, 176, 125, 43, 66, 51, 66, 81, 71, 9, 92, 79, 202, 187, 35, 717 | 61, 35, 11, 109, 70, 162, 20, 217, 91, 40, 132, 718 | ], 719 | [ 720 | 14, 143, 238, 47, 228, 157, 163, 15, 222, 235, 72, 196, 46, 187, 68, 204, 110, 231, 5, 95, 721 | 97, 251, 202, 94, 49, 59, 138, 95, 202, 131, 76, 71, 722 | ], 723 | [ 724 | 46, 196, 198, 94, 99, 120, 171, 140, 115, 48, 133, 79, 74, 112, 119, 193, 255, 146, 96, 725 | 228, 72, 133, 196, 184, 29, 209, 49,
173, 58, 134, 205, 150, 726 | ], 727 | [ 728 | 0, 113, 61, 65, 236, 166, 53, 241, 23, 212, 236, 188, 235, 95, 58, 102, 220, 65, 66, 235, 729 | 112, 181, 103, 101, 188, 53, 143, 27, 236, 64, 187, 155, 730 | ], 731 | [ 732 | 20, 57, 11, 224, 186, 239, 36, 155, 212, 124, 101, 221, 172, 101, 194, 229, 46, 133, 19, 733 | 192, 129, 193, 205, 114, 201, 128, 6, 9, 142, 154, 143, 190, 734 | ], 735 | ]; 736 | 737 | #[test] 738 | fn test_circom_1_to_12_inputs() { 739 | let mut inputs = Vec::new(); 740 | let value = [vec![0u8; 31], vec![1u8]].concat(); 741 | for i in 1..13 { 742 | inputs.push(value.as_slice()); 743 | let mut hasher = Poseidon::<Fr>::new_circom(i).unwrap(); 744 | let hash = hasher.hash_bytes_be(&inputs[..]).unwrap(); 745 | assert_eq!(hash, TEST_CASES[i - 1]); 746 | } 747 | let mut inputs = Vec::new(); 748 | let value = [vec![0u8; 31], vec![2u8]].concat(); 749 | for i in 1..13 { 750 | inputs.push(value.as_slice()); 751 | let mut hasher = Poseidon::<Fr>::new_circom(i).unwrap(); 752 | let hash = hasher.hash_bytes_be(&inputs[..]).unwrap(); 753 | assert!(hash != TEST_CASES[i - 1]); 754 | } 755 | } 756 | 757 | /// Checks whether creating a hasher for more than 12 inputs results in an 758 | /// error. 759 | #[test] 760 | fn test_circom_solana_t_gt_12_fails() { 761 | use light_poseidon::PoseidonError; 762 | 763 | let mut inputs = Vec::new(); 764 | let value = [vec![0u8; 31], vec![1u8]].concat(); 765 | for i in 13..16 { 766 | inputs.push(value.as_slice()); 767 | let hasher = Poseidon::<Fr>::new_circom(i); 768 | unsafe { 769 | assert_eq!( 770 | hasher.unwrap_err_unchecked(), 771 | PoseidonError::InvalidWidthCircom { 772 | width: i + 1, 773 | max_limit: 13 774 | } 775 | ); 776 | } 777 | } 778 | } 779 | 780 | /// Checks whether creating a hasher for 0 inputs results in an error.
781 | #[test] 782 | fn test_circom_t_0_fails() { 783 | use light_poseidon::PoseidonError; 784 | let hasher = Poseidon::<Fr>::new_circom(0); 785 | unsafe { 786 | assert_eq!( 787 | hasher.unwrap_err_unchecked(), 788 | PoseidonError::InvalidWidthCircom { 789 | width: 1, 790 | max_limit: 13 791 | } 792 | ); 793 | } 794 | } 795 | -------------------------------------------------------------------------------- /xtask/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "xtask" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | 7 | [dependencies] 8 | anyhow = "1.0" 9 | cargo-readme = "3.2" 10 | clap = { version = "4", features = ["derive"] } 11 | hex = "0.4.3" 12 | -------------------------------------------------------------------------------- /xtask/src/generate_parameters.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | env, 3 | fs::File, 4 | io::{self, prelude::*}, 5 | path::PathBuf, 6 | process::{Command, Stdio}, 7 | thread::spawn, 8 | }; 9 | 10 | use clap::Parser; 11 | 12 | #[derive(Debug, Parser)] 13 | pub struct Options { 14 | #[clap(long)] 15 | path: Option<PathBuf>, 16 | } 17 | #[allow(dead_code)] 18 | fn rustfmt(code: String) -> Result<Vec<u8>, anyhow::Error> { 19 | let mut cmd = match env::var_os("RUSTFMT") { 20 | Some(r) => Command::new(r), 21 | None => Command::new("rustfmt"), 22 | }; 23 | 24 | let mut cmd = cmd 25 | .stdin(Stdio::piped()) 26 | .stdout(Stdio::piped()) 27 | .stderr(Stdio::piped()) 28 | .spawn()?; 29 | 30 | let mut stdin = cmd.stdin.take().unwrap(); 31 | let mut stdout = cmd.stdout.take().unwrap(); 32 | 33 | let stdin_handle = spawn(move || { 34 | stdin.write_all(code.as_bytes()).unwrap(); 35 | }); 36 | 37 | let mut formatted_code = vec![]; 38 | io::copy(&mut stdout, &mut formatted_code)?; 39 | 40 | let _ = cmd.wait(); 41 | stdin_handle.join().unwrap(); 42 | 43 | Ok(formatted_code) 44 | } 45 | use std::path::Path; 46 | 47 | const FIELD: &str = "1"; 48 | const SBOX: &str = "0"; 49 | const FIELD_ELEMENT_BIT_SIZE: &str = "254"; 50 | const FULL_ROUNDS: &str = "8"; 51 | const PARTIAL_ROUNDS: [u8; 12] = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65]; 52 | const MODULUS_HEX: &str = "0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001"; 53 | #[allow(clippy::needless_return)] 54 | pub fn generate_parameters(_opts: Options) -> Result<(), anyhow::Error> { 55 | // git clone hadeshash into target 56 | // create params output files in target 57 | // loop over files in dir target/params/ 58 | // take the line after "Round constants for GF(p):" 59 | // remove [ ], split at , parse 60 | if !Path::new("./target/hadeshash").exists() { 61 | let _git_result = std::process::Command::new("git") 62 | .arg("clone") 63 | .arg("https://extgit.iaik.tugraz.at/krypto/hadeshash.git") 64 | .arg("./target/hadeshash") 65 | .stdout(Stdio::inherit()) 66 | .stderr(Stdio::inherit()) 67 | .output() 68 | .map_err(|e| anyhow::format_err!("git clone failed: {}", e.to_string()))?; 69 | } 70 | if !Path::new("./target/params").exists() { 71 | let _mkdir_result = std::process::Command::new("mkdir") 72 | .arg("./target/params") 73 | .stdout(Stdio::inherit()) 74 | .stderr(Stdio::inherit()) 75 | .output() 76 | .map_err(|e| anyhow::format_err!("mkdir failed: {}", e.to_string()))?; 77 | } 78 | for i in 2..14 { 79 | let path = format!("./target/params/poseidon_params_bn254_x5_{}", i); 80 | 81 | if !Path::new(&path).exists() { 82 | println!( 83 | "Generating Parameters partial rounds {} t = {}", 84 | 
PARTIAL_ROUNDS[i - 2], 85 | i 86 | ); 87 | let arg = "./target/hadeshash/code/generate_parameters_grain.sage".to_string(); 88 | 89 | let output = std::process::Command::new("sage") 90 | .args([ 91 | arg, 92 | FIELD.to_string(), 93 | SBOX.to_string(), 94 | FIELD_ELEMENT_BIT_SIZE.to_string(), 95 | format!("{}", i), 96 | FULL_ROUNDS.to_string(), 97 | format!("{}", PARTIAL_ROUNDS[i - 2]), 98 | MODULUS_HEX.to_string(), 99 | ]) 100 | .output()?; 101 | let mut file = File::create(&path)?; 102 | file.write_all(&output.stdout)?; 103 | } 104 | } 105 | 106 | let mut code = String::new(); 107 | code += " 108 | //! Constants and MDS matrix for the BN254 curve with the following properties: 109 | //! 110 | //! * x^5 S-boxes 111 | //! * 3 prime fields (one zero prime field and two inputs from the caller) 112 | //! * 8 full rounds and 57 partial rounds 113 | //! 114 | //! Those parameters are used for our Poseidon hash implementation. 115 | //! 116 | //! They were generated using the official script from the Poseidon paper: 117 | //! [generate_parameters_grain.sage](https://extgit.iaik.tugraz.at/krypto/hadeshash/-/blob/master/code/generate_parameters_grain.sage) 118 | //! with the following parameters: 119 | //! 120 | //! ```bash 121 | //! sage generate_parameters_grain.sage 1 0 254 3 8 57 0x30644e72e131a029b85045b68181585d2833e84879b9709143e1f593f0000001 122 | //! ``` 123 | pub const FULL_ROUNDS: usize = 8; 124 | pub const PARTIAL_ROUNDS: [usize; 15] = [56, 57, 56, 60, 60, 63, 64, 63, 60, 66, 60, 65, 70, 60, 64]; 125 | pub const ALPHA: u64 = 5; 126 | 127 | /// Returns Poseidon parameters for the BN254 curve with the following 128 | /// properties: 129 | /// 130 | /// * x^5 S-boxes 131 | /// * 3 inputs (one input with zeros and two inputs from the syscall) 132 | /// * 8 full rounds and 57 partial rounds 133 | /// 134 | /// The type parameter of this function must implement 135 | /// [`ark_ff::PrimeField`](ark_ff::PrimeField).
136 | use ark_ff::PrimeField; 137 | use crate::{PoseidonParameters, PoseidonError}; 138 | // to avoid warnings when width_limit_13 feature is used 139 | #[allow(unused_variables)] 140 | pub fn get_poseidon_parameters<F: PrimeField>(t: u8) -> Result<PoseidonParameters<F>, PoseidonError> { 141 | if t == 0_u8 { 142 | Err(PoseidonError::InvalidWidthCircom { 143 | width: t as usize, 144 | max_limit: 13usize, 145 | })\n 146 | }\n"; 147 | for t in 2..14 { 148 | let path = format!("./target/params/poseidon_params_bn254_x5_{}", t); 149 | let mut file = File::open(path)?; 150 | let mut contents = String::new(); 151 | file.read_to_string(&mut contents)?; 152 | let lines = contents.lines(); 153 | 154 | for line in lines { 155 | if line.starts_with("['") { 156 | code += &[ 157 | String::from("\telse if "), 158 | t.to_string(), 159 | String::from( 160 | " == t { 161 | let ark = vec![\n", 162 | ), 163 | ] 164 | .concat(); 165 | 166 | let line_processed = line 167 | .strip_prefix('[') 168 | .unwrap() 169 | .strip_suffix(']') 170 | .unwrap() 171 | .trim() 172 | .split(", ") 173 | .collect::<Vec<&str>>(); 174 | let _x: Vec<&str> = line_processed 175 | .iter() 176 | .map(|elem| { 177 | let str = String::from( 178 | elem.strip_prefix('\'').unwrap().strip_suffix('\'').unwrap(), 179 | ); 180 | code += &get_fr_string(&str); 181 | 182 | return "1"; 183 | }) 184 | .collect(); 185 | code += "\t\t\t\t];\n"; 186 | } else if line.starts_with(" [['") { 187 | code += &String::from("\t\t\t\tlet mds = vec![\n"); 188 | let line_processed = line.split('[').collect::<Vec<&str>>(); 189 | 190 | let _x: Vec<&str> = line_processed 191 | .iter() 192 | .map(|e| { 193 | if e.starts_with('\'') { 194 | code += &String::from("\t\t\t\t\tvec![\n"); 195 | } 196 | 197 | for elem in e.split('\'') { 198 | if elem.starts_with("0x") { 199 | code += &get_fr_string(&String::from(elem)); 200 | } 201 | } 202 | if e.starts_with('\'') { 203 | code += &String::from("\t\t\t\t\t],\n"); 204 | } 205 | 206 | return "1"; 207 | }) 208 | .collect(); 209 | code += &String::from("\t\t];\n"); 210 | } 211 | } 212 | code += &format!( 213 | "return Ok(crate::PoseidonParameters::new( 214 | ark, 215 | mds, 216 | FULL_ROUNDS, 217 | PARTIAL_ROUNDS[{}], 218 | t.into(), 219 | ALPHA, 220 | ));\n", 221 | t - 2 222 | ); 223 | 224 | code += "\t}\n"; 225 | } 226 | code += "else { 227 | return Err(PoseidonError::InvalidWidthCircom { 228 | width: t as usize, 229 | max_limit: 13usize, 230 | });\n 231 | }"; 232 | code += "}\n"; 233 | 234 | let path = "./light-poseidon/src/parameters/bn254_x5.rs"; 235 | let mut file = File::create(path)?; 236 | file.write_all(b"// This file is generated by xtask. 
Do not edit it manually.\n\n")?; 237 | // file.write_all(&rustfmt(code.to_string())?)?; 238 | write!(file, "{}", code)?; 239 | println!("Poseidon Parameters written to {:?}", path); 240 | std::process::Command::new("cargo") 241 | .arg("fmt") 242 | .output() 243 | .map_err(|e| anyhow::format_err!("cargo fmt failed: {}", e.to_string()))?; 244 | Ok(()) 245 | } 246 | 247 | fn get_fr_string(string: &str) -> String { 248 | let mut bytes = hex::decode(string.split_at(2).1).unwrap(); 249 | let mut tmp_str = String::from("F::from(ark_ff::BigInteger256::new([\n"); 250 | bytes.reverse(); 251 | for i in 0..4 { 252 | tmp_str += &format!( 253 | "\t{},\n", 254 | u64::from_le_bytes(bytes[i * 8..(i + 1) * 8].try_into().unwrap()) 255 | ); 256 | } 257 | 258 | tmp_str += "])),\n"; 259 | tmp_str 260 | } 261 | -------------------------------------------------------------------------------- /xtask/src/main.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | 3 | mod generate_parameters; 4 | 5 | #[derive(Parser)] 6 | pub struct XtaskOptions { 7 | #[clap(subcommand)] 8 | command: Command, 9 | } 10 | 11 | #[derive(Parser)] 12 | enum Command { 13 | GeneratePoseidonParameters(generate_parameters::Options), 14 | } 15 | 16 | fn main() -> Result<(), anyhow::Error> { 17 | let opts = XtaskOptions::parse(); 18 | 19 | match opts.command { 20 | Command::GeneratePoseidonParameters(opts) => { 21 | generate_parameters::generate_parameters(opts)? 22 | } 23 | } 24 | 25 | Ok(()) 26 | } 27 | --------------------------------------------------------------------------------
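As a quick orientation for the test suite above, the following is a minimal, self-contained sketch of the byte-oriented hashing API those tests exercise. It is illustrative only: it assumes `light-poseidon` and `ark-bn254` are added as dependencies and that `Poseidon`, `PoseidonBytesHasher`, and `Fr` resolve exactly as the tests use them; crate versions and manifest setup are not shown in this listing.

// Hypothetical standalone example (not part of the repository).
use ark_bn254::Fr;
use light_poseidon::{Poseidon, PoseidonBytesHasher};

fn main() {
    // Two inputs: new_circom(2) selects the circom-compatible BN254 x^5 parameters.
    let mut hasher = Poseidon::<Fr>::new_circom(2).unwrap();

    // Each input must be at most 32 bytes and, interpreted as an integer, smaller
    // than the BN254 scalar field modulus; otherwise an error is returned.
    let input1 = [1u8; 32];
    let input2 = [2u8; 32];

    // Big-endian byte hashing; hash_bytes_le is the little-endian counterpart.
    let hash = hasher.hash_bytes_be(&[&input1[..], &input2[..]]).unwrap();
    println!("{:?}", hash); // 32-byte, big-endian hash output.
}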