├── .codecov.yml ├── .github ├── CODEOWNERS ├── dependabot.yml └── workflows │ ├── code-analysis.yml │ ├── contract-inheritance-check.yml │ ├── go-check.yml │ ├── labels.yml │ ├── lint.yml │ └── tests.yml ├── .gitignore ├── .gitmodules ├── .golangci.yml ├── .markdownlint.yaml ├── .markdownlint.yml ├── .markdownlintignore ├── .prettierrc.json ├── LICENSE ├── Makefile ├── README.md ├── docs └── inclusion-proofs.md ├── foundry.toml ├── go.mod ├── go.sum ├── hardhat.config.ts ├── package-lock.json ├── package.json ├── remappings.txt ├── scripts ├── Dockerfile_Environment ├── deploy.ts ├── gen.sh └── upgradability_check.sh ├── slither.config.json ├── src ├── Blobstream.sol ├── Constants.sol ├── DataRootTuple.sol ├── IDAOracle.sol ├── lib │ ├── tree │ │ ├── Constants.sol │ │ ├── Types.sol │ │ ├── Utils.sol │ │ ├── binary │ │ │ ├── BinaryMerkleMultiproof.sol │ │ │ ├── BinaryMerkleProof.sol │ │ │ ├── BinaryMerkleTree.sol │ │ │ ├── TreeHasher.sol │ │ │ └── test │ │ │ │ ├── BinaryMerkleTree.t.sol │ │ │ │ └── TreeHasher.t.sol │ │ ├── namespace │ │ │ ├── NamespaceMerkleMultiproof.sol │ │ │ ├── NamespaceMerkleProof.sol │ │ │ ├── NamespaceMerkleTree.sol │ │ │ ├── NamespaceNode.sol │ │ │ ├── TreeHasher.sol │ │ │ └── test │ │ │ │ ├── NamespaceMerkleMultiproof.t.sol │ │ │ │ ├── NamespaceMerkleTree.t.sol │ │ │ │ └── TreeHasher.t.sol │ │ └── test │ │ │ ├── Utils.t.sol │ │ │ ├── blob.dat │ │ │ ├── header.dat │ │ │ └── proofs.json │ └── verifier │ │ ├── DAVerifier.sol │ │ └── test │ │ ├── DAVerifier.t.sol │ │ └── RollupInclusionProofs.t.sol └── test │ ├── Blobstream.t.sol │ └── BlobstreamBenchmark.t.sol ├── tsconfig.json └── wrappers ├── Blobstream.sol └── wrapper.go └── ERC1967Proxy.sol └── wrapper.go /.codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | precision: 2 3 | round: down 4 | range: 70...100 5 | 6 | status: 7 | # Learn more at https://docs.codecov.io/docs/commit-status 8 | project: 9 | default: 10 | threshold: 1% # allow this much decrease on project 11 | app: 12 | target: 70% 13 | flags: 14 | - app 15 | modules: 16 | target: 70% 17 | flags: 18 | - modules 19 | client: 20 | flags: 21 | - client 22 | changes: false 23 | 24 | comment: 25 | layout: "reach, diff, files" 26 | behavior: default # update if exists else create new 27 | require_changes: true 28 | 29 | # flags: 30 | # app: 31 | # paths: 32 | # - "app/" 33 | # modules: 34 | # paths: 35 | # - "x/" 36 | # - "!x/**/client/" # ignore client package 37 | # client: 38 | # paths: 39 | # - "client/" 40 | # - "x/**/client/" 41 | 42 | ignore: 43 | - "docs" 44 | - "*.md" 45 | - "*.rst" 46 | - "**/*.pb.go" 47 | - "types/*.pb.go" 48 | - "test/*" 49 | - "test/**/*" 50 | - "scripts/" 51 | - "contrib" 52 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # CODEOWNERS: https://help.github.com/articles/about-codeowners/ 2 | 3 | # NOTE: Order is important; the last matching pattern takes the 4 | # most precedence. 
5 | 6 | # Primary repo maintainers 7 | * @adlerjohn @rach-id 8 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: github-actions 4 | directory: "/" 5 | schedule: 6 | interval: daily 7 | open-pull-requests-limit: 10 8 | reviewers: 9 | - "rach-id" 10 | - package-ecosystem: gomod 11 | directory: "/" 12 | schedule: 13 | interval: daily 14 | open-pull-requests-limit: 10 15 | labels: 16 | - dependencies 17 | reviewers: 18 | - "rach-id" 19 | -------------------------------------------------------------------------------- /.github/workflows/code-analysis.yml: -------------------------------------------------------------------------------- 1 | name: Slither Analysis 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | analyze: 11 | runs-on: ubuntu-latest 12 | permissions: 13 | contents: read 14 | security-events: write 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | submodules: recursive 19 | 20 | - name: Install Foundry 21 | uses: onbjerg/foundry-toolchain@v1 22 | with: 23 | version: stable 24 | 25 | - name: Build project 26 | working-directory: ./ 27 | run: forge build --build-info --skip test script 28 | 29 | - name: Run Slither 30 | uses: crytic/slither-action@v0.4.1 31 | id: slither 32 | with: 33 | ignore-compile: true 34 | node-version: 16 35 | sarif: results.sarif 36 | fail-on: none 37 | 38 | - name: Upload SARIF file 39 | uses: github/codeql-action/upload-sarif@v3 40 | with: 41 | sarif_file: ${{ steps.slither.outputs.sarif }} 42 | -------------------------------------------------------------------------------- /.github/workflows/contract-inheritance-check.yml: -------------------------------------------------------------------------------- 1 | name: Check Contract Inheritance 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - master 8 | 9 | jobs: 10 | check_inheritance: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | 17 | - name: Set up Node.js 18 | uses: actions/setup-node@v4 19 | with: 20 | node-version: 16 21 | 22 | - name: Install Surya 23 | run: | 24 | npm install -g surya 25 | 26 | - name: Run Surya Inheritance Check 27 | run: | 28 | ./scripts/upgradability_check.sh 29 | -------------------------------------------------------------------------------- /.github/workflows/go-check.yml: -------------------------------------------------------------------------------- 1 | name: Go-Check 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | - release/** 8 | 9 | jobs: 10 | check: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | 17 | - name: Set up Go 18 | uses: actions/setup-go@v5 19 | with: 20 | go-version: 1.24.0 21 | 22 | - name: Verify Go Mod and Go Sum 23 | run: | 24 | go mod tidy 25 | git diff --exit-code go.mod go.sum 26 | -------------------------------------------------------------------------------- /.github/workflows/labels.yml: -------------------------------------------------------------------------------- 1 | name: Required Labels 2 | 3 | on: 4 | pull_request: 5 | types: [opened, labeled, unlabeled, synchronize] 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | label: 13 | runs-on: 
ubuntu-latest 14 | steps: 15 | - uses: mheap/github-action-required-labels@v5 16 | with: 17 | mode: minimum 18 | count: 1 19 | labels: "bug, contracts, chore, dependencies, documentation, enhancement, github_actions, optimization, testing, T:Dependencies" # yamllint disable-line rule:line-length 20 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | - release/** 8 | 9 | jobs: 10 | check-solidity-formatting: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | with: 15 | submodules: recursive 16 | 17 | - name: Install Foundry 18 | uses: onbjerg/foundry-toolchain@v1 19 | with: 20 | version: stable 21 | 22 | - name: Format Solidity files 23 | working-directory: ./ 24 | run: make fmt 25 | 26 | - name: Check Solidity formatting 27 | run: | 28 | if [ "$(git diff --ignore-space-at-eol src | wc -l)" -gt "0" ]; then 29 | echo "Detected uncommitted changes after formatting. See status below:" 30 | git diff 31 | exit 1 32 | fi 33 | markdown-lint: 34 | runs-on: ubuntu-latest 35 | steps: 36 | - uses: actions/checkout@v4 37 | - uses: celestiaorg/.github/.github/actions/markdown-lint@v0.6.3 38 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - master 7 | - release/** 8 | 9 | jobs: 10 | cleanup-runs: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: rokroskar/workflow-run-cleanup-action@master 14 | env: 15 | GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" 16 | if: "!startsWith(github.ref, 'refs/tags/') && github.ref != 'refs/heads/master'" 17 | 18 | forge-test: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - uses: actions/checkout@v4 22 | with: 23 | submodules: recursive 24 | 25 | - name: Install Foundry 26 | uses: onbjerg/foundry-toolchain@v1 27 | with: 28 | version: stable 29 | 30 | - name: Check formatting 31 | working-directory: ./ 32 | run: forge fmt --check 33 | 34 | - name: Run forge test 35 | working-directory: ./ 36 | run: forge test 37 | 38 | go-wrapper-check: 39 | runs-on: ubuntu-latest 40 | needs: forge-test 41 | steps: 42 | - uses: actions/checkout@v4 43 | with: 44 | submodules: recursive 45 | 46 | - uses: actions/setup-go@v5 47 | with: 48 | go-version: "1.24.0" 49 | 50 | - name: Install Foundry 51 | uses: onbjerg/foundry-toolchain@v1 52 | with: 53 | version: stable 54 | 55 | - name: Install Go Ethereum devtools 56 | working-directory: ./ 57 | run: | 58 | git clone --depth 1 --branch v1.15.3 https://github.com/ethereum/go-ethereum.git 59 | cd go-ethereum 60 | make devtools 61 | 62 | - name: Make Go wrapper 63 | working-directory: ./ 64 | run: make 65 | 66 | - name: Check Go wrapper match 67 | run: | 68 | if [ "$(git diff --ignore-space-at-eol wrappers | wc -l)" -gt "0" ]; then 69 | echo "Detected uncommitted changes after build. 
See status below:" 70 | git diff 71 | exit 1 72 | fi 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and plugins 2 | *.exe 3 | *.exe~ 4 | *.dll 5 | *.so 6 | *.dylib 7 | 8 | # Test binary, built with `go test -c` 9 | *.test 10 | 11 | # Output of the go coverage tool, specifically when used with LiteIDE 12 | *.out 13 | 14 | # Dependency directories (remove the comment below to include it) 15 | # vendor/ 16 | 17 | 18 | # OS 19 | .DS_Store 20 | *.swp 21 | *.swo 22 | .vscode 23 | .idea 24 | 25 | # Build 26 | build 27 | 28 | # IDE 29 | .idea/ 30 | *.iml 31 | .root/ 32 | 33 | dist/ 34 | .env 35 | 36 | solidity/artifacts/ 37 | solidity/node_modules/ 38 | solidity/typechain/ 39 | solidity/cache/ 40 | solidity/0x_ganache_snapshot/ 41 | 42 | # dir with temp data 43 | data/ 44 | test/solidity/build 45 | solidity/deployment/build 46 | solidity/deployment/.env 47 | 48 | # Foundry 49 | cache/ 50 | 51 | artifacts 52 | build 53 | cache 54 | .coverage_* 55 | coverage 56 | **/typechain/**/* 57 | node_modules 58 | out/ 59 | 60 | # hardhat 61 | 62 | typechain-types/ 63 | cache_hardhat/ 64 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "ethereum/solidity/lib/ds-test"] 2 | path = lib/ds-test 3 | url = https://github.com/dapphub/ds-test 4 | [submodule "ethereum/solidity/lib/openzeppelin-contracts"] 5 | path = lib/openzeppelin-contracts 6 | url = https://github.com/openzeppelin/openzeppelin-contracts 7 | [submodule "lib/openzeppelin-contracts-upgradeable"] 8 | path = lib/openzeppelin-contracts-upgradeable 9 | url = https://github.com/OpenZeppelin/openzeppelin-contracts-upgradeable 10 | [submodule "lib/forge-std"] 11 | path = lib/forge-std 12 | url = https://github.com/foundry-rs/forge-std 13 | -------------------------------------------------------------------------------- /.golangci.yml: -------------------------------------------------------------------------------- 1 | run: 2 | tests: false 3 | skip-dirs: 4 | - test 5 | 6 | linters: 7 | enable: 8 | - bodyclose 9 | - deadcode 10 | - depguard 11 | - dogsled 12 | - errcheck 13 | - goconst 14 | - gocritic 15 | - gofmt 16 | - goimports 17 | - golint 18 | - gosec 19 | - gosimple 20 | - govet 21 | - ineffassign 22 | - lll 23 | - misspell 24 | - maligned 25 | - nakedret 26 | - prealloc 27 | - scopelint 28 | - staticcheck 29 | - structcheck 30 | - stylecheck 31 | - typecheck 32 | - unconvert 33 | - unparam 34 | - unused 35 | - varcheck 36 | - nolintlint 37 | - asciicheck 38 | # - whitespace 39 | # - wsl 40 | 41 | issues: 42 | exclude-rules: 43 | - path: _test\.go 44 | linters: 45 | - gosec 46 | - linters: 47 | - lll 48 | source: "https://" 49 | max-same-issues: 50 50 | 51 | linters-settings: 52 | dogsled: 53 | max-blank-identifiers: 3 54 | golint: 55 | min-confidence: 0 56 | maligned: 57 | suggest-new: true 58 | misspell: 59 | locale: US 60 | nolintlint: 61 | allow-unused: false 62 | allow-leading-space: true 63 | require-explanation: false 64 | require-specific: false 65 | -------------------------------------------------------------------------------- /.markdownlint.yaml: -------------------------------------------------------------------------------- 1 | "default": true # Default state for all rules 2 | "MD010": 3 | "code_blocks": false # Disable rule for hard tabs 
in code blocks 4 | "MD013": false # Disable rule for line length 5 | "MD033": false # Disable rule banning inline HTML 6 | -------------------------------------------------------------------------------- /.markdownlint.yml: -------------------------------------------------------------------------------- 1 | # Default state for all rules 2 | default: true 3 | # MD014/commands-show-output Dollar signs used before commands without showing output 4 | MD014: false 5 | -------------------------------------------------------------------------------- /.markdownlintignore: -------------------------------------------------------------------------------- 1 | ethereum/solidity/lib/ 2 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "overrides": [ 3 | { 4 | "files": "*.json", 5 | "options": { 6 | "tabWidth": 2, 7 | "useTabs": false 8 | } 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: gen 2 | 3 | ############################################################################### 4 | ## Solidity ## 5 | ############################################################################### 6 | 7 | gen: solidity-wrappers 8 | 9 | SOLIDITY_DIR = . 
10 | SOLIDITY_SRC_DIR = $(SOLIDITY_DIR)/src 11 | CONTRACTS = Blobstream.sol ERC1967Proxy.sol 12 | 13 | fmt: 14 | @echo "--> Running forge fmt" 15 | @forge fmt 16 | 17 | solidity-wrappers: 18 | ./scripts/gen.sh $(SOLIDITY_SRC_DIR) $(CONTRACTS) 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Blobstream-contracts 2 | 3 | 4 | 5 | [![GoDoc](https://img.shields.io/badge/godoc-reference-blue?style=flat-square&logo=go)](https://godoc.org/github.com/celestiaorg/blobstream-contracts) 6 | [![Version](https://img.shields.io/github/tag/celestiaorg/blobstream-contracts.svg?style=flat-square)](https://github.com/celestiaorg/blobstream-contracts/releases/latest) 7 | [![License: Apache-2.0](https://img.shields.io/github/license/celestiaorg/blobstream-contracts.svg?style=flat-square)](https://github.com/celestiaorg/blobstream-contracts/blob/master/LICENSE) 8 | 9 | Blobstream is a Celestia -> EVM message relay. 10 | It is based on Umee's Gravity Bridge implementation, [Peggo](https://github.com/umee-network/peggo). 11 | **This project is under active development and should not be used in production**. 12 | 13 | ## Table of Contents 14 | 15 | - [Building From Source](#building-from-source) 16 | - [Send a message from Celestia to an EVM chain](#send-a-message-from-celestia-to-an-evm-chain) 17 | - [How it works](#how-it-works) 18 | 19 | ## Building From Source 20 | 21 | ### Dependencies 22 | 23 | Initialize git submodules, needed for Forge dependencies: 24 | 25 | ```sh 26 | git submodule init 27 | git submodule update 28 | ``` 29 | 30 | To regenerate the Go ABI wrappers with `make gen`, you need the `abigen` tool. 31 | Building requires [Go 1.19+](https://golang.org/dl/). 32 | Install `abigen` with: 33 | 34 | ```sh 35 | git clone https://github.com/ethereum/go-ethereum.git 36 | cd go-ethereum 37 | make devtools 38 | ``` 39 | 40 | ### Build and Test Contracts 41 | 42 | Build with: 43 | 44 | ```sh 45 | forge build 46 | ``` 47 | 48 | Test with: 49 | 50 | ```sh 51 | forge test 52 | ``` 53 | 54 | ### Format 55 | 56 | Format Solidity with: 57 | 58 | ```sh 59 | forge fmt 60 | ``` 61 | 62 | ### Regenerate Go Wrappers 63 | 64 | Go wrappers can be regenerated with: 65 | 66 | ```sh 67 | make 68 | ``` 69 | 70 | ## Send a message from Celestia to an EVM chain 71 | 72 | A message can be included on Celestia by using the Celestia app. 73 | Instructions [here](https://github.com/celestiaorg/celestia-app). 74 | 75 | ## How it works 76 | 77 | Blobstream allows Celestia block header data roots to be relayed in one direction, from Celestia to an EVM chain. 78 | It does not support bridging assets such as fungible or non-fungible tokens directly, and cannot send messages from the EVM chain back to Celestia. 79 | 80 | It works by relying on a set of signers to attest to some event on Celestia: the Celestia validator set. 81 | Blobstream contract keeps track of the Celestia validator set by updating its view of the validator set with `updateValidatorSet()`. 82 | More than 2/3 of the voting power of the current view of the validator set must sign off on new relayed events, submitted with `submitDataRootTupleRoot()`. 83 | Each event is a batch of `DataRootTuple`s, with each tuple representing a single [data root (i.e. block header)](https://celestiaorg.github.io/celestia-app/specs/data_structures.html#header). 84 | Relayed tuples are in the same order as Celestia block headers. 
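To make the data flow concrete, here is a minimal, illustrative sketch of how a consumer contract could check that a particular data root tuple is covered by one of these relayed commitments. The struct and interface shapes below are assumptions made for this example; the authoritative definitions live in `src/DataRootTuple.sol`, `src/lib/tree/binary/BinaryMerkleProof.sol`, and `src/IDAOracle.sol`.

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity ^0.8.22;

// Illustrative stand-ins for the repository's types; the authoritative versions
// live in src/DataRootTuple.sol, src/lib/tree/binary/BinaryMerkleProof.sol and
// src/IDAOracle.sol.
struct DataRootTuple {
    uint256 height;   // Celestia block height the data root belongs to
    bytes32 dataRoot; // data root of that block's header
}

struct BinaryMerkleProof {
    bytes32[] sideNodes; // sibling hashes on the path from the leaf to the root
    uint256 key;         // index of the leaf in the tree
    uint256 numLeaves;   // total number of leaves in the tree
}

// Assumed read-side of the Blobstream contract: returns true when `tuple` is a
// leaf of the batch committed under `tupleRootNonce`.
interface IDAOracle {
    function verifyAttestation(
        uint256 tupleRootNonce,
        DataRootTuple memory tuple,
        BinaryMerkleProof memory proof
    ) external view returns (bool);
}

// Example consumer: reverts unless the given (height, dataRoot) pair has been
// attested to by the Blobstream contract.
contract DataRootConsumer {
    IDAOracle public immutable blobstream;

    constructor(IDAOracle _blobstream) {
        blobstream = _blobstream;
    }

    function requireAttested(
        uint256 tupleRootNonce,
        DataRootTuple memory tuple,
        BinaryMerkleProof memory proof
    ) external view {
        require(blobstream.verifyAttestation(tupleRootNonce, tuple, proof), "data root not attested");
    }
}
```

The point of this pattern is that a downstream contract never trusts a Celestia data root directly: it only accepts one that verifies against a tuple root the Blobstream contract has already accepted from the validator set.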
85 | 86 | ### Events and messages relayed 87 | 88 | **Validator sets**: 89 | The relayer informs the Blobstream contract of the current validators and their voting power. 90 | This results in an execution of the `updateValidatorSet` function. 91 | 92 | **Batches**: 93 | The relayer informs the Blobstream contract of new data root tuple roots. 94 | This results in an execution of the `submitDataRootTupleRoot` function. 95 | 96 | ## Audits 97 | 98 | | Date | Auditor | celestia-app | blobstream-contracts | Report | 99 | |------------|-----------------------------------------------|-------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------| 100 | | 2023/10/17 | [Binary Builders](https://binary.builders/) | [v1.0.0-rc10](https://github.com/celestiaorg/celestia-app/releases/tag/v1.0.0-rc10) | [eb7a4e7](https://github.com/celestiaorg/blobstream-contracts/commit/eb7a4e74718b80277ad9dde116ead67383f5fe15) | [binary-builders.pdf](https://github.com/celestiaorg/blobstream-contracts/files/13961809/2023-10-17_Celestia_Audit_Report_Binary_Builders.pdf) | 101 | | 2023/10/26 | [Informal Systems](https://informal.systems/) | [v1.0.0](https://github.com/celestiaorg/celestia-app/tree/v1.0.0) | [cf301adf](https://github.com/celestiaorg/blobstream-contracts/blob/cf301adfbfdae138526199fab805822400dcfd5d) | [informal-systems.pdf](https://github.com/celestiaorg/blobstream-contracts/files/13961767/Celestia_.Q4.2023.QGB-v2-20231026_182304.pdf) | 102 | | 2023/11/16 | [Ottersec](https://osec.io/) | [v1.3.0](https://github.com/celestiaorg/celestia-app/releases/tag/v1.3.0) | [v3.1.0](https://github.com/celestiaorg/blobstream-contracts/releases/tag/v3.1.0) | [ottersec.pdf](https://github.com/celestiaorg/blobstream-contracts/files/14383577/celestia_blobstream_audit_final.pdf) | 103 | -------------------------------------------------------------------------------- /docs/inclusion-proofs.md: -------------------------------------------------------------------------------- 1 | # Blobstream Fraud Proofs 2 | 3 | ## Blobstream Intro 4 | 5 | A Blobstream rollup is a blockchain that uses Celestia for data availability but settles on any EVM chain. Blobstream operates by having the Celestia validator set periodically sign over batched data commitments and validator set updates, which are relayed to an EVM smart contract. The data commitments are stored in the EVM chain's state and can be used to prove inclusion of any data historically posted to Celestia. 6 | 7 | ## Fraud Proofs 8 | 9 | Fraud proofs can be used to inform light clients (including on-chain smart contract light clients) of an invalid rollup state transition or unavailable rollup block data—specifically, rollup block data that is claimed to be on Celestia but is not. They rely on rollup full nodes fetching the data that was published to Celestia and executing all the state transitions to verify the rollup state. If they discover an invalid state transition or an unavailable rollup block, they emit a fraud proof with the necessary information to convince light clients that fraud happened. This allows for trust-minimized light clients, as the network only needs one honest full node to create the fraud proof and propagate it. 
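To give a feel for how such fraud proofs could surface on-chain, the hypothetical interface below sketches settlement-contract entry points for the two complaint types discussed in this document: unavailable rollup data and an invalid state transition. Every name and parameter in it is illustrative only; this repository ships the underlying proof-verification building blocks (see `src/lib/verifier/DAVerifier.sol` and the `RollupInclusionProofs.t.sol` example), not a settlement contract.

```solidity
// SPDX-License-Identifier: Apache-2.0
pragma solidity ^0.8.22;

// Hypothetical sketch only: a rollup settlement contract that accepts the two
// kinds of fraud proofs described in this document. The byte-encoded parameters
// are placeholders; a real implementation would carry the Blobstream attestation
// proof, the share proofs, and the span bounds described in the sections below.
interface IRollupFraudProofs {
    // Challenge a rollup block whose sequence of spans points outside the
    // bounds of the Celestia square it claims to be part of.
    function proveDataUnavailable(
        bytes32 rollupBlockHash,
        bytes calldata celestiaProofs
    ) external;

    // Challenge a rollup block that contains a transaction breaking the
    // rollup's state transition rules.
    function proveInvalidStateTransition(
        bytes32 rollupBlockHash,
        bytes calldata celestiaProofs,
        bytes calldata executionWitness
    ) external;
}
```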
10 | 11 | ## Rollup Header 12 | 13 | Rollups can adopt many approaches to prove that fraud occurred. One approach is to include the following fields in the rollup header: 14 | 15 | - Rollup block state root 16 | - A sequence of spans in Celestia, which references where the rollup data was published in the Celestia chain. 17 | 18 | > [!NOTE] 19 | > The sequence of spans can be defined using the following: `height`, `start index`, and `length` in the Celestia block, in the case of a single Celestia block. However, it could be generalized to span over multiple blocks. 20 | 21 | For the rest of the document, we will suppose that the sequence of spans only references one Celestia block. 22 | 23 | ## Proving Unavailable Data 24 | 25 | By construction, the rollup block data **is the sequence of spans defined in the header**. Thus, to prove that the rollup data is unavailable, it is necessary and sufficient to show that the sequence of spans doesn't belong to the Celestia block, i.e. the span is out of bounds. 26 | 27 | We can prove this by creating a binary [Merkle proof](https://github.com/celestiaorg/celestia-core/blob/c3ab251659f6fe0f36d10e0dbd14c29a78a85352/crypto/merkle/proof.go#L19-L31) of any row/column to the Celestia data root. This proof provides the `total`, which is the number of rows/columns in the extended data square. This can be used to calculate the square size. 28 | 29 | Then, we use that information to check whether the transaction index provided in the header is out of the square size bounds. 30 | 31 | For the data root, we use a binary Merkle proof to prove its inclusion in a data root tuple root that was committed to by the Blobstream smart contract. More on this in the [data root inclusion proof](#1-data-root-inclusion-proof) section below. 32 | 33 | ## Proving an Invalid State Transition 34 | 35 | In order to prove an invalid transaction in the rollup, we need to prove the following: 36 | 37 | - Prove that the transaction was posted to Celestia, and 38 | - Prove that the transaction is invalid. This is left to the rollup to define. 39 | 40 | The first part, proving that the transaction was posted to Celestia, can be done in three steps: 41 | 42 | 1. Prove that the data root tuple is committed to by the Blobstream smart contract 43 | 2. Verify the inclusion proof of the transaction to the Celestia data root 44 | 3. Prove that the transaction is in the rollup sequence of spans 45 | 46 | ### 1. Data root inclusion proof 47 | 48 | To prove that the data root is committed to by the Blobstream smart contract, we need to provide a Merkle proof of the data root tuple to a data root tuple root. This can be created using the [`data_root_inclusion_proof`](https://github.com/celestiaorg/celestia-core/blob/c3ab251659f6fe0f36d10e0dbd14c29a78a85352/rpc/client/http/http.go#L492-L511) query. 49 | 50 | ### 2. Transaction inclusion proof 51 | 52 | To prove that a rollup transaction is part of the data root, we need to provide two proofs: a namespace Merkle proof of the transaction to a row root, obtained by proving the shares that contain the transaction to the row root, and a binary Merkle proof of the row root to the data root. 53 | 54 | These proofs can be generated using the [`ProveShares`](https://github.com/celestiaorg/celestia-core/blob/c3ab251659f6fe0f36d10e0dbd14c29a78a85352/rpc/client/http/http.go#L526-L543) query. 55 | 56 | ### 3. 
Transaction part of the rollup sequence 57 | 58 | To prove that a transaction is part of the rollup sequence of spans, we take the authenticated share proof and use the shares begin/end key to define the share position in the row. 59 | 60 | Then, we use the row proof to get the row index in the extended Celestia square and get the index of the share in row major order: 61 | 62 | ```solidity 63 | uint256 shareIndexInRow = shareProof.shareProofs[0].beginKey; 64 | uint256 shareIndexInRowMajorOrder = shareIndexInRow + shareProof.rowProofs[0].numLeaves * shareProof.rowProofs[0].key; 65 | ``` 66 | 67 | Finally, we can compare the computed index with the rollup header sequence of spans, and be sure that the share/transaction is part of the rollup data. 68 | 69 | Check the `RollupInclusionProofs.t.sol` for an example. 70 | -------------------------------------------------------------------------------- /foundry.toml: -------------------------------------------------------------------------------- 1 | [profile.default] 2 | solc_version = "0.8.22" 3 | via_ir = true 4 | gas_reports = ["*"] 5 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/celestiaorg/blobstream-contracts/v4 2 | 3 | go 1.24.0 4 | 5 | require github.com/ethereum/go-ethereum v1.15.11 6 | 7 | require ( 8 | github.com/Microsoft/go-winio v0.6.2 // indirect 9 | github.com/StackExchange/wmi v1.2.1 // indirect 10 | github.com/bits-and-blooms/bitset v1.20.0 // indirect 11 | github.com/consensys/bavard v0.1.27 // indirect 12 | github.com/consensys/gnark-crypto v0.16.0 // indirect 13 | github.com/crate-crypto/go-eth-kzg v1.3.0 // indirect 14 | github.com/crate-crypto/go-ipa v0.0.0-20240724233137-53bbb0ceb27a // indirect 15 | github.com/deckarep/golang-set/v2 v2.6.0 // indirect 16 | github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect 17 | github.com/ethereum/c-kzg-4844/v2 v2.1.0 // indirect 18 | github.com/ethereum/go-verkle v0.2.2 // indirect 19 | github.com/fsnotify/fsnotify v1.6.0 // indirect 20 | github.com/go-ole/go-ole v1.3.0 // indirect 21 | github.com/google/uuid v1.3.0 // indirect 22 | github.com/gorilla/websocket v1.4.2 // indirect 23 | github.com/holiman/uint256 v1.3.2 // indirect 24 | github.com/mmcloughlin/addchain v0.4.0 // indirect 25 | github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible // indirect 26 | github.com/supranational/blst v0.3.14 // indirect 27 | github.com/tklauser/go-sysconf v0.3.12 // indirect 28 | github.com/tklauser/numcpus v0.6.1 // indirect 29 | golang.org/x/crypto v0.35.0 // indirect 30 | golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // indirect 31 | golang.org/x/sync v0.11.0 // indirect 32 | golang.org/x/sys v0.30.0 // indirect 33 | rsc.io/tmplfunc v0.0.3 // indirect 34 | ) 35 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/DataDog/zstd v1.4.5 h1:EndNeuB0l9syBZhut0wns3gV1hL8zX8LIu6ZiVHWLIQ= 2 | github.com/DataDog/zstd v1.4.5/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo= 3 | github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= 4 | github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= 5 | github.com/StackExchange/wmi v1.2.1 h1:VIkavFPXSjcnS+O8yTq7NI32k0R5Aj+v39y29VYDOSA= 6 | github.com/StackExchange/wmi 
v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= 7 | github.com/VictoriaMetrics/fastcache v1.12.2 h1:N0y9ASrJ0F6h0QaC3o6uJb3NIZ9VKLjCM7NQbSmF7WI= 8 | github.com/VictoriaMetrics/fastcache v1.12.2/go.mod h1:AmC+Nzz1+3G2eCPapF6UcsnkThDcMsQicp4xDukwJYI= 9 | github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= 10 | github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= 11 | github.com/bits-and-blooms/bitset v1.20.0 h1:2F+rfL86jE2d/bmw7OhqUg2Sj/1rURkBn3MdfoPyRVU= 12 | github.com/bits-and-blooms/bitset v1.20.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= 13 | github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk= 14 | github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s= 15 | github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= 16 | github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= 17 | github.com/cockroachdb/errors v1.11.3 h1:5bA+k2Y6r+oz/6Z/RFlNeVCesGARKuC6YymtcDrbC/I= 18 | github.com/cockroachdb/errors v1.11.3/go.mod h1:m4UIW4CDjx+R5cybPsNrRbreomiFqt8o1h1wUVazSd8= 19 | github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce h1:giXvy4KSc/6g/esnpM7Geqxka4WSqI1SZc7sMJFd3y4= 20 | github.com/cockroachdb/fifo v0.0.0-20240606204812-0bbfbd93a7ce/go.mod h1:9/y3cnZ5GKakj/H4y9r9GTjCvAFta7KLgSHPJJYc52M= 21 | github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= 22 | github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= 23 | github.com/cockroachdb/pebble v1.1.2 h1:CUh2IPtR4swHlEj48Rhfzw6l/d0qA31fItcIszQVIsA= 24 | github.com/cockroachdb/pebble v1.1.2/go.mod h1:4exszw1r40423ZsmkG/09AFEG83I0uDgfujJdbL6kYU= 25 | github.com/cockroachdb/redact v1.1.5 h1:u1PMllDkdFfPWaNGMyLD1+so+aq3uUItthCFqzwPJ30= 26 | github.com/cockroachdb/redact v1.1.5/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= 27 | github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= 28 | github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= 29 | github.com/consensys/bavard v0.1.27 h1:j6hKUrGAy/H+gpNrpLU3I26n1yc+VMGmd6ID5+gAhOs= 30 | github.com/consensys/bavard v0.1.27/go.mod h1:k/zVjHHC4B+PQy1Pg7fgvG3ALicQw540Crag8qx+dZs= 31 | github.com/consensys/gnark-crypto v0.16.0 h1:8Dl4eYmUWK9WmlP1Bj6je688gBRJCJbT8Mw4KoTAawo= 32 | github.com/consensys/gnark-crypto v0.16.0/go.mod h1:Ke3j06ndtPTVvo++PhGNgvm+lgpLvzbcE2MqljY7diU= 33 | github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= 34 | github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= 35 | github.com/crate-crypto/go-eth-kzg v1.3.0 h1:05GrhASN9kDAidaFJOda6A4BEvgvuXbazXg/0E3OOdI= 36 | github.com/crate-crypto/go-eth-kzg v1.3.0/go.mod h1:J9/u5sWfznSObptgfa92Jq8rTswn6ahQWEuiLHOjCUI= 37 | github.com/crate-crypto/go-ipa v0.0.0-20240724233137-53bbb0ceb27a h1:W8mUrRp6NOVl3J+MYp5kPMoUZPp7aOYHtaua31lwRHg= 38 | github.com/crate-crypto/go-ipa v0.0.0-20240724233137-53bbb0ceb27a/go.mod h1:sTwzHBvIzm2RfVCGNEBZgRyjwK40bVoun3ZnGOCafNM= 39 | github.com/crate-crypto/go-kzg-4844 v1.1.0 h1:EN/u9k2TF6OWSHrCCDBBU6GLNMq88OspHHlMnHfoyU4= 40 | github.com/crate-crypto/go-kzg-4844 v1.1.0/go.mod h1:JolLjpSff1tCCJKaJx4psrlEdlXuJEC996PL3tTAFks= 41 | github.com/davecgh/go-spew v1.1.1 
h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 42 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 43 | github.com/deckarep/golang-set/v2 v2.6.0 h1:XfcQbWM1LlMB8BsJ8N9vW5ehnnPVIw0je80NsVHagjM= 44 | github.com/deckarep/golang-set/v2 v2.6.0/go.mod h1:VAky9rY/yGXJOLEDv3OMci+7wtDpOF4IN+y82NBOac4= 45 | github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0= 46 | github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= 47 | github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc= 48 | github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= 49 | github.com/deepmap/oapi-codegen v1.6.0 h1:w/d1ntwh91XI0b/8ja7+u5SvA4IFfM0UNNLmiDR1gg0= 50 | github.com/deepmap/oapi-codegen v1.6.0/go.mod h1:ryDa9AgbELGeB+YEXE1dR53yAjHwFvE9iAUlWl9Al3M= 51 | github.com/ethereum/c-kzg-4844/v2 v2.1.0 h1:gQropX9YFBhl3g4HYhwE70zq3IHFRgbbNPw0Shwzf5w= 52 | github.com/ethereum/c-kzg-4844/v2 v2.1.0/go.mod h1:TC48kOKjJKPbN7C++qIgt0TJzZ70QznYR7Ob+WXl57E= 53 | github.com/ethereum/go-ethereum v1.15.11 h1:JK73WKeu0WC0O1eyX+mdQAVHUV+UR1a9VB/domDngBU= 54 | github.com/ethereum/go-ethereum v1.15.11/go.mod h1:mf8YiHIb0GR4x4TipcvBUPxJLw1mFdmxzoDi11sDRoI= 55 | github.com/ethereum/go-verkle v0.2.2 h1:I2W0WjnrFUIzzVPwm8ykY+7pL2d4VhlsePn4j7cnFk8= 56 | github.com/ethereum/go-verkle v0.2.2/go.mod h1:M3b90YRnzqKyyzBEWJGqj8Qff4IDeXnzFw0P9bFw3uk= 57 | github.com/ferranbt/fastssz v0.1.2 h1:Dky6dXlngF6Qjc+EfDipAkE83N5I5DE68bY6O0VLNPk= 58 | github.com/ferranbt/fastssz v0.1.2/go.mod h1:X5UPrE2u1UJjxHA8X54u04SBwdAQjG2sFtWs39YxyWs= 59 | github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= 60 | github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= 61 | github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff h1:tY80oXqGNY4FhTFhk+o9oFHGINQ/+vhlm8HFzi6znCI= 62 | github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff/go.mod h1:x7DCsMOv1taUwEWCzT4cmDeAkigA5/QCwUodaVOe8Ww= 63 | github.com/getsentry/sentry-go v0.27.0 h1:Pv98CIbtB3LkMWmXi4Joa5OOcwbmnX88sF5qbK3r3Ps= 64 | github.com/getsentry/sentry-go v0.27.0/go.mod h1:lc76E2QywIyW8WuBnwl8Lc4bkmQH4+w1gwTf25trprY= 65 | github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= 66 | github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= 67 | github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= 68 | github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= 69 | github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= 70 | github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= 71 | github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= 72 | github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= 73 | github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= 74 | github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= 75 | github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= 76 | github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= 77 | github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= 
78 | github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= 79 | github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 80 | github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= 81 | github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= 82 | github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 83 | github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc= 84 | github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= 85 | github.com/graph-gophers/graphql-go v1.3.0 h1:Eb9x/q6MFpCLz7jBCiP/WTxjSDrYLR1QY41SORZyNJ0= 86 | github.com/graph-gophers/graphql-go v1.3.0/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc= 87 | github.com/hashicorp/go-bexpr v0.1.10 h1:9kuI5PFotCboP3dkDYFr/wi0gg0QVbSNz5oFRpxn4uE= 88 | github.com/hashicorp/go-bexpr v0.1.10/go.mod h1:oxlubA2vC/gFVfX1A6JGp7ls7uCDlfJn732ehYYg+g0= 89 | github.com/holiman/billy v0.0.0-20240216141850-2abb0c79d3c4 h1:X4egAf/gcS1zATw6wn4Ej8vjuVGxeHdan+bRb2ebyv4= 90 | github.com/holiman/billy v0.0.0-20240216141850-2abb0c79d3c4/go.mod h1:5GuXa7vkL8u9FkFuWdVvfR5ix8hRB7DbOAaYULamFpc= 91 | github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZKVu0fao= 92 | github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA= 93 | github.com/holiman/uint256 v1.3.2 h1:a9EgMPSC1AAaj1SZL5zIQD3WbwTuHrMGOerLjGmM/TA= 94 | github.com/holiman/uint256 v1.3.2/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= 95 | github.com/huin/goupnp v1.3.0 h1:UvLUlWDNpoUdYzb2TCn+MuTWtcjXKSza2n6CBdQ0xXc= 96 | github.com/huin/goupnp v1.3.0/go.mod h1:gnGPsThkYa7bFi/KWmEysQRf48l2dvR5bxr2OFckNX8= 97 | github.com/influxdata/influxdb-client-go/v2 v2.4.0 h1:HGBfZYStlx3Kqvsv1h2pJixbCl/jhnFtxpKFAv9Tu5k= 98 | github.com/influxdata/influxdb-client-go/v2 v2.4.0/go.mod h1:vLNHdxTJkIf2mSLvGrpj8TCcISApPoXkaxP8g9uRlW8= 99 | github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c h1:qSHzRbhzK8RdXOsAdfDgO49TtqC1oZ+acxPrkfTxcCs= 100 | github.com/influxdata/influxdb1-client v0.0.0-20220302092344-a9ab5670611c/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= 101 | github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 h1:W9WBk7wlPfJLvMCdtV4zPulc4uCPrlywQOmbFOhgQNU= 102 | github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo= 103 | github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7BdWus= 104 | github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= 105 | github.com/klauspost/compress v1.16.0 h1:iULayQNOReoYUe+1qtKOqw9CwJv3aNQu8ivo7lw1HU4= 106 | github.com/klauspost/compress v1.16.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= 107 | github.com/klauspost/cpuid/v2 v2.0.9 h1:lgaqFMSdTdQYdZ04uHyN2d/eKdOMyi2YLSvlQIBFYa4= 108 | github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= 109 | github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= 110 | github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= 111 | github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= 112 | github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 113 | github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= 114 | 
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= 115 | github.com/leanovate/gopter v0.2.11 h1:vRjThO1EKPb/1NsDXuDrzldR28RLkBflWYcU9CvzWu4= 116 | github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f7Xvpt0S9c= 117 | github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= 118 | github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= 119 | github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= 120 | github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= 121 | github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= 122 | github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= 123 | github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= 124 | github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= 125 | github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g= 126 | github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM= 127 | github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= 128 | github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= 129 | github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A= 130 | github.com/mitchellh/pointerstructure v1.2.0/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8ohIXc3tViBH44KcwB2g4= 131 | github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= 132 | github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= 133 | github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= 134 | github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= 135 | github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= 136 | github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU= 137 | github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= 138 | github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7 h1:oYW+YCJ1pachXTQmzR3rNLYGGz4g/UgFcjb28p/viDM= 139 | github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7/go.mod h1:CRroGNssyjTd/qIG2FyxByd2S8JEAZXBl4qUrZf8GS0= 140 | github.com/pion/dtls/v2 v2.2.7 h1:cSUBsETxepsCSFSxC3mc/aDo14qQLMSL+O6IjG28yV8= 141 | github.com/pion/dtls/v2 v2.2.7/go.mod h1:8WiMkebSHFD0T+dIU+UeBaoV7kDhOW5oDCzZ7WZ/F9s= 142 | github.com/pion/logging v0.2.2 h1:M9+AIj/+pxNsDfAT64+MAVgJO0rsyLnoJKCqf//DoeY= 143 | github.com/pion/logging v0.2.2/go.mod h1:k0/tDVsRCX2Mb2ZEmTqNa7CWsQPc+YYCB7Q+5pahoms= 144 | github.com/pion/stun/v2 v2.0.0 h1:A5+wXKLAypxQri59+tmQKVs7+l6mMM+3d+eER9ifRU0= 145 | github.com/pion/stun/v2 v2.0.0/go.mod h1:22qRSh08fSEttYUmJZGlriq9+03jtVmXNODgLccj8GQ= 146 | github.com/pion/transport/v2 v2.2.1 h1:7qYnCBlpgSJNYMbLCKuSY9KbQdBFoETvPNETv0y4N7c= 147 | github.com/pion/transport/v2 v2.2.1/go.mod h1:cXXWavvCnFF6McHTft3DWS9iic2Mftcz1Aq29pGcU5g= 148 | github.com/pion/transport/v3 v3.0.1 h1:gDTlPJwROfSfz6QfSi0ZmeCSkFcnWWiiR9ES0ouANiM= 149 | github.com/pion/transport/v3 v3.0.1/go.mod 
h1:UY7kiITrlMv7/IKgd5eTUcaahZx5oUN3l9SzK5f5xE0= 150 | github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= 151 | github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 152 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 153 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 154 | github.com/prometheus/client_golang v1.12.0 h1:C+UIj/QWtmqY13Arb8kwMt5j34/0Z2iKamrJ+ryC0Gg= 155 | github.com/prometheus/client_golang v1.12.0/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= 156 | github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a h1:CmF68hwI0XsOQ5UwlBopMi2Ow4Pbg32akc4KIVCOm+Y= 157 | github.com/prometheus/client_model v0.2.1-0.20210607210712-147c58e9608a/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= 158 | github.com/prometheus/common v0.32.1 h1:hWIdL3N2HoUx3B8j3YN9mWor0qhY/NlEKZEaXxuIRh4= 159 | github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= 160 | github.com/prometheus/procfs v0.7.3 h1:4jVXhlkAyzOScmCkXBTOLRLTz8EeU+eyjrwB/EPq0VU= 161 | github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= 162 | github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= 163 | github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= 164 | github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= 165 | github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= 166 | github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= 167 | github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= 168 | github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= 169 | github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= 170 | github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible h1:Bn1aCHHRnjv4Bl16T8rcaFjYSrGrIZvpiGO6P3Q4GpU= 171 | github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= 172 | github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= 173 | github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= 174 | github.com/supranational/blst v0.3.14 h1:xNMoHRJOTwMn63ip6qoWJ2Ymgvj7E2b9jY2FAwY+qRo= 175 | github.com/supranational/blst v0.3.14/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= 176 | github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= 177 | github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= 178 | github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= 179 | github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= 180 | github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= 181 | github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= 182 | github.com/urfave/cli/v2 v2.27.5 h1:WoHEJLdsXr6dDWoJgMq/CboDmyY/8HMMH1fTECbih+w= 183 | github.com/urfave/cli/v2 v2.27.5/go.mod h1:3Sevf16NykTbInEnD0yKkjDAeZDS0A6bzhBH5hrMvTQ= 184 | github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4= 
185 | github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM= 186 | golang.org/x/crypto v0.35.0 h1:b15kiHdrGCHrP6LvwaQ3c03kgNhhiMgvlhxHQhmg2Xs= 187 | golang.org/x/crypto v0.35.0/go.mod h1:dy7dXNW32cAb/6/PRuTNsix8T+vJAqvuIy5Bli/x0YQ= 188 | golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= 189 | golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= 190 | golang.org/x/net v0.36.0 h1:vWF2fRbw4qslQsQzgFqZff+BItCvGFQqKzKIzx1rmoA= 191 | golang.org/x/net v0.36.0/go.mod h1:bFmbeoIPfrw4sMHNhb4J9f6+tPziuGjq7Jk/38fxi1I= 192 | golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w= 193 | golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= 194 | golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 195 | golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 196 | golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 197 | golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 198 | golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 199 | golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc= 200 | golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= 201 | golang.org/x/text v0.22.0 h1:bofq7m3/HAFvbF51jz3Q9wLg3jkvSPuiZu/pD1XwgtM= 202 | golang.org/x/text v0.22.0/go.mod h1:YRoo4H8PVmsu+E3Ou7cqLVH8oXWIHVoX0jqUWALQhfY= 203 | golang.org/x/time v0.9.0 h1:EsRrnYcQiGH+5FfbgvV4AP7qEZstoyrHB0DzarOQ4ZY= 204 | golang.org/x/time v0.9.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= 205 | google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg= 206 | google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw= 207 | gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc= 208 | gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc= 209 | gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 210 | gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= 211 | gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= 212 | gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 213 | rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU= 214 | rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= 215 | -------------------------------------------------------------------------------- /hardhat.config.ts: -------------------------------------------------------------------------------- 1 | import { HardhatUserConfig } from "hardhat/config"; 2 | import "@nomicfoundation/hardhat-toolbox"; 3 | import "hardhat-preprocessor"; 4 | import fs from "fs"; 5 | 6 | const config: HardhatUserConfig = { 7 | solidity: { 8 | version: "0.8.22", 9 | settings: { 10 | optimizer: { 11 | enabled: true, 12 | runs: 200, 13 | }, 14 | viaIR: true 15 | }, 16 | }, 17 | preprocess: { 18 | eachLine: (hre) => ({ 19 | transform: (line: string) => { 20 | if (line.match(/^\s*import /i)) { 21 | for (const [from, to] of getRemappings()) { 22 | if (line.includes(from)) { 23 | line = line.replace(from, to); 24 | break; 25 | } 26 | } 27 | } 28 | return line; 29 | }, 30 | }), 31 | }, 32 | 
paths: { 33 | sources: "./src", 34 | cache: "./cache_hardhat", 35 | }, 36 | }; 37 | 38 | export default config; 39 | 40 | function getRemappings() { 41 | return fs 42 | .readFileSync("remappings.txt", "utf8") 43 | .split("\n") 44 | .filter(Boolean) // remove empty lines 45 | .map((line) => line.trim().split("=")); 46 | } 47 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "blobstream-contracts", 3 | "version": "3.0.0", 4 | "description": "Celestia -> EVM bridge", 5 | "main": "index.js", 6 | "directories": { 7 | "doc": "docs", 8 | "lib": "lib" 9 | }, 10 | "scripts": { 11 | "test": "forge test" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/celestiaorg/blobstream-contracts.git" 16 | }, 17 | "keywords": [ 18 | "blobstream" 19 | ], 20 | "author": "", 21 | "license": "Apache-2.0", 22 | "bugs": { 23 | "url": "https://github.com/celestiaorg/blobstream-contracts/issues" 24 | }, 25 | "homepage": "https://github.com/celestiaorg/blobstream-contracts#readme", 26 | "devDependencies": { 27 | "@nomicfoundation/hardhat-toolbox": "^3.0.0", 28 | "hardhat": "^2.22.17", 29 | "hardhat-preprocessor": "^0.1.5" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /remappings.txt: -------------------------------------------------------------------------------- 1 | @openzeppelin/contracts/=lib/openzeppelin-contracts-upgradeable/contracts/ 2 | ds-test/=lib/ds-test/src/ 3 | erc4626-tests/=lib/openzeppelin-contracts-upgradeable/lib/erc4626-tests/ 4 | forge-std/=lib/forge-std/src/ 5 | openzeppelin-contracts-upgradeable/=lib/openzeppelin-contracts-upgradeable/ 6 | openzeppelin-contracts/=lib/openzeppelin-contracts/ 7 | -------------------------------------------------------------------------------- /scripts/Dockerfile_Environment: -------------------------------------------------------------------------------- 1 | # This Dockerfile will contain the CI environment used to run the tests and generate the wrappers. 2 | # It would help not having to worry about the versions difference between the local setup and that of the CI 3 | # and would avoid the issues related to wrappers generation using different versions. 4 | # Note: this image is not build to be distributed or pushed to remote registries. Thus, it does not optimise the build layers and build stages. 5 | # 6 | # How to use: 7 | # First, build the docker image using: 8 | # $ docker build -t blobstream-env -f Dockerfile_Environment . 9 | # Then, run the docker image: 10 | # $ docker run -it blobstream-env 11 | # This should give you a shell inside the image where you have all the dependencies installed. 12 | # 13 | # For example, if you want to generate the wrappers for this repo, run the following inside the shell: 14 | # $ git clone https://github.com/celestiaorg/blobstream-contracts 15 | # $ cd blobstream-contracts 16 | # $ make 17 | # And you will see that the wrappers are being regenerated for this repo. 18 | # Finally, you can push the changes to your branch using git add/commit/push. 19 | FROM ubuntu:22.04 20 | 21 | # install necessary dependencies 22 | RUN apt update && apt install -y git build-essential software-properties-common curl protobuf-compiler wget jq 23 | 24 | # install forge 25 | RUN curl -L https://foundry.paradigm.xyz | bash && . 
/root/.bashrc && foundryup 26 | 27 | # install solc 28 | RUN wget https://github.com/ethereum/solidity/releases/download/v0.8.22/solc-static-linux -O /usr/bin/solc && chmod +x /usr/bin/solc 29 | 30 | # install go 31 | RUN wget https://go.dev/dl/go1.24.0.linux-arm64.tar.gz && rm -rf /usr/local/go && tar -C /usr/local -xzf go1.24.0.linux-arm64.tar.gz && echo 'PATH=$PATH:/usr/local/go/bin:/root/go/bin' >> ~/.bashrc 32 | 33 | # install abigen 34 | RUN git clone --depth 1 --branch v1.15.3 https://github.com/ethereum/go-ethereum.git && cd go-ethereum && PATH=$PATH:/usr/local/go/bin make devtools 35 | 36 | WORKDIR /root 37 | ENTRYPOINT bash 38 | 39 | # at this level, you can clone the blobstream-contracts repo and build the wrappers, run the tests etc. 40 | -------------------------------------------------------------------------------- /scripts/deploy.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from "hardhat"; 2 | 3 | async function main() { 4 | // TODO add deploy script 5 | } 6 | 7 | // We recommend this pattern to be able to use async/await everywhere 8 | // and properly handle errors. 9 | main().catch((error) => { 10 | console.error(error); 11 | process.exitCode = 1; 12 | }); 13 | -------------------------------------------------------------------------------- /scripts/gen.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | if (( $# < 2 )); then 6 | echo "Go wrappers generator script. Make sure to specify the following params:" 7 | echo " - first parameter: the contracts source directory" 8 | echo " - second parameter: the contracts names (including the .sol extension) separated by a space" 9 | echo "the output files will be in the ./wrappers directory." 10 | exit 1 11 | fi 12 | 13 | # compile the Blobstream contracts 14 | forge build > /dev/null 15 | 16 | # compile the proxy contracts 17 | forge build -C lib/openzeppelin-contracts/contracts/proxy > /dev/null 18 | 19 | cd "$1" 20 | 21 | for file in "${@:2}"; do 22 | mkdir -p ../wrappers/"${file}" 23 | contractName=$(basename "${file}" .sol) 24 | 25 | jq .abi < ../out/"${file}"/"${contractName}".json > ../out/"${file}"/"${contractName}".abi 26 | jq -r .bytecode.object < ../out/"${file}"/"${contractName}".json > ../out/"${file}"/"${contractName}".bin 27 | 28 | abigen --pkg wrappers \ 29 | --out=../wrappers/"${file}"/wrapper.go \ 30 | --abi ../out/"${file}"/"${contractName}".abi \ 31 | --bin ../out/"${file}"/"${contractName}".bin 32 | done 33 | 34 | echo "done." 35 | -------------------------------------------------------------------------------- /scripts/upgradability_check.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # this script will check if the Blobstream contract is inheriting the correct upgradability contracts. 4 | 5 | out=$(surya inheritance src/Blobstream.sol | grep -i "\"Blobstream\" ->" | cut -d ">" -f 2 | sed 's/[";]//g') 6 | 7 | required_contracts=("Initializable" "UUPSUpgradeable" "OwnableUpgradeable") 8 | missing_contracts=() 9 | 10 | for field in "${required_contracts[@]}"; do 11 | if ! grep -q "\<$field\>" <<< "$out"; then 12 | missing_contracts+=("$field") 13 | fi 14 | done 15 | 16 | if [ ${#missing_contracts[@]} -eq 0 ]; then 17 | echo "The Blobstream contract is inheriting the right contracts. Exiting." 
18 | exit 0 19 | else 20 | echo "The Blobstream contract is missing the necessary inherited contracts: ${missing_contracts[*]}" 21 | exit 1 22 | fi 23 | -------------------------------------------------------------------------------- /slither.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "filter_paths": "(lib/)" 3 | } 4 | -------------------------------------------------------------------------------- /src/Blobstream.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "openzeppelin-contracts-upgradeable/contracts/access/OwnableUpgradeable.sol"; 5 | import "openzeppelin-contracts-upgradeable/contracts/proxy/utils/Initializable.sol"; 6 | import "openzeppelin-contracts-upgradeable/contracts/proxy/utils/UUPSUpgradeable.sol"; 7 | import "openzeppelin-contracts/contracts/utils/cryptography/ECDSA.sol"; 8 | 9 | import "./Constants.sol"; 10 | import "./DataRootTuple.sol"; 11 | import "./IDAOracle.sol"; 12 | import "./lib/tree/binary/BinaryMerkleProof.sol"; 13 | import "./lib/tree/binary/BinaryMerkleTree.sol"; 14 | 15 | struct Validator { 16 | address addr; 17 | uint256 power; 18 | } 19 | 20 | struct Signature { 21 | uint8 v; 22 | bytes32 r; 23 | bytes32 s; 24 | } 25 | 26 | /// @title Blobstream: Celestia -> EVM, Data Availability relay. 27 | /// @dev The relay relies on a set of signers to attest to some event on 28 | /// Celestia. These signers are the Celestia validator set, who sign over every 29 | /// Celestia block. Keeping track of the Celestia validator set is accomplished 30 | /// by updating this contract's view of the validator set with 31 | /// `updateValidatorSet()`. At least 2/3 of the voting power of the current 32 | /// view of the validator set must sign off on new relayed events, submitted 33 | /// with `submitDataRootTupleRoot()`. Each event is a batch of `DataRootTuple`s 34 | /// (see ./DataRootTuple.sol), with each tuple representing a single data root 35 | /// in a Celestia block header. Relayed tuples are in the same order as the 36 | /// block headers. 37 | /// @dev DO NOT REMOVE INHERITANCE OF THE FOLLOWING CONTRACTS: Initializable, UUPSUpgradeable and 38 | /// OwnableUpgradeable! They're essential for upgradability. 39 | contract Blobstream is IDAOracle, Initializable, UUPSUpgradeable, OwnableUpgradeable { 40 | // Don't change the order of state for working upgrades AND BE AWARE OF 41 | // INHERITANCE VARIABLES! Inherited contracts contain storage slots and must 42 | // be accounted for in any upgrades. Always test an exact upgrade on testnet 43 | // and localhost before mainnet upgrades. 44 | 45 | ///////////// 46 | // Storage // 47 | ///////////// 48 | 49 | /// @notice Domain-separated commitment to the latest validator set. 50 | bytes32 public state_lastValidatorSetCheckpoint; 51 | /// @notice Voting power required to submit a new update. 52 | uint256 public state_powerThreshold; 53 | /// @notice Nonce for bridge events. Must be incremented sequentially. 54 | uint256 public state_eventNonce; 55 | /// @notice Mapping of data root tuple root nonces to data root tuple roots. 56 | mapping(uint256 => bytes32) public state_dataRootTupleRoots; 57 | 58 | //////////// 59 | // Events // 60 | //////////// 61 | 62 | /// @notice Emitted when a new root of data root tuples is relayed. 63 | /// @param nonce Event nonce. 64 | /// @param dataRootTupleRoot Merkle root of relayed data root tuples. 
65 | /// See `submitDataRootTupleRoot`. 66 | event DataRootTupleRootEvent(uint256 indexed nonce, bytes32 dataRootTupleRoot); 67 | 68 | /// @notice Emitted when the validator set is updated. 69 | /// @param nonce Event nonce. 70 | /// @param powerThreshold New voting power threshold. 71 | /// @param validatorSetHash Hash of new validator set. 72 | /// See `updateValidatorSet`. 73 | event ValidatorSetUpdatedEvent(uint256 indexed nonce, uint256 powerThreshold, bytes32 validatorSetHash); 74 | 75 | //////////// 76 | // Errors // 77 | //////////// 78 | 79 | /// @notice Malformed current validator set. 80 | error MalformedCurrentValidatorSet(); 81 | 82 | /// @notice Validator signature does not match. 83 | error InvalidSignature(); 84 | 85 | /// @notice Submitted validator set signatures do not have enough power. 86 | error InsufficientVotingPower(); 87 | 88 | /// @notice New validator set nonce must be greater than the current nonce. 89 | error InvalidValidatorSetNonce(); 90 | 91 | /// @notice Supplied current validators and powers do not match checkpoint. 92 | error SuppliedValidatorSetInvalid(); 93 | 94 | /// @notice Data root tuple root nonce must be greater than the current nonce. 95 | error InvalidDataRootTupleRootNonce(); 96 | 97 | /////////////// 98 | // Functions // 99 | /////////////// 100 | 101 | /// @param _nonce Initial event nonce. 102 | /// @param _powerThreshold Initial voting power that is needed to approve 103 | /// operations. 104 | /// @param _validatorSetCheckpoint Initial checkpoint of the validator set. This does not need 105 | /// to be the genesis validator set of the bridged chain, only the initial 106 | /// validator set of the bridge. 107 | /// @dev DO NOT REMOVE THE INITIALIZER! It is mandatory for upgradability. 108 | function initialize(uint256 _nonce, uint256 _powerThreshold, bytes32 _validatorSetCheckpoint) public initializer { 109 | // EFFECTS 110 | 111 | state_eventNonce = _nonce; 112 | state_lastValidatorSetCheckpoint = _validatorSetCheckpoint; 113 | state_powerThreshold = _powerThreshold; 114 | 115 | /// @dev Initialize the OwnableUpgradeable explicitly. 116 | /// DO NOT REMOVE! It is mandatory for allowing the owner to upgrade. 117 | __Ownable_init(_msgSender()); 118 | } 119 | 120 | /// @dev only authorize the upgrade for the owner of the contract. 121 | /// Additional access control logic can be added to allow multiple actors to upgrade. 122 | /// @dev DO NOT REMOVE! It is mandatory for upgradability. 123 | function _authorizeUpgrade(address) internal override onlyOwner {} 124 | 125 | /// @notice Utility function to check if a signature is nil. 126 | /// If all bytes of the 65-byte signature are zero, then it's a nil signature. 127 | function isSigNil(Signature calldata _sig) private pure returns (bool) { 128 | return (_sig.r == 0 && _sig.s == 0 && _sig.v == 0); 129 | } 130 | 131 | /// @notice Utility function to verify EIP-191 signatures. 132 | function verifySig(address _signer, bytes32 _digest, Signature calldata _sig) private pure returns (bool) { 133 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(_digest); 134 | 135 | return _signer == ECDSA.recover(digest_eip191, _sig.v, _sig.r, _sig.s); 136 | } 137 | 138 | /// @dev Computes the hash of a validator set. 139 | /// @param _validators The validator set to hash. 140 | function computeValidatorSetHash(Validator[] calldata _validators) private pure returns (bytes32) { 141 | return keccak256(abi.encode(_validators)); 142 | } 143 | 144 | /// @dev Make a domain-separated commitment to the validator set. 
145 | /// A hash of all relevant information about the validator set. 146 | /// The format of the hash is: 147 | /// keccak256(VALIDATOR_SET_HASH_DOMAIN_SEPARATOR, nonce, power_threshold, validator_set_hash) 148 | /// The elements in the validator set should be monotonically decreasing by power. 149 | /// @param _nonce Nonce. 150 | /// @param _powerThreshold The voting power threshold. 151 | /// @param _validatorSetHash Validator set hash. 152 | function domainSeparateValidatorSetHash(uint256 _nonce, uint256 _powerThreshold, bytes32 _validatorSetHash) 153 | private 154 | pure 155 | returns (bytes32) 156 | { 157 | bytes32 c = 158 | keccak256(abi.encode(VALIDATOR_SET_HASH_DOMAIN_SEPARATOR, _nonce, _powerThreshold, _validatorSetHash)); 159 | 160 | return c; 161 | } 162 | 163 | /// @dev Make a domain-separated commitment to a data root tuple root. 164 | /// A hash of all relevant information about a data root tuple root. 165 | /// The format of the hash is: 166 | /// keccak256(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, nonce, dataRootTupleRoot) 167 | /// @param _nonce Event nonce. 168 | /// @param _dataRootTupleRoot Data root tuple root. 169 | function domainSeparateDataRootTupleRoot(uint256 _nonce, bytes32 _dataRootTupleRoot) 170 | private 171 | pure 172 | returns (bytes32) 173 | { 174 | bytes32 c = keccak256(abi.encode(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, _nonce, _dataRootTupleRoot)); 175 | 176 | return c; 177 | } 178 | 179 | /// @dev Checks that enough voting power signed over a digest. 180 | /// It expects the signatures to be in the same order as the _currentValidators. 181 | /// @param _currentValidators The current validators. 182 | /// @param _sigs The current validators' signatures. 183 | /// @param _digest This is what we are checking they have signed. 184 | /// @param _powerThreshold At least this much power must have signed. 185 | function checkValidatorSignatures( 186 | // The current validator set and their powers 187 | Validator[] calldata _currentValidators, 188 | Signature[] calldata _sigs, 189 | bytes32 _digest, 190 | uint256 _powerThreshold 191 | ) private pure { 192 | uint256 cumulativePower = 0; 193 | for (uint256 i = 0; i < _currentValidators.length; i++) { 194 | // If the signature is nil, then it's not present so continue. 195 | if (isSigNil(_sigs[i])) { 196 | continue; 197 | } 198 | 199 | // Check that the current validator has signed off on the hash. 200 | if (!verifySig(_currentValidators[i].addr, _digest, _sigs[i])) { 201 | revert InvalidSignature(); 202 | } 203 | 204 | // Sum up cumulative power. 205 | cumulativePower += _currentValidators[i].power; 206 | 207 | // Break early to avoid wasting gas. 208 | if (cumulativePower >= _powerThreshold) { 209 | break; 210 | } 211 | } 212 | // Check that there was enough power. 213 | if (cumulativePower < _powerThreshold) { 214 | revert InsufficientVotingPower(); 215 | } 216 | } 217 | 218 | /// @notice This updates the validator set by checking that the validators 219 | /// in the current validator set have signed off on the new validator set. 220 | /// The signatures supplied are the signatures of the current validator set 221 | /// over the checkpoint hash generated from the new validator set. Anyone 222 | /// can call this function, but they must supply valid signatures of the 223 | /// current validator set over the new validator set. 224 | /// 225 | /// The validator set hash that is signed over is domain separated as per 226 | /// `domainSeparateValidatorSetHash`. 227 | /// @param _newNonce The new event nonce. 
228 | /// @param _oldNonce The nonce of the latest update to the validator set. 229 | /// @param _newPowerThreshold At least this much power must have signed. 230 | /// @param _newValidatorSetHash The hash of the new validator set. 231 | /// @param _currentValidatorSet The current validator set. 232 | /// @param _sigs Signatures. 233 | function updateValidatorSet( 234 | uint256 _newNonce, 235 | uint256 _oldNonce, 236 | uint256 _newPowerThreshold, 237 | bytes32 _newValidatorSetHash, 238 | Validator[] calldata _currentValidatorSet, 239 | Signature[] calldata _sigs 240 | ) external { 241 | // CHECKS 242 | 243 | uint256 currentNonce = state_eventNonce; 244 | uint256 currentPowerThreshold = state_powerThreshold; 245 | bytes32 lastValidatorSetCheckpoint = state_lastValidatorSetCheckpoint; 246 | 247 | // Check that the new nonce is one more than the current one. 248 | if (_newNonce != currentNonce + 1) { 249 | revert InvalidValidatorSetNonce(); 250 | } 251 | 252 | // Check that current validators and signatures are well-formed. 253 | if (_currentValidatorSet.length != _sigs.length) { 254 | revert MalformedCurrentValidatorSet(); 255 | } 256 | 257 | // Check that the supplied current validator set matches the saved checkpoint. 258 | bytes32 currentValidatorSetHash = computeValidatorSetHash(_currentValidatorSet); 259 | if ( 260 | domainSeparateValidatorSetHash(_oldNonce, currentPowerThreshold, currentValidatorSetHash) 261 | != lastValidatorSetCheckpoint 262 | ) { 263 | revert SuppliedValidatorSetInvalid(); 264 | } 265 | 266 | // Check that enough current validators have signed off on the new validator set. 267 | bytes32 newCheckpoint = domainSeparateValidatorSetHash(_newNonce, _newPowerThreshold, _newValidatorSetHash); 268 | checkValidatorSignatures(_currentValidatorSet, _sigs, newCheckpoint, currentPowerThreshold); 269 | 270 | // EFFECTS 271 | 272 | state_lastValidatorSetCheckpoint = newCheckpoint; 273 | state_powerThreshold = _newPowerThreshold; 274 | state_eventNonce = _newNonce; 275 | 276 | // LOGS 277 | 278 | emit ValidatorSetUpdatedEvent(_newNonce, _newPowerThreshold, _newValidatorSetHash); 279 | } 280 | 281 | /// @notice Relays a root of Celestia data root tuples to an EVM chain. Anyone 282 | /// can call this function, but they must supply valid signatures of the 283 | /// current validator set over the data root tuple root. 284 | /// 285 | /// The data root root is the Merkle root of the binary Merkle tree 286 | /// (https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#binary-merkle-tree) 287 | /// where each leaf is in an ABI-encoded `DataRootTuple`. Each relayed data 288 | /// root tuple will 1:1 mirror data roots as they are included in headers 289 | /// on Celestia, _in order of inclusion_. 290 | /// 291 | /// The data tuple root that is signed over is domain separated as per 292 | /// `domainSeparateDataRootTupleRoot`. 293 | /// @param _newNonce The new event nonce. 294 | /// @param _validatorSetNonce The nonce of the latest update to the 295 | /// validator set. 296 | /// @param _dataRootTupleRoot The Merkle root of data root tuples. 297 | /// @param _currentValidatorSet The current validator set. 298 | /// @param _sigs Signatures. 
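/// @dev Illustrative sketch, not part of the contract logic: the digest each validator signs for
/// this call is the domain-separated commitment wrapped as an EIP-191 "personal sign" message,
/// mirroring `domainSeparateDataRootTupleRoot` and `verifySig` above:
///
///     bytes32 c = keccak256(abi.encode(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, _newNonce, _dataRootTupleRoot));
///     bytes32 signedDigest = ECDSA.toEthSignedMessageHash(c);
///
/// Off-chain relayers are assumed to collect validator signatures over `signedDigest` and pass them
/// in `_sigs` in the same order as `_currentValidatorSet`, leaving all-zero (nil) entries for
/// validators that did not sign.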
299 | function submitDataRootTupleRoot( 300 | uint256 _newNonce, 301 | uint256 _validatorSetNonce, 302 | bytes32 _dataRootTupleRoot, 303 | Validator[] calldata _currentValidatorSet, 304 | Signature[] calldata _sigs 305 | ) external { 306 | // CHECKS 307 | 308 | uint256 currentNonce = state_eventNonce; 309 | uint256 currentPowerThreshold = state_powerThreshold; 310 | bytes32 lastValidatorSetCheckpoint = state_lastValidatorSetCheckpoint; 311 | 312 | // Check that the new nonce is one more than the current one. 313 | if (_newNonce != currentNonce + 1) { 314 | revert InvalidDataRootTupleRootNonce(); 315 | } 316 | 317 | // Check that current validators and signatures are well-formed. 318 | if (_currentValidatorSet.length != _sigs.length) { 319 | revert MalformedCurrentValidatorSet(); 320 | } 321 | 322 | // Check that the supplied current validator set matches the saved checkpoint. 323 | bytes32 currentValidatorSetHash = computeValidatorSetHash(_currentValidatorSet); 324 | if ( 325 | domainSeparateValidatorSetHash(_validatorSetNonce, currentPowerThreshold, currentValidatorSetHash) 326 | != lastValidatorSetCheckpoint 327 | ) { 328 | revert SuppliedValidatorSetInvalid(); 329 | } 330 | 331 | // Check that enough current validators have signed off on the data 332 | // root tuple root and nonce. 333 | bytes32 c = domainSeparateDataRootTupleRoot(_newNonce, _dataRootTupleRoot); 334 | checkValidatorSignatures(_currentValidatorSet, _sigs, c, currentPowerThreshold); 335 | 336 | // EFFECTS 337 | 338 | state_eventNonce = _newNonce; 339 | state_dataRootTupleRoots[_newNonce] = _dataRootTupleRoot; 340 | 341 | // LOGS 342 | 343 | emit DataRootTupleRootEvent(_newNonce, _dataRootTupleRoot); 344 | } 345 | 346 | /// @dev see "./IDAOracle.sol" 347 | function verifyAttestation(uint256 _tupleRootNonce, DataRootTuple memory _tuple, BinaryMerkleProof memory _proof) 348 | external 349 | view 350 | override 351 | returns (bool) 352 | { 353 | // Tuple must have been committed before. 354 | if (_tupleRootNonce > state_eventNonce) { 355 | return false; 356 | } 357 | 358 | // Load the tuple root at the given index from storage. 359 | bytes32 root = state_dataRootTupleRoots[_tupleRootNonce]; 360 | 361 | // Verify the proof. 362 | (bool isProofValid,) = BinaryMerkleTree.verify(root, _proof, abi.encode(_tuple)); 363 | 364 | return isProofValid; 365 | } 366 | } 367 | -------------------------------------------------------------------------------- /src/Constants.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | /// @dev bytes32 encoding of the string "checkpoint" 5 | bytes32 constant VALIDATOR_SET_HASH_DOMAIN_SEPARATOR = 6 | 0x636865636b706f696e7400000000000000000000000000000000000000000000; 7 | 8 | /// @dev bytes32 encoding of the string "transactionBatch" 9 | bytes32 constant DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR = 10 | 0x7472616e73616374696f6e426174636800000000000000000000000000000000; 11 | -------------------------------------------------------------------------------- /src/DataRootTuple.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | /// @notice A tuple of data root with metadata. Each data root is associated 5 | /// with a Celestia block height. 
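/// @dev Illustrative sketch, not part of this struct's definition: a tuple is committed to as a
/// leaf of the data root tuple tree by ABI-encoding the whole struct, which is exactly what
/// `Blobstream.verifyAttestation` feeds into `BinaryMerkleTree.verify`:
///
///     DataRootTuple memory tuple = DataRootTuple(height, dataRoot); // placeholder values
///     bytes memory leaf = abi.encode(tuple); // 64 bytes: 32-byte height || 32-byte data root
///     // the leaf digest is then sha256(0x00 || leaf), see lib/tree/binary/TreeHasher.sol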
6 | /// @dev `availableDataRoot` in 7 | /// https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#header 8 | struct DataRootTuple { 9 | // Celestia block height the data root was included in. 10 | // Genesis block is height = 0. 11 | // First queryable block is height = 1. 12 | uint256 height; 13 | // Data root. 14 | bytes32 dataRoot; 15 | } 16 | -------------------------------------------------------------------------------- /src/IDAOracle.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.19; 3 | 4 | import "./DataRootTuple.sol"; 5 | import "./lib/tree/binary/BinaryMerkleProof.sol"; 6 | 7 | /// @notice Data Availability Oracle interface. 8 | interface IDAOracle { 9 | /// @notice Verify a Data Availability attestation. 10 | /// @param _tupleRootNonce Nonce of the tuple root to prove against. 11 | /// @param _tuple Data root tuple to prove inclusion of. 12 | /// @param _proof Binary Merkle tree proof that `tuple` is in the root at `_tupleRootNonce`. 13 | /// @return `true` is proof is valid, `false` otherwise. 14 | function verifyAttestation(uint256 _tupleRootNonce, DataRootTuple memory _tuple, BinaryMerkleProof memory _proof) 15 | external 16 | view 17 | returns (bool); 18 | } 19 | -------------------------------------------------------------------------------- /src/lib/tree/Constants.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "./Types.sol"; 5 | 6 | library Constants { 7 | /////////////// 8 | // Constants // 9 | /////////////// 10 | 11 | /// @dev Maximum tree height 12 | uint256 internal constant MAX_HEIGHT = 256; 13 | 14 | /// @dev The prefixes of leaves and nodes 15 | bytes1 internal constant LEAF_PREFIX = 0x00; 16 | bytes1 internal constant NODE_PREFIX = 0x01; 17 | } 18 | 19 | /// @dev Parity share namespace. 20 | /// utility function to provide the parity share namespace as a Namespace struct. 21 | function PARITY_SHARE_NAMESPACE() pure returns (Namespace memory) { 22 | return Namespace(0xFF, 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF); 23 | } 24 | -------------------------------------------------------------------------------- /src/lib/tree/Types.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | /// @notice A representation of the Celestia-app namespace ID and its version. 5 | /// See: https://celestiaorg.github.io/celestia-app/specs/namespace.html 6 | struct Namespace { 7 | // The namespace version. 8 | bytes1 version; 9 | // The namespace ID. 
10 | bytes28 id; 11 | } 12 | 13 | using {equalTo, lessThan, greaterThan, toBytes} for Namespace global; 14 | 15 | function equalTo(Namespace memory l, Namespace memory r) pure returns (bool) { 16 | return l.toBytes() == r.toBytes(); 17 | } 18 | 19 | function lessThan(Namespace memory l, Namespace memory r) pure returns (bool) { 20 | return l.toBytes() < r.toBytes(); 21 | } 22 | 23 | function greaterThan(Namespace memory l, Namespace memory r) pure returns (bool) { 24 | return l.toBytes() > r.toBytes(); 25 | } 26 | 27 | function toBytes(Namespace memory n) pure returns (bytes29) { 28 | return bytes29(abi.encodePacked(n.version, n.id)); 29 | } 30 | 31 | function toNamespace(bytes29 n) pure returns (Namespace memory) { 32 | bytes memory id = new bytes(28); 33 | for (uint256 i = 1; i < 29; i++) { 34 | id[i - 1] = n[i]; 35 | } 36 | return Namespace(n[0], bytes28(id)); 37 | } 38 | -------------------------------------------------------------------------------- /src/lib/tree/Utils.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "./Constants.sol"; 5 | 6 | /// @notice Calculate the starting bit of the path to a leaf 7 | /// @param numLeaves : The total number of leaves in the tree 8 | /// @return startingBit : The starting bit of the path 9 | // solhint-disable-next-line func-visibility 10 | function getStartingBit(uint256 numLeaves) pure returns (uint256 startingBit) { 11 | // Determine height of the left subtree. This is the maximum path length, so all paths start at this offset from the right-most bit 12 | startingBit = 0; 13 | while ((1 << startingBit) < numLeaves) { 14 | startingBit += 1; 15 | } 16 | return Constants.MAX_HEIGHT - startingBit; 17 | } 18 | 19 | /// @notice Calculate the length of the path to a leaf 20 | /// @param key: The key of the leaf 21 | /// @param numLeaves: The total number of leaves in the tree 22 | /// @return pathLength : The length of the path to the leaf 23 | // solhint-disable-next-line func-visibility 24 | function pathLengthFromKey(uint256 key, uint256 numLeaves) pure returns (uint256 pathLength) { 25 | if (numLeaves <= 1) { 26 | // if the number of leaves of the tree is 1 or 0, the path always is 0. 27 | return 0; 28 | } 29 | // Get the height of the left subtree. This is equal to the offset of the starting bit of the path 30 | pathLength = Constants.MAX_HEIGHT - getStartingBit(numLeaves); 31 | 32 | // Determine the number of leaves in the left subtree 33 | uint256 numLeavesLeftSubTree = (1 << (pathLength - 1)); 34 | 35 | // If leaf is in left subtree, path length is full height of left subtree 36 | if (key <= numLeavesLeftSubTree - 1) { 37 | return pathLength; 38 | } 39 | // If left sub tree has only one leaf but key is not there, path has one additional step 40 | else if (numLeavesLeftSubTree == 1) { 41 | return 1; 42 | } 43 | // Otherwise, add 1 to height and recurse into right subtree 44 | else { 45 | return 1 + pathLengthFromKey(key - numLeavesLeftSubTree, numLeaves - numLeavesLeftSubTree); 46 | } 47 | } 48 | 49 | /// @notice Returns the minimum number of bits required to represent `x`; the 50 | /// result is 0 for `x` == 0. 51 | /// @param x Number. 52 | function _bitsLen(uint256 x) pure returns (uint256) { 53 | uint256 count = 0; 54 | 55 | while (x != 0) { 56 | count++; 57 | x >>= 1; 58 | } 59 | 60 | return count; 61 | } 62 | 63 | /// @notice Returns the largest power of 2 less than `x`. 64 | /// @param x Number. 
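/// @dev Worked examples (illustrative, matching the implementation below):
///     _getSplitPoint(2) == 1   // bitsLen(2) = 2, 1 << 1 = 2 == x, so halve to 1
///     _getSplitPoint(5) == 4   // bitsLen(5) = 3, 1 << 2 = 4 != x, keep 4
///     _getSplitPoint(8) == 4   // an exact power of two is halved, so the right subtree is never empty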
65 | function _getSplitPoint(uint256 x) pure returns (uint256) { 66 | // Note: since `x` is always an unsigned int * 2, the only way for this 67 | // to be violated is if the input == 0. Since the input is the end 68 | // index exclusive, an input of 0 is guaranteed to be invalid (it would 69 | // be a proof of inclusion of nothing, which is vacuous). 70 | require(x >= 1); 71 | 72 | uint256 bitLen = _bitsLen(x); 73 | uint256 k = 1 << (bitLen - 1); 74 | if (k == x) { 75 | k >>= 1; 76 | } 77 | return k; 78 | } 79 | 80 | /// @notice Returns the size of the subtree adjacent to `begin` that does 81 | /// not overlap `end`. 82 | /// @param begin Begin index, inclusive. 83 | /// @param end End index, exclusive. 84 | function _nextSubtreeSize(uint256 begin, uint256 end) pure returns (uint256) { 85 | uint256 ideal = _bitsTrailingZeroes(begin); 86 | uint256 max = _bitsLen(end - begin) - 1; 87 | if (ideal > max) { 88 | return 1 << max; 89 | } 90 | return 1 << ideal; 91 | } 92 | 93 | /// @notice Returns the number of trailing zero bits in `x`; the result is 94 | /// 256 for `x` == 0. 95 | /// @param x Number. 96 | function _bitsTrailingZeroes(uint256 x) pure returns (uint256) { 97 | uint256 mask = 1; 98 | uint256 count = 0; 99 | 100 | while (x != 0 && mask & x == 0) { 101 | count++; 102 | x >>= 1; 103 | } 104 | 105 | return count; 106 | } 107 | -------------------------------------------------------------------------------- /src/lib/tree/binary/BinaryMerkleMultiproof.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | /// @notice Merkle Tree Proof structure. 5 | struct BinaryMerkleMultiproof { 6 | // List of side nodes to verify and calculate tree. 7 | bytes32[] sideNodes; 8 | // The (included) beginning key of the leaves to verify. 9 | uint256 beginKey; 10 | // The (excluded) ending key of the leaves to verify. 11 | uint256 endKey; 12 | } 13 | -------------------------------------------------------------------------------- /src/lib/tree/binary/BinaryMerkleProof.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | /// @notice Merkle Tree Proof structure. 5 | struct BinaryMerkleProof { 6 | // List of side nodes to verify and calculate tree. 7 | bytes32[] sideNodes; 8 | // The key of the leaf to verify. 9 | uint256 key; 10 | // The number of leaves in the tree 11 | uint256 numLeaves; 12 | } 13 | -------------------------------------------------------------------------------- /src/lib/tree/binary/BinaryMerkleTree.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "../Constants.sol"; 5 | import "../Utils.sol"; 6 | import "./TreeHasher.sol"; 7 | import "./BinaryMerkleProof.sol"; 8 | import "./BinaryMerkleMultiproof.sol"; 9 | 10 | /// @title Binary Merkle Tree. 11 | library BinaryMerkleTree { 12 | ///////////////// 13 | // Error codes // 14 | ///////////////// 15 | 16 | enum ErrorCodes { 17 | NoError, 18 | /// @notice The provided side nodes count is invalid for the proof. 19 | InvalidNumberOfSideNodes, 20 | /// @notice The provided proof key is not part of the tree. 21 | KeyNotInTree, 22 | /// @notice Invalid number of leaves in proof. 23 | InvalidNumberOfLeavesInProof, 24 | /// @notice The proof contains unexpected side nodes. 
25 | UnexpectedInnerHashes, 26 | /// @notice The proof verification expected at least one inner hash. 27 | ExpectedAtLeastOneInnerHash 28 | } 29 | 30 | /////////////// 31 | // Functions // 32 | /////////////// 33 | 34 | /// @notice Verify if element exists in Merkle tree, given data, proof, and root. 35 | /// @param root The root of the tree in which verify the given leaf. 36 | /// @param proof Binary Merkle proof for the leaf. 37 | /// @param data The data of the leaf to verify. 38 | /// @return `true` is proof is valid, `false` otherwise. 39 | /// @dev proof.numLeaves is necessary to determine height of subtree containing the data to prove. 40 | function verify(bytes32 root, BinaryMerkleProof memory proof, bytes memory data) 41 | internal 42 | pure 43 | returns (bool, ErrorCodes) 44 | { 45 | // Check proof is correct length for the key it is proving 46 | if (proof.numLeaves <= 1) { 47 | if (proof.sideNodes.length != 0) { 48 | return (false, ErrorCodes.InvalidNumberOfSideNodes); 49 | } 50 | } else if (proof.sideNodes.length != pathLengthFromKey(proof.key, proof.numLeaves)) { 51 | return (false, ErrorCodes.InvalidNumberOfSideNodes); 52 | } 53 | 54 | // Check key is in tree 55 | if (proof.key >= proof.numLeaves) { 56 | return (false, ErrorCodes.KeyNotInTree); 57 | } 58 | 59 | // A sibling at height 1 is created by getting the hash of the data to prove. 60 | bytes32 digest = leafDigest(data); 61 | 62 | // Null proof is only valid if numLeaves = 1 63 | // If so, just verify hash(data) is root 64 | if (proof.sideNodes.length == 0) { 65 | if (proof.numLeaves == 1) { 66 | return (root == digest, ErrorCodes.NoError); 67 | } else { 68 | return (false, ErrorCodes.NoError); 69 | } 70 | } 71 | 72 | (bytes32 computedHash, ErrorCodes error) = computeRootHash(proof.key, proof.numLeaves, digest, proof.sideNodes); 73 | 74 | if (error != ErrorCodes.NoError) { 75 | return (false, error); 76 | } 77 | 78 | return (computedHash == root, ErrorCodes.NoError); 79 | } 80 | 81 | function verifyMulti(bytes32 root, BinaryMerkleMultiproof memory proof, bytes[] memory data) 82 | internal 83 | pure 84 | returns (bool) 85 | { 86 | bytes32[] memory nodes = new bytes32[](data.length); 87 | for (uint256 i = 0; i < data.length; i++) { 88 | nodes[i] = leafDigest(data[i]); 89 | } 90 | 91 | return verifyMultiHashes(root, proof, nodes); 92 | } 93 | 94 | function verifyMultiHashes(bytes32 root, BinaryMerkleMultiproof memory proof, bytes32[] memory leafNodes) 95 | internal 96 | pure 97 | returns (bool) 98 | { 99 | uint256 leafIndex = 0; 100 | bytes32[] memory leftSubtrees = new bytes32[](proof.sideNodes.length); 101 | 102 | for (uint256 i = 0; leafIndex != proof.beginKey && i < proof.sideNodes.length; ++i) { 103 | uint256 subtreeSize = _nextSubtreeSize(leafIndex, proof.beginKey); 104 | leftSubtrees[i] = proof.sideNodes[i]; 105 | leafIndex += subtreeSize; 106 | } 107 | 108 | uint256 proofRangeSubtreeEstimate = _getSplitPoint(proof.endKey) * 2; 109 | if (proofRangeSubtreeEstimate < 1) { 110 | proofRangeSubtreeEstimate = 1; 111 | } 112 | 113 | (bytes32 rootHash, uint256 proofHead,,) = 114 | _computeRootMulti(proof, leafNodes, 0, proofRangeSubtreeEstimate, 0, 0); 115 | for (uint256 i = proofHead; i < proof.sideNodes.length; ++i) { 116 | rootHash = nodeDigest(rootHash, proof.sideNodes[i]); 117 | } 118 | 119 | return (rootHash == root); 120 | } 121 | 122 | function _computeRootMulti( 123 | BinaryMerkleMultiproof memory proof, 124 | bytes32[] memory leafNodes, 125 | uint256 begin, 126 | uint256 end, 127 | uint256 headProof, 128 | uint256 
headLeaves 129 | ) private pure returns (bytes32, uint256, uint256, bool) { 130 | // reached a leaf 131 | if (end - begin == 1) { 132 | // if current range overlaps with proof range, pop and return a leaf 133 | if (proof.beginKey <= begin && begin < proof.endKey) { 134 | // Note: second return value is guaranteed to be `false` by 135 | // construction. 136 | return _popLeavesIfNonEmpty(leafNodes, headLeaves, leafNodes.length, headProof); 137 | } 138 | 139 | // if current range does not overlap with proof range, 140 | // pop and return a proof node (leaf) if present, 141 | // else return nil because leaf doesn't exist 142 | return _popProofIfNonEmpty(proof.sideNodes, headProof, end, headLeaves); 143 | } 144 | 145 | // if current range does not overlap with proof range, 146 | // pop and return a proof node if present, 147 | // else return nil because subtree doesn't exist 148 | if (end <= proof.beginKey || begin >= proof.endKey) { 149 | return _popProofIfNonEmpty(proof.sideNodes, headProof, end, headLeaves); 150 | } 151 | 152 | // Recursively get left and right subtree 153 | uint256 k = _getSplitPoint(end - begin); 154 | (bytes32 left, uint256 newHeadProofLeft, uint256 newHeadLeavesLeft,) = 155 | _computeRootMulti(proof, leafNodes, begin, begin + k, headProof, headLeaves); 156 | (bytes32 right, uint256 newHeadProof, uint256 newHeadLeaves, bool rightIsNil) = 157 | _computeRootMulti(proof, leafNodes, begin + k, end, newHeadProofLeft, newHeadLeavesLeft); 158 | 159 | // only right leaf/subtree can be non-existent 160 | if (rightIsNil == true) { 161 | return (left, newHeadProof, newHeadLeaves, false); 162 | } 163 | bytes32 hash = nodeDigest(left, right); 164 | return (hash, newHeadProof, newHeadLeaves, false); 165 | } 166 | 167 | function _popProofIfNonEmpty(bytes32[] memory nodes, uint256 headProof, uint256 end, uint256 headLeaves) 168 | private 169 | pure 170 | returns (bytes32, uint256, uint256, bool) 171 | { 172 | (bytes32 node, uint256 newHead, bool isNil) = _popIfNonEmpty(nodes, headProof, end); 173 | return (node, newHead, headLeaves, isNil); 174 | } 175 | 176 | function _popLeavesIfNonEmpty(bytes32[] memory nodes, uint256 headLeaves, uint256 end, uint256 headProof) 177 | private 178 | pure 179 | returns (bytes32, uint256, uint256, bool) 180 | { 181 | (bytes32 node, uint256 newHead, bool isNil) = _popIfNonEmpty(nodes, headLeaves, end); 182 | return (node, headProof, newHead, isNil); 183 | } 184 | 185 | function _popIfNonEmpty(bytes32[] memory nodes, uint256 head, uint256 end) 186 | private 187 | pure 188 | returns (bytes32, uint256, bool) 189 | { 190 | if (nodes.length == 0 || head >= nodes.length || head >= end) { 191 | bytes32 node; 192 | return (node, head, true); 193 | } 194 | return (nodes[head], head + 1, false); 195 | } 196 | 197 | /// @notice Use the leafHash and innerHashes to get the root merkle hash. 198 | /// If the length of the innerHashes slice isn't exactly correct, the result is nil. 199 | /// Recursive impl. 
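/// @dev Illustrative trace (key = 0, numLeaves = 8, side nodes s0..s2, as in the tests under
/// ./test/BinaryMerkleTree.t.sol): the recursion splits at 4, then 2, then 1, and hashes back up as
///
///     root == nodeDigest(nodeDigest(nodeDigest(leafHash, s0), s1), s2)
///
/// Side nodes are consumed from the end of `sideNodes` on the way down and, because the key stays
/// in the left subtree at every level here, each partial result is hashed as the left child.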
200 | function computeRootHash(uint256 key, uint256 numLeaves, bytes32 leafHash, bytes32[] memory sideNodes) 201 | private 202 | pure 203 | returns (bytes32, ErrorCodes) 204 | { 205 | if (numLeaves == 0) { 206 | return (leafHash, ErrorCodes.InvalidNumberOfLeavesInProof); 207 | } 208 | if (numLeaves == 1) { 209 | if (sideNodes.length != 0) { 210 | return (leafHash, ErrorCodes.UnexpectedInnerHashes); 211 | } 212 | return (leafHash, ErrorCodes.NoError); 213 | } 214 | if (sideNodes.length == 0) { 215 | return (leafHash, ErrorCodes.ExpectedAtLeastOneInnerHash); 216 | } 217 | uint256 numLeft = _getSplitPoint(numLeaves); 218 | bytes32[] memory sideNodesLeft = slice(sideNodes, 0, sideNodes.length - 1); 219 | ErrorCodes error; 220 | if (key < numLeft) { 221 | bytes32 leftHash; 222 | (leftHash, error) = computeRootHash(key, numLeft, leafHash, sideNodesLeft); 223 | if (error != ErrorCodes.NoError) { 224 | return (leafHash, error); 225 | } 226 | return (nodeDigest(leftHash, sideNodes[sideNodes.length - 1]), ErrorCodes.NoError); 227 | } 228 | bytes32 rightHash; 229 | (rightHash, error) = computeRootHash(key - numLeft, numLeaves - numLeft, leafHash, sideNodesLeft); 230 | if (error != ErrorCodes.NoError) { 231 | return (leafHash, error); 232 | } 233 | return (nodeDigest(sideNodes[sideNodes.length - 1], rightHash), ErrorCodes.NoError); 234 | } 235 | 236 | /// @notice creates a slice of bytes32 from the data slice of bytes32 containing the elements 237 | /// that correspond to the provided range. 238 | /// It selects a half-open range which includes the begin element, but excludes the end one. 239 | /// @param _data The slice that we want to select data from. 240 | /// @param _begin The beginning of the range (inclusive). 241 | /// @param _end The ending of the range (exclusive). 242 | /// @return _ the sliced data. 243 | function slice(bytes32[] memory _data, uint256 _begin, uint256 _end) internal pure returns (bytes32[] memory) { 244 | if (_begin > _end) { 245 | revert("Invalid range: _begin is greater than _end"); 246 | } 247 | if (_begin > _data.length || _end > _data.length) { 248 | revert("Invalid range: _begin or _end are out of bounds"); 249 | } 250 | bytes32[] memory out = new bytes32[](_end - _begin); 251 | for (uint256 i = _begin; i < _end; i++) { 252 | out[i - _begin] = _data[i]; 253 | } 254 | return out; 255 | } 256 | } 257 | -------------------------------------------------------------------------------- /src/lib/tree/binary/TreeHasher.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "../Constants.sol"; 5 | 6 | /// @notice Calculate the digest of a node. 7 | /// @param left The left child. 8 | /// @param right The right child. 9 | /// @return digest The node digest. 10 | /// @dev More details in https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#binary-merkle-tree 11 | // solhint-disable-next-line func-visibility 12 | function nodeDigest(bytes32 left, bytes32 right) pure returns (bytes32 digest) { 13 | digest = sha256(abi.encodePacked(Constants.NODE_PREFIX, left, right)); 14 | } 15 | 16 | /// @notice Calculate the digest of a leaf. 17 | /// @param data The data of the leaf. 18 | /// @return digest The leaf digest. 
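/// @dev Example (taken from the test vectors in ./test/BinaryMerkleTree.t.sol):
///     leafDigest(hex"01") == sha256(abi.encodePacked(Constants.LEAF_PREFIX, hex"01"))
///                         == 0xb413f47d13ee2fe6c845b2ee141af81de858df4ec549a58b7970bb96645bc8d2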
19 | /// @dev More details in https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#binary-merkle-tree 20 | // solhint-disable-next-line func-visibility 21 | function leafDigest(bytes memory data) pure returns (bytes32 digest) { 22 | digest = sha256(abi.encodePacked(Constants.LEAF_PREFIX, data)); 23 | } 24 | -------------------------------------------------------------------------------- /src/lib/tree/binary/test/BinaryMerkleTree.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "ds-test/test.sol"; 5 | import "forge-std/Vm.sol"; 6 | 7 | import "../BinaryMerkleProof.sol"; 8 | import "../BinaryMerkleTree.sol"; 9 | import "../BinaryMerkleMultiproof.sol"; 10 | 11 | /** 12 | * TEST VECTORS 13 | * 14 | * 0x01 15 | * 0x02 16 | * 0x03 17 | * 0x04 18 | * 0x05 19 | * 0x06 20 | * 0x07 21 | * 0x08 22 | * 23 | * 0xb413f47d13ee2fe6c845b2ee141af81de858df4ec549a58b7970bb96645bc8d2 24 | * 0xfcf0a6c700dd13e274b6fba8deea8dd9b26e4eedde3495717cac8408c9c5177f 25 | * 0x583c7dfb7b3055d99465544032a571e10a134b1b6f769422bbb71fd7fa167a5d 26 | * 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4 27 | * 0x9f1afa4dc124cba73134e82ff50f17c8f7164257c79fed9a13f5943a6acb8e3d 28 | * 0x40d88127d4d31a3891f41598eeed41174e5bc89b1eb9bbd66a8cbfc09956a3fd 29 | * 0x2ecd8a6b7d2845546659ad4cf443533cf921b19dc81fa83934e83821b4dfdcb7 30 | * 0xb4c43b50bf245bd727623e3c775a8fcfb8d823d00b57dd65f7f79dd33f126315 31 | * 32 | * 0x6bcf0e2e93e0a18e22789aee965e6553f4fbe93f0acfc4a705d691c8311c4965 33 | * 0x78850a5ab36238b076dd99fd258c70d523168704247988a94caa8c9ccd056b8d 34 | * 0x90eeb2c4a04ec33ee4dd2677593331910e4203db4fcc120a6cdb95b13cfe83f0 35 | * 0x28c01722dd8dd05b63bcdeb6878bc2c083118cc2b170646d6b842d0bdbdc9d29 36 | * 37 | * 0xfa02d31a63cc11cc624881e52af14af7a1c6ab745efa71021cb24086b9b1793f 38 | * 0x4301a067262bbb18b4919742326f6f6d706099f9c0e8b0f2db7b88f204b2cf09 39 | * 40 | * 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071 41 | * 42 | */ 43 | 44 | // Intermediate mock contract that calls the library function 45 | // and returns the result. 
46 | // This is done because of the expectRevert v1 behaviour change: 47 | // https://github.com/foundry-rs/book/pull/922 48 | contract BinaryMerkleTreeLibMock { 49 | function slice(bytes32[] memory _data, uint256 _begin, uint256 _end) external returns (bytes32[] memory) { 50 | return BinaryMerkleTree.slice(_data, _begin, _end); 51 | } 52 | } 53 | 54 | contract BinaryMerkleProofTest is DSTest { 55 | Vm private constant vm = Vm(address(uint160(uint256(keccak256("hevm cheat code"))))); 56 | BinaryMerkleTreeLibMock public binaryMerkleTreeMock; 57 | 58 | function setUp() external { 59 | binaryMerkleTreeMock = new BinaryMerkleTreeLibMock(); 60 | } 61 | 62 | function testVerifyNone() external { 63 | bytes32 root = sha256(""); 64 | bytes32[] memory sideNodes; 65 | uint256 key = 0; 66 | uint256 numLeaves = 0; 67 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 68 | bytes memory data; 69 | (bool isValid,) = BinaryMerkleTree.verify(root, proof, data); 70 | assertTrue(!isValid); 71 | } 72 | 73 | function testVerifyOneLeafEmpty() external { 74 | bytes32 root = 0x6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d; 75 | bytes32[] memory sideNodes; 76 | uint256 key = 0; 77 | uint256 numLeaves = 1; 78 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 79 | bytes memory data; 80 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 81 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 82 | assertTrue(isValid); 83 | } 84 | 85 | function testVerifyOneLeafSome() external { 86 | bytes32 root = 0x48c90c8ae24688d6bef5d48a30c2cc8b6754335a8db21793cc0a8e3bed321729; 87 | bytes32[] memory sideNodes; 88 | uint256 key = 0; 89 | uint256 numLeaves = 1; 90 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 91 | bytes memory data = hex"deadbeef"; 92 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 93 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 94 | assertTrue(isValid); 95 | } 96 | 97 | function testVerifyOneLeaf01() external { 98 | bytes32 root = 0xb413f47d13ee2fe6c845b2ee141af81de858df4ec549a58b7970bb96645bc8d2; 99 | bytes32[] memory sideNodes; 100 | uint256 key = 0; 101 | uint256 numLeaves = 1; 102 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 103 | bytes memory data = hex"01"; 104 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 105 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 106 | assertTrue(isValid); 107 | } 108 | 109 | function testVerifyLeafOneOfEight() external { 110 | bytes32 root = 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071; 111 | bytes32[] memory sideNodes = new bytes32[](3); 112 | sideNodes[0] = 0xfcf0a6c700dd13e274b6fba8deea8dd9b26e4eedde3495717cac8408c9c5177f; 113 | sideNodes[1] = 0x78850a5ab36238b076dd99fd258c70d523168704247988a94caa8c9ccd056b8d; 114 | sideNodes[2] = 0x4301a067262bbb18b4919742326f6f6d706099f9c0e8b0f2db7b88f204b2cf09; 115 | 116 | uint256 key = 0; 117 | uint256 numLeaves = 8; 118 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 119 | bytes memory data = hex"01"; 120 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 121 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 122 | assertTrue(isValid); 123 | } 124 | 
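// Illustrative cross-check, not an additional test: the one-of-eight proof above can be rebuilt
// by hand from the test vectors at the top of this file using leafDigest/nodeDigest from
// ../TreeHasher.sol (n12 and n14 are hypothetical local names):
//
//     bytes32 n12 = nodeDigest(leafDigest(hex"01"), sideNodes[0]); // 0x6bcf0e2e...311c4965
//     bytes32 n14 = nodeDigest(n12, sideNodes[1]);                 // 0xfa02d31a...b9b1793f
//     assert(nodeDigest(n14, sideNodes[2]) == root);               // 0xc1ad6548...0c7f7071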
125 | function testVerifyLeafTwoOfEight() external { 126 | bytes32 root = 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071; 127 | bytes32[] memory sideNodes = new bytes32[](3); 128 | sideNodes[0] = 0xb413f47d13ee2fe6c845b2ee141af81de858df4ec549a58b7970bb96645bc8d2; 129 | sideNodes[1] = 0x78850a5ab36238b076dd99fd258c70d523168704247988a94caa8c9ccd056b8d; 130 | sideNodes[2] = 0x4301a067262bbb18b4919742326f6f6d706099f9c0e8b0f2db7b88f204b2cf09; 131 | 132 | uint256 key = 1; 133 | uint256 numLeaves = 8; 134 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 135 | bytes memory data = hex"02"; 136 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 137 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 138 | assertTrue(isValid); 139 | } 140 | 141 | function testVerifyLeafThreeOfEight() external { 142 | bytes32 root = 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071; 143 | bytes32[] memory sideNodes = new bytes32[](3); 144 | sideNodes[0] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 145 | sideNodes[1] = 0x6bcf0e2e93e0a18e22789aee965e6553f4fbe93f0acfc4a705d691c8311c4965; 146 | sideNodes[2] = 0x4301a067262bbb18b4919742326f6f6d706099f9c0e8b0f2db7b88f204b2cf09; 147 | 148 | uint256 key = 2; 149 | uint256 numLeaves = 8; 150 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 151 | bytes memory data = hex"03"; 152 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 153 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 154 | assertTrue(isValid); 155 | } 156 | 157 | function testVerifyLeafSevenOfEight() external { 158 | bytes32 root = 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071; 159 | bytes32[] memory sideNodes = new bytes32[](3); 160 | sideNodes[0] = 0xb4c43b50bf245bd727623e3c775a8fcfb8d823d00b57dd65f7f79dd33f126315; 161 | sideNodes[1] = 0x90eeb2c4a04ec33ee4dd2677593331910e4203db4fcc120a6cdb95b13cfe83f0; 162 | sideNodes[2] = 0xfa02d31a63cc11cc624881e52af14af7a1c6ab745efa71021cb24086b9b1793f; 163 | 164 | uint256 key = 6; 165 | uint256 numLeaves = 8; 166 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 167 | bytes memory data = hex"07"; 168 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 169 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 170 | assertTrue(isValid); 171 | } 172 | 173 | function testVerifyLeafEightOfEight() external { 174 | bytes32 root = 0xc1ad6548cb4c7663110df219ec8b36ca63b01158956f4be31a38a88d0c7f7071; 175 | bytes32[] memory sideNodes = new bytes32[](3); 176 | sideNodes[0] = 0x2ecd8a6b7d2845546659ad4cf443533cf921b19dc81fa83934e83821b4dfdcb7; 177 | sideNodes[1] = 0x90eeb2c4a04ec33ee4dd2677593331910e4203db4fcc120a6cdb95b13cfe83f0; 178 | sideNodes[2] = 0xfa02d31a63cc11cc624881e52af14af7a1c6ab745efa71021cb24086b9b1793f; 179 | 180 | uint256 key = 7; 181 | uint256 numLeaves = 8; 182 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 183 | bytes memory data = hex"08"; 184 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 185 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 186 | assertTrue(isValid); 187 | } 188 | 189 | // Test vectors: 190 | // 0x00 191 | // 0x01 192 | // 0x02 193 | // 0x03 194 | // 0x04 195 | 
function testVerifyProofOfFiveLeaves() external { 196 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 197 | bytes32[] memory sideNodes = new bytes32[](3); 198 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 199 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 200 | sideNodes[2] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 201 | 202 | uint256 key = 1; 203 | uint256 numLeaves = 5; 204 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 205 | bytes memory data = bytes(hex"01"); 206 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 207 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 208 | assertTrue(isValid); 209 | } 210 | 211 | function testVerifyInvalidProofRoot() external { 212 | // correct root: 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 213 | bytes32 root = 0xc855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 214 | bytes32[] memory sideNodes = new bytes32[](3); 215 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 216 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 217 | sideNodes[2] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 218 | 219 | uint256 key = 1; 220 | uint256 numLeaves = 5; 221 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 222 | bytes memory data = bytes(hex"01"); 223 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 224 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 225 | assertTrue(!isValid); 226 | } 227 | 228 | function testVerifyInvalidProofKey() external { 229 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 230 | bytes32[] memory sideNodes = new bytes32[](3); 231 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 232 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 233 | sideNodes[2] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 234 | 235 | // correct key: 1 236 | uint256 key = 2; 237 | uint256 numLeaves = 5; 238 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 239 | bytes memory data = bytes(hex"01"); 240 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 241 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 242 | assertTrue(!isValid); 243 | } 244 | 245 | function testVerifyInvalidProofNumberOfLeaves() external { 246 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 247 | bytes32[] memory sideNodes = new bytes32[](3); 248 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 249 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 250 | sideNodes[2] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 251 | 252 | uint256 key = 1; 253 | // correct numLeaves: 5 254 | uint256 numLeaves = 200; 255 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 256 | bytes memory data = bytes(hex"01"); 257 | (bool isValid,) = BinaryMerkleTree.verify(root, proof, data); 258 | assertTrue(!isValid); 259 | } 260 | 261 | function 
testVerifyInvalidProofSideNodes() external { 262 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 263 | bytes32[] memory sideNodes = new bytes32[](3); 264 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 265 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 266 | // correct side node: 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 267 | sideNodes[2] = 0x5f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 268 | 269 | uint256 key = 1; 270 | uint256 numLeaves = 5; 271 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 272 | bytes memory data = bytes(hex"01"); 273 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 274 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 275 | assertTrue(!isValid); 276 | } 277 | 278 | function testVerifyInvalidProofData() external { 279 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 280 | bytes32[] memory sideNodes = new bytes32[](3); 281 | sideNodes[0] = 0x96a296d224f285c67bee93c30f8a309157f0daa35dc5b87e410b78630a09cfc7; 282 | sideNodes[1] = 0x52c56b473e5246933e7852989cd9feba3b38f078742b93afff1e65ed46797825; 283 | sideNodes[2] = 0x4f35212d12f9ad2036492c95f1fe79baf4ec7bd9bef3dffa7579f2293ff546a4; 284 | 285 | uint256 key = 1; 286 | uint256 numLeaves = 5; 287 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 288 | // correct data: 01 289 | bytes memory data = bytes(hex"012345"); 290 | (bool isValid, BinaryMerkleTree.ErrorCodes error) = BinaryMerkleTree.verify(root, proof, data); 291 | assertEq(uint256(BinaryMerkleTree.ErrorCodes.NoError), uint256(error)); 292 | assertTrue(!isValid); 293 | } 294 | 295 | function testValidSlice() public { 296 | bytes32[] memory data = new bytes32[](4); 297 | data[0] = "a"; 298 | data[1] = "b"; 299 | data[2] = "c"; 300 | data[3] = "d"; 301 | 302 | bytes32[] memory result = BinaryMerkleTree.slice(data, 1, 3); 303 | 304 | assertEq(result[0], data[1]); 305 | assertEq(result[1], data[2]); 306 | } 307 | 308 | function testSameKeyAndLeavesNumber() external { 309 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 310 | bytes32[] memory sideNodes = new bytes32[](0); 311 | uint256 key = 3; 312 | uint256 numLeaves = 3; 313 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 314 | bytes memory data = bytes(hex"01"); 315 | (bool isValid,) = BinaryMerkleTree.verify(root, proof, data); 316 | assertTrue(!isValid); 317 | } 318 | 319 | function testConsecutiveKeyAndNumberOfLeaves() external { 320 | bytes32 root = 0xb855b42d6c30f5b087e05266783fbd6e394f7b926013ccaa67700a8b0c5a596f; 321 | bytes32[] memory sideNodes = new bytes32[](0); 322 | uint256 key = 6; 323 | uint256 numLeaves = 7; 324 | BinaryMerkleProof memory proof = BinaryMerkleProof(sideNodes, key, numLeaves); 325 | bytes memory data = bytes(hex"01"); 326 | (bool isValid,) = BinaryMerkleTree.verify(root, proof, data); 327 | assertTrue(!isValid); 328 | } 329 | 330 | function testInvalidSliceBeginEnd() public { 331 | bytes32[] memory data = new bytes32[](4); 332 | data[0] = "a"; 333 | data[1] = "b"; 334 | data[2] = "c"; 335 | data[3] = "d"; 336 | 337 | vm.expectRevert("Invalid range: _begin is greater than _end"); 338 | binaryMerkleTreeMock.slice(data, 2, 1); 339 | } 340 | 341 | function testOutOfBoundsSlice() public { 342 | 
bytes32[] memory data = new bytes32[](4); 343 | data[0] = "a"; 344 | data[1] = "b"; 345 | data[2] = "c"; 346 | data[3] = "d"; 347 | 348 | vm.expectRevert("Invalid range: _begin or _end are out of bounds"); 349 | binaryMerkleTreeMock.slice(data, 2, 5); 350 | } 351 | 352 | // header.dat, blob.dat, and proofs.json test vectors included in ../../test/ and serialized to hex bytes using Rust 353 | // The hard-coded serialized proofs and data were generated in Rust, with this code 354 | // https://github.com/S1nus/hyperchain-da/blob/main/src/clients/celestia/evm_types.rs#L132 355 | function testMultiproof() public { 356 | bytes memory proofData = 357 | hex"00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000060000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000090000000000000000000000000000000000000000000000000000000000000006ce29bcde696f84e35c5626904542a549b080e92603243b34794242473940706917519bf954f5b30495af5c8cdb9983e6319104badc1ea811ed2c421018a3ad7821ea268d3540deab8f9b2024464618610c9a7083620badcf505bda647cc8e9f82bfc87d990d8344f6efd44fcb09b46b87f9a92230d41329452efee8656c6760a9ad9f3a95af971e89e2a80b255bb56d5aae15de69803b52aa5079b33374b16e16178fc62a2f2ce6bf21909c0a0edea9525486e0ece65bff23499342cca38dd62"; 358 | BinaryMerkleMultiproof memory multiproof = abi.decode(proofData, (BinaryMerkleMultiproof)); 359 | bytes32 dataroot = hex"ef8920d86519bd5f8ce3c802b84fc9b9512483e4d4a5c9608b44af4d6639f7d1"; 360 | bytes memory leafData = 361 | hex"00000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000009000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000001a0000000000000000000000000000000000000000000000000000000000000022000000000000000000000000000000000000000000000000000000000000002a0000000000000000000000000000000000000000000000000000000000000032000000000000000000000000000000000000000000000000000000000000003a0000000000000000000000000000000000000000000000000000000000000042000000000000000000000000000000000000000000000000000000000000004a00000000000000000000000000000000000000000000000000000000000000520000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000102030405746e218305fe3dbbef65feceed939fe8dd93c88b06c95473fbe344fb864060f3000000000000000000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000102030405000000000000000000000000000000000000000000000000010203040555cd7fb524ae792c9d4bc8946d07209728c533a3e14d4e7c0c95c0b150d0c284000000000000000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000001020304050000000000000000000000000000000000000000000000000102030405505c1e7c897461a152e152f1ff3ecc358fefdf1f69448ab1165b6ca76836933b000000000000000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000001020304050000000000000000000000000000000000000000000000000102030405100a0548893d8eab0322f34f45ac84785cdf50dfab5102a12d958e6031bacebe000000000000000000000000000000000000000000000000000000000000000000000000005a0000000000000000000000000000000000000000000000000102030405000000000000000000000000000000000000000000000000010203040566e5eb1da67430f204a3c5615591f71316695c7ec1f1f713
cde7e936d4a43ec1000000000000000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000001020304050000000000000000000000000000000000000000000000000102030405d2a5de6299e28c2fec359a2718599f5ac22c2948a71d26a438295e531b6f4cb5000000000000000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000001020304050000000000000000000000000000000000000000000000000102030405688c5238e50c0a8a556bfabff31bef1fa9cdd812c9fd4dcee5c2a0836f687fbf000000000000000000000000000000000000000000000000000000000000000000000000005a00000000000000000000000000000000000000000000000001020304050000000000000000000000000000000000000000000000000102030405b55a5b1efc2a22cdbfa21d050bd67147ff2b936c68354eb1a83bcdf14eb57e38000000000000000000000000000000000000000000000000000000000000000000000000005a000000000000000000000000000000000000000000000000010203040500000000000000000000000000000000000000000067480c4a88c4d129947e11c33fa811daa791771e591dd933498d1212d46b8cde9c34c28831b0b532000000000000"; 362 | bytes[] memory leaves = abi.decode(leafData, (bytes[])); 363 | assertTrue(BinaryMerkleTree.verifyMulti(dataroot, multiproof, leaves)); 364 | } 365 | } 366 | -------------------------------------------------------------------------------- /src/lib/tree/binary/test/TreeHasher.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "ds-test/test.sol"; 5 | 6 | import "../TreeHasher.sol"; 7 | 8 | contract TreeHasherTest is DSTest { 9 | function setUp() external {} 10 | 11 | function testLeafDigestEmpty() external { 12 | bytes32 expected = 0x6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d; 13 | bytes memory data; 14 | bytes32 digest = leafDigest(data); 15 | assertEq(digest, expected); 16 | } 17 | 18 | function testLeafDigestSome() external { 19 | bytes32 expected = 0x48c90c8ae24688d6bef5d48a30c2cc8b6754335a8db21793cc0a8e3bed321729; 20 | bytes memory data = hex"deadbeef"; 21 | bytes32 digest = leafDigest(data); 22 | assertEq(digest, expected); 23 | } 24 | 25 | function testNodeDigestEmptyChildren() external { 26 | bytes32 expected = 0xfe43d66afa4a9a5c4f9c9da89f4ffb52635c8f342e7ffb731d68e36c5982072a; 27 | bytes32 left = 0x6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d; 28 | bytes32 right = 0x6e340b9cffb37a989ca544e6bb780a2c78901d3fb33738768511a30617afa01d; 29 | bytes32 digest = nodeDigest(left, right); 30 | assertEq(digest, expected); 31 | } 32 | 33 | function testNodeDigestSomeChildren() external { 34 | bytes32 expected = 0x62343bba7c4d6259f0d4863cdf476f1c0ac1b9fbe9244723a9b8b5c8aae72c38; 35 | bytes32 left = 0xdb55da3fc3098e9c42311c6013304ff36b19ef73d12ea932054b5ad51df4f49d; 36 | bytes32 right = 0xc75cb66ae28d8ebc6eded002c28a8ba0d06d3a78c6b5cbf9b2ade051f0775ac4; 37 | bytes32 digest = nodeDigest(left, right); 38 | assertEq(digest, expected); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/lib/tree/namespace/NamespaceMerkleMultiproof.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "./NamespaceNode.sol"; 5 | 6 | /// @notice Namespace Merkle Tree Multiproof structure. Proves multiple leaves. 7 | struct NamespaceMerkleMultiproof { 8 | // The (included) beginning key of the leaves to verify. 
9 | uint256 beginKey; 10 | // The (excluded) ending key of the leaves to verify. 11 | uint256 endKey; 12 | // List of side nodes to verify and calculate tree. 13 | NamespaceNode[] sideNodes; 14 | } 15 | -------------------------------------------------------------------------------- /src/lib/tree/namespace/NamespaceMerkleProof.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "./NamespaceNode.sol"; 5 | 6 | /// @notice Namespace Merkle Tree Proof structure. 7 | struct NamespaceMerkleProof { 8 | // List of side nodes to verify and calculate tree. 9 | NamespaceNode[] sideNodes; 10 | // The key of the leaf to verify. 11 | uint256 key; 12 | // The number of leaves in the tree 13 | uint256 numLeaves; 14 | } 15 | -------------------------------------------------------------------------------- /src/lib/tree/namespace/NamespaceMerkleTree.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "../Constants.sol"; 5 | import "../Types.sol"; 6 | import "../Utils.sol"; 7 | import "./NamespaceMerkleProof.sol"; 8 | import "./NamespaceMerkleMultiproof.sol"; 9 | import "./NamespaceNode.sol"; 10 | import "./TreeHasher.sol"; 11 | 12 | /// @title Namespace Merkle Tree. 13 | library NamespaceMerkleTree { 14 | /// @notice Verify if element exists in Merkle tree, given data, proof, and root. 15 | /// @param root The root of the tree in which the given leaf is verified. 16 | /// @param proof Namespace Merkle proof for the leaf. 17 | /// @param namespace Namespace of the leaf. 18 | /// @param data The data of the leaf to verify. 19 | /// @return `true` if the proof is valid, `false` otherwise. 20 | /// @dev proof.numLeaves is necessary to determine height of subtree containing the data to prove. 21 | function verify( 22 | NamespaceNode memory root, 23 | NamespaceMerkleProof memory proof, 24 | Namespace memory namespace, 25 | bytes memory data 26 | ) internal pure returns (bool) { 27 | // A sibling at height 1 is created by getting the leafDigest of the original data. 28 | NamespaceNode memory node = leafDigest(namespace, data); 29 | 30 | // Since we're verifying a leaf, height parameter is 1. 31 | return verifyInner(root, proof, node, 1); 32 | } 33 | 34 | /// @notice Verify if inner node exists in Merkle tree, given node, proof, and root. 35 | /// @param root The root of the tree in which the given leaf is verified. 36 | /// @param proof Namespace Merkle proof for the leaf. 37 | /// proof.key is any key in the subtree rooted at the inner node. 38 | /// @param node The inner node to verify. 39 | /// @param startingHeight Starting height of the proof. 40 | /// @return `true` if the proof is valid, `false` otherwise. 41 | /// @dev proof.numLeaves is necessary to determine height of subtree containing the data to prove. 
42 | function verifyInner(
43 | NamespaceNode memory root,
44 | NamespaceMerkleProof memory proof,
45 | NamespaceNode memory node,
46 | uint256 startingHeight
47 | ) internal pure returns (bool) {
48 | // Check starting height is at least 1
49 | if (startingHeight < 1) {
50 | return false;
51 | }
52 | uint256 heightOffset = startingHeight - 1;
53 | 
54 | // Check proof is correct length for the key it is proving
55 | if (proof.numLeaves <= 1) {
56 | if (proof.sideNodes.length != 0) {
57 | return false;
58 | }
59 | } else if (proof.sideNodes.length + heightOffset != pathLengthFromKey(proof.key, proof.numLeaves)) {
60 | return false;
61 | }
62 | 
63 | // Check key is in tree
64 | if (proof.key >= proof.numLeaves) {
65 | return false;
66 | }
67 | // Handle case where proof is empty: i.e., only one leaf exists, so verify hash(data) is root
68 | if (proof.sideNodes.length == 0) {
69 | if (proof.numLeaves == 1) {
70 | return namespaceNodeEquals(root, node);
71 | } else {
72 | return false;
73 | }
74 | }
75 | 
76 | // The case where the inner node is actually the root of a tree with more than one node is not relevant
77 | // to our use case, since the only case where an inner node is the root of the tree is when the tree
78 | // has only one inner node. So, there is no need to handle that case.
79 | 
80 | uint256 height = startingHeight;
81 | uint256 stableEnd = proof.key;
82 | 
83 | // While the current subtree (of height 'height') is complete, determine
84 | // the position of the next sibling using the complete subtree algorithm.
85 | // 'stableEnd' tells us the ending index of the last full subtree. It gets
86 | // initialized to 'key' because the first full subtree was the
87 | // subtree of height 1, created above (and had an ending index of
88 | // 'key').
89 | 
90 | while (true) {
91 | // Determine if the subtree is complete. This is accomplished by
92 | // rounding down the key to the nearest 1 << 'height', adding 1
93 | // << 'height', and comparing the result to the number of leaves in the
94 | // Merkle tree.
95 | 
96 | uint256 subTreeStartIndex = (proof.key / (1 << height)) * (1 << height);
97 | uint256 subTreeEndIndex = subTreeStartIndex + (1 << height) - 1;
98 | 
99 | // If the Merkle tree does not have a leaf at index
100 | // 'subTreeEndIndex', then the subtree of the current height is not
101 | // a complete subtree.
102 | if (subTreeEndIndex >= proof.numLeaves) {
103 | break;
104 | }
105 | stableEnd = subTreeEndIndex;
106 | 
107 | // Determine if the key is in the first or the second half of
108 | // the subtree.
109 | if (proof.sideNodes.length + heightOffset <= height - 1) {
110 | return false;
111 | }
112 | if (proof.key - subTreeStartIndex < (1 << (height - 1))) {
113 | node = nodeDigest(node, proof.sideNodes[height - heightOffset - 1]);
114 | } else {
115 | node = nodeDigest(proof.sideNodes[height - heightOffset - 1], node);
116 | }
117 | 
118 | height += 1;
119 | }
120 | 
121 | // Determine if the next hash belongs to an orphan that was elevated. This
122 | // is the case IFF 'stableEnd' (the last index of the largest full subtree)
123 | // is not the index of the final leaf, i.e. 'numLeaves - 1'.
124 | if (stableEnd != proof.numLeaves - 1) {
125 | if (proof.sideNodes.length <= height - heightOffset - 1) {
126 | return false;
127 | }
128 | node = nodeDigest(node, proof.sideNodes[height - heightOffset - 1]);
129 | height += 1;
130 | }
131 | // All remaining elements in the proof set will belong to a left sibling.
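// (At this point the accumulated 'node' roots the right-most complete-or-elevated subtree at the
// current height, covering everything up to the last leaf, so any side nodes still left in the
// proof can only be hashed in from the left.)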
132 | while (height - heightOffset - 1 < proof.sideNodes.length) {
133 | node = nodeDigest(proof.sideNodes[height - heightOffset - 1], node);
134 | height += 1;
135 | }
136 | 
137 | return namespaceNodeEquals(root, node);
138 | }
139 | 
140 | /// @notice Verify if contiguous elements exist in Merkle tree, given leaves, multiproof, and root.
141 | /// @param root The root of the tree in which the given leaves are verified.
142 | /// @param proof Namespace Merkle multiproof for the leaves.
143 | /// @param namespace Namespace of the leaves. All leaves must have the same namespace.
144 | /// @param data The leaves to verify. Note: leaf data must be the _entire_ share (including namespace prefixing).
145 | /// @return `true` if the proof is valid, `false` otherwise.
146 | function verifyMulti(
147 | NamespaceNode memory root,
148 | NamespaceMerkleMultiproof memory proof,
149 | Namespace memory namespace,
150 | bytes[] memory data
151 | ) internal pure returns (bool) {
152 | // Hash all the leaves to get leaf nodes.
153 | NamespaceNode[] memory nodes = new NamespaceNode[](data.length);
154 | for (uint256 i = 0; i < data.length; ++i) {
155 | nodes[i] = leafDigest(namespace, data[i]);
156 | }
157 | 
158 | // Verify inclusion of leaf nodes.
159 | return verifyMultiHashes(root, proof, nodes);
160 | }
161 | 
162 | /// @notice Verify if contiguous leaf hashes exist in Merkle tree, given leaf nodes, multiproof, and root.
163 | /// @param root The root of the tree in which the given leaf nodes are verified.
164 | /// @param proof Namespace Merkle multiproof for the leaves.
165 | /// @param leafNodes The leaf nodes to verify.
166 | /// @return `true` if the proof is valid, `false` otherwise.
167 | function verifyMultiHashes(
168 | NamespaceNode memory root,
169 | NamespaceMerkleMultiproof memory proof,
170 | NamespaceNode[] memory leafNodes
171 | ) internal pure returns (bool) {
172 | uint256 leafIndex = 0;
173 | NamespaceNode[] memory leftSubtrees = new NamespaceNode[](proof.sideNodes.length);
174 | 
175 | for (uint256 i = 0; leafIndex != proof.beginKey && i < proof.sideNodes.length; ++i) {
176 | uint256 subtreeSize = _nextSubtreeSize(leafIndex, proof.beginKey);
177 | leftSubtrees[i] = proof.sideNodes[i];
178 | leafIndex += subtreeSize;
179 | }
180 | 
181 | // estimate the leaf size of the subtree containing the proof range
182 | uint256 proofRangeSubtreeEstimate = _getSplitPoint(proof.endKey) * 2;
183 | if (proofRangeSubtreeEstimate < 1) {
184 | proofRangeSubtreeEstimate = 1;
185 | }
186 | 
187 | (NamespaceNode memory rootHash, uint256 proofHead,,) =
188 | _computeRoot(proof, leafNodes, 0, proofRangeSubtreeEstimate, 0, 0);
189 | for (uint256 i = proofHead; i < proof.sideNodes.length; ++i) {
190 | rootHash = nodeDigest(rootHash, proof.sideNodes[i]);
191 | }
192 | 
193 | return namespaceNodeEquals(rootHash, root);
194 | }
195 | 
196 | /// @notice Computes the NMT root recursively.
197 | /// @param proof Namespace Merkle multiproof for the leaves.
198 | /// @param leafNodes Leaf nodes for which inclusion is proven.
199 | /// @param begin Begin index, inclusive.
200 | /// @param end End index, exclusive.
201 | /// @param headProof Internal detail: head of proof sidenodes array. Used for recursion. Set to `0` on first call.
202 | /// @param headLeaves Internal detail: head of leaves array. Used for recursion. Set to `0` on first call.
203 | /// @return _ Subtree root.
204 | /// @return _ New proof sidenodes array head. Used for recursion.
205 | /// @return _ New leaves array head. Used for recursion.
206 | /// @return _ If the subtree root is "nil."
207 | function _computeRoot(
208 | NamespaceMerkleMultiproof memory proof,
209 | NamespaceNode[] memory leafNodes,
210 | uint256 begin,
211 | uint256 end,
212 | uint256 headProof,
213 | uint256 headLeaves
214 | ) private pure returns (NamespaceNode memory, uint256, uint256, bool) {
215 | // reached a leaf
216 | if (end - begin == 1) {
217 | // if current range overlaps with proof range, pop and return a leaf
218 | if (proof.beginKey <= begin && begin < proof.endKey) {
219 | // Note: the `isNil` return value is guaranteed to be `false` by
220 | // construction.
221 | return _popLeavesIfNonEmpty(leafNodes, headLeaves, leafNodes.length, headProof);
222 | }
223 | 
224 | // if current range does not overlap with proof range,
225 | // pop and return a proof node (leaf) if present,
226 | // else return nil because leaf doesn't exist
227 | return _popProofIfNonEmpty(proof.sideNodes, headProof, end, headLeaves);
228 | }
229 | 
230 | // if current range does not overlap with proof range,
231 | // pop and return a proof node if present,
232 | // else return nil because subtree doesn't exist
233 | if (end <= proof.beginKey || begin >= proof.endKey) {
234 | return _popProofIfNonEmpty(proof.sideNodes, headProof, end, headLeaves);
235 | }
236 | 
237 | // Recursively get left and right subtree
238 | uint256 k = _getSplitPoint(end - begin);
239 | (NamespaceNode memory left, uint256 newHeadProofLeft, uint256 newHeadLeavesLeft,) =
240 | _computeRoot(proof, leafNodes, begin, begin + k, headProof, headLeaves);
241 | (NamespaceNode memory right, uint256 newHeadProof, uint256 newHeadLeaves, bool rightIsNil) =
242 | _computeRoot(proof, leafNodes, begin + k, end, newHeadProofLeft, newHeadLeavesLeft);
243 | 
244 | // only right leaf/subtree can be non-existent
245 | if (rightIsNil == true) {
246 | return (left, newHeadProof, newHeadLeaves, false);
247 | }
248 | NamespaceNode memory hash = nodeDigest(left, right);
249 | return (hash, newHeadProof, newHeadLeaves, false);
250 | }
251 | 
252 | /// @notice Pop from the leaf nodes array slice if it's not empty.
253 | /// @param nodes Entire leaf nodes array.
254 | /// @param headLeaves Head of leaf nodes array slice.
255 | /// @param end End of leaf nodes array slice.
256 | /// @param headProof Used only to return for recursion.
257 | /// @return _ Popped node.
258 | /// @return _ Head of proof sidenodes array slice (unchanged).
259 | /// @return _ New head of leaf nodes array slice.
260 | /// @return _ If the popped node is "nil."
261 | function _popLeavesIfNonEmpty(NamespaceNode[] memory nodes, uint256 headLeaves, uint256 end, uint256 headProof)
262 | private
263 | pure
264 | returns (NamespaceNode memory, uint256, uint256, bool)
265 | {
266 | (NamespaceNode memory node, uint256 newHead, bool isNil) = _popIfNonEmpty(nodes, headLeaves, end);
267 | return (node, headProof, newHead, isNil);
268 | }
269 | 
270 | /// @notice Pop from the proof sidenodes array slice if it's not empty.
271 | /// @param nodes Entire proof sidenodes array.
272 | /// @param headProof Head of proof sidenodes array slice.
273 | /// @param end End of proof sidenodes array slice.
274 | /// @param headLeaves Used only to return for recursion.
275 | /// @return _ Popped node.
276 | /// @return _ New head of proof sidenodes array slice.
277 | /// @return _ Head of leaf nodes array slice (unchanged).
278 | /// @return _ If the popped node is "nil."
279 | function _popProofIfNonEmpty(NamespaceNode[] memory nodes, uint256 headProof, uint256 end, uint256 headLeaves)
280 | private
281 | pure
282 | returns (NamespaceNode memory, uint256, uint256, bool)
283 | {
284 | (NamespaceNode memory node, uint256 newHead, bool isNil) = _popIfNonEmpty(nodes, headProof, end);
285 | return (node, newHead, headLeaves, isNil);
286 | }
287 | 
288 | /// @notice Pop from an array slice if it's not empty.
289 | /// @param nodes Entire array.
290 | /// @param head Head of array slice.
291 | /// @param end End of array slice.
292 | /// @return _ Popped node.
293 | /// @return _ New head of array slice.
294 | /// @return _ If the popped node is "nil."
295 | function _popIfNonEmpty(NamespaceNode[] memory nodes, uint256 head, uint256 end)
296 | private
297 | pure
298 | returns (NamespaceNode memory, uint256, bool)
299 | {
300 | if (nodes.length == 0 || head >= nodes.length || head >= end) {
301 | NamespaceNode memory node;
302 | return (node, head, true);
303 | }
304 | return (nodes[head], head + 1, false);
305 | }
306 | }
307 | 
-------------------------------------------------------------------------------- /src/lib/tree/namespace/NamespaceNode.sol: --------------------------------------------------------------------------------
1 | // SPDX-License-Identifier: Apache-2.0
2 | pragma solidity ^0.8.22;
3 | 
4 | import "../Types.sol";
5 | 
6 | /// @notice Namespace Merkle Tree node.
7 | struct NamespaceNode {
8 | // Minimum namespace.
9 | Namespace min;
10 | // Maximum namespace.
11 | Namespace max;
12 | // Node value.
13 | bytes32 digest;
14 | }
15 | 
16 | /// @notice Compares two `NamespaceNode`s.
17 | /// @param first First node.
18 | /// @param second Second node.
19 | /// @return `true` if equal, `false` otherwise.
20 | // solhint-disable-next-line func-visibility
21 | function namespaceNodeEquals(NamespaceNode memory first, NamespaceNode memory second) pure returns (bool) {
22 | return first.min.equalTo(second.min) && first.max.equalTo(second.max) && (first.digest == second.digest);
23 | }
24 | 
-------------------------------------------------------------------------------- /src/lib/tree/namespace/TreeHasher.sol: --------------------------------------------------------------------------------
1 | // SPDX-License-Identifier: Apache-2.0
2 | pragma solidity ^0.8.22;
3 | 
4 | import "../Constants.sol";
5 | import "../Types.sol";
6 | import "./NamespaceNode.sol";
7 | 
8 | /// @notice Get the minimum namespace.
9 | // solhint-disable-next-line func-visibility
10 | function namespaceMin(Namespace memory l, Namespace memory r) pure returns (Namespace memory) {
11 | if (l.lessThan(r)) {
12 | return l;
13 | } else {
14 | return r;
15 | }
16 | }
17 | 
18 | /// @notice Get the maximum namespace.
19 | // solhint-disable-next-line func-visibility
20 | function namespaceMax(Namespace memory l, Namespace memory r) pure returns (Namespace memory) {
21 | if (l.greaterThan(r)) {
22 | return l;
23 | } else {
24 | return r;
25 | }
26 | }
27 | 
28 | /// @notice Hash a leaf node.
29 | /// @param namespace Namespace of the leaf.
30 | /// @param data Raw data of the leaf.
31 | /// @dev More details in https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#namespace-merkle-tree 32 | // solhint-disable-next-line func-visibility 33 | function leafDigest(Namespace memory namespace, bytes memory data) pure returns (NamespaceNode memory) { 34 | bytes32 digest = sha256(abi.encodePacked(Constants.LEAF_PREFIX, namespace.toBytes(), data)); 35 | NamespaceNode memory node = NamespaceNode(namespace, namespace, digest); 36 | return node; 37 | } 38 | 39 | /// @notice Hash an internal node. 40 | /// @param l Left child. 41 | /// @param r Right child. 42 | /// @dev More details in https://github.com/celestiaorg/celestia-specs/blob/master/src/specs/data_structures.md#namespace-merkle-tree 43 | // solhint-disable-next-line func-visibility 44 | function nodeDigest(NamespaceNode memory l, NamespaceNode memory r) pure returns (NamespaceNode memory) { 45 | Namespace memory min = namespaceMin(l.min, r.min); 46 | Namespace memory max; 47 | if (l.min.equalTo(PARITY_SHARE_NAMESPACE())) { 48 | max = PARITY_SHARE_NAMESPACE(); 49 | } else if (r.min.equalTo(PARITY_SHARE_NAMESPACE())) { 50 | max = l.max; 51 | } else { 52 | max = namespaceMax(l.max, r.max); 53 | } 54 | 55 | bytes32 digest = sha256( 56 | abi.encodePacked( 57 | Constants.NODE_PREFIX, 58 | l.min.toBytes(), 59 | l.max.toBytes(), 60 | l.digest, 61 | r.min.toBytes(), 62 | r.max.toBytes(), 63 | r.digest 64 | ) 65 | ); 66 | 67 | NamespaceNode memory node = NamespaceNode(min, max, digest); 68 | return node; 69 | } 70 | -------------------------------------------------------------------------------- /src/lib/tree/namespace/test/TreeHasher.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "ds-test/test.sol"; 5 | 6 | import "../../Constants.sol"; 7 | import "../../Types.sol"; 8 | import "../NamespaceNode.sol"; 9 | import "../TreeHasher.sol"; 10 | 11 | contract TreeHasherTest is DSTest { 12 | function setUp() external {} 13 | 14 | function assertEqNamespaceNode(NamespaceNode memory first, NamespaceNode memory second) internal { 15 | assertTrue(first.min.equalTo(second.min)); 16 | assertTrue(first.max.equalTo(second.max)); 17 | assertEq(first.digest, second.digest); 18 | } 19 | 20 | function testLeafDigestEmpty() external { 21 | Namespace memory nid = Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000000); 22 | NamespaceNode memory expected = 23 | NamespaceNode(nid, nid, 0x0679246d6c4216de0daa08e5523fb2674db2b6599c3b72ff946b488a15290b62); 24 | bytes memory data; 25 | NamespaceNode memory node = leafDigest(nid, data); 26 | assertEqNamespaceNode(node, expected); 27 | } 28 | 29 | function testLeafDigestSome() external { 30 | Namespace memory nid = Namespace(0xde, 0xadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefde); 31 | NamespaceNode memory expected = 32 | NamespaceNode(nid, nid, 0x3624c7f7169cb5bbd0d010b851ebd0edca10b2a1b126f5fb1a6d5e0d98356e63); 33 | bytes memory data = hex"69"; 34 | NamespaceNode memory node = leafDigest(nid, data); 35 | assertEqNamespaceNode(node, expected); 36 | } 37 | 38 | function testNodeDigest() external { 39 | Namespace memory nidLeft = Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000000); 40 | Namespace memory nidRight = Namespace(0xde, 0xadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefde); 41 | NamespaceNode memory expected = 42 | NamespaceNode(nidLeft, nidRight, 
0x95cad48bc181484c851004cf772abe767391e19549d3b8192b55b1d654a71bcd); 43 | NamespaceNode memory left = 44 | NamespaceNode(nidLeft, nidLeft, 0xdb55da3fc3098e9c42311c6013304ff36b19ef73d12ea932054b5ad51df4f49d); 45 | NamespaceNode memory right = 46 | NamespaceNode(nidRight, nidRight, 0xc75cb66ae28d8ebc6eded002c28a8ba0d06d3a78c6b5cbf9b2ade051f0775ac4); 47 | NamespaceNode memory node = nodeDigest(left, right); 48 | assertEqNamespaceNode(node, expected); 49 | } 50 | 51 | function testNodeParity() external { 52 | Namespace memory nidMin = Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000000); 53 | Namespace memory nidMax = Namespace(0xde, 0xadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefdeadbeefde); 54 | NamespaceNode memory expected = 55 | NamespaceNode(nidMin, nidMax, 0xc6960f535d4ab0aed075aed34a116725e8035012ceffe5405ae72abe3bcaa28f); 56 | NamespaceNode memory left = 57 | NamespaceNode(nidMin, nidMax, 0xdb55da3fc3098e9c42311c6013304ff36b19ef73d12ea932054b5ad51df4f49d); 58 | NamespaceNode memory right = NamespaceNode( 59 | PARITY_SHARE_NAMESPACE(), 60 | PARITY_SHARE_NAMESPACE(), 61 | 0xc75cb66ae28d8ebc6eded002c28a8ba0d06d3a78c6b5cbf9b2ade051f0775ac4 62 | ); 63 | NamespaceNode memory node = nodeDigest(left, right); 64 | assertEqNamespaceNode(node, expected); 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/lib/tree/test/Utils.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "ds-test/test.sol"; 5 | 6 | import "../Utils.sol"; 7 | 8 | contract UtilsTest is DSTest { 9 | function testPathLengthFromKey0_2() external { 10 | assertEq(pathLengthFromKey(0, 2), 1); 11 | } 12 | 13 | function testPathLengthFromKey1_2() external { 14 | assertEq(pathLengthFromKey(1, 2), 1); 15 | } 16 | 17 | function testPathLengthFromKey0_8() external { 18 | assertEq(pathLengthFromKey(0, 8), 3); 19 | } 20 | 21 | function testPathLengthFromKey1_8() external { 22 | assertEq(pathLengthFromKey(1, 8), 3); 23 | } 24 | 25 | function testPathLengthFromKey2_8() external { 26 | assertEq(pathLengthFromKey(2, 8), 3); 27 | } 28 | 29 | function testPathLengthFromKey3_8() external { 30 | assertEq(pathLengthFromKey(3, 8), 3); 31 | } 32 | 33 | function testPathLengthFromKey4_8() external { 34 | assertEq(pathLengthFromKey(4, 8), 3); 35 | } 36 | 37 | function testPathLengthFromKey5_8() external { 38 | assertEq(pathLengthFromKey(5, 8), 3); 39 | } 40 | 41 | function testPathLengthFromKey6_8() external { 42 | assertEq(pathLengthFromKey(6, 8), 3); 43 | } 44 | 45 | function testPathLengthFromKey7_8() external { 46 | assertEq(pathLengthFromKey(7, 8), 3); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/lib/tree/test/blob.dat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/celestiaorg/blobstream-contracts/cfdc2a0b651f2d28fb5e10386f94d74944d4d5d4/src/lib/tree/test/blob.dat -------------------------------------------------------------------------------- /src/lib/tree/test/header.dat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/celestiaorg/blobstream-contracts/cfdc2a0b651f2d28fb5e10386f94d74944d4d5d4/src/lib/tree/test/header.dat -------------------------------------------------------------------------------- /src/lib/tree/test/proofs.json: 
-------------------------------------------------------------------------------- 1 | [{"start":8,"end":32,"nodes":["AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOLCUcGcDNOGgcYmOnu7snv+cn+3G+vkto91wnXa3kVQ","/////////////////////////////////////////////////////////////////////////////zmvU+iSdf6GDmfvDMVa0YqTan9iPIiX5UHyC8zhZkkf"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////xZ6gFJq4RO/FIE75WZbKQOZmS3FCVTEVM/dKR/kzDZz"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////5MEmpTXPlH6UVRm8X2csA+EaccobRIwYWyOF5ls5abx"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////3wn2BGIhHap4sg/oUMt6THYs/c8kj+mFXPoFL3NxC9I"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["//////////////////////////////////////////////////////////////////////////////vTJmXZEYdLpfIuqDC7XrkhCLaw6GE1Iz1EzwDecR57"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////+HJEAbaxVdhII24yEvjUz8rAJmA8T9ratZdZHUASsLe"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////4K3KpNnwd8qcf40yM88DYQ087APK4Kjc98+WAchu59l"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":32,"nodes":["/////////////////////////////////////////////////////////////////////////////5wXTQR9kfP9JmlezblQvri0MM7UccRF1qUZ/2ELgyMf"],"leaf_hash":"","is_max_namespace_ignored":true},{"start":0,"end":24,"nodes":["AAAAAAAAAAAAAAAAAAAAAAAAAAAABpbeXXkKu9gAAAAAAAAAAAAAAAAAAAAAAAAAAABnSAxKiMTRKTLbUSuhrp5YtSI7UOw4sUkTDR1mrRutu+xxGAq64vXA","/////////////////////////////////////////////////////////////////////////////9+uMDUChYyrcrRy2fQ9h15MuGF69AmtPLH3FGuTFCUX"],"leaf_hash":"","is_max_namespace_ignored":true}] -------------------------------------------------------------------------------- /src/lib/verifier/DAVerifier.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "openzeppelin-contracts/contracts/utils/cryptography/ECDSA.sol"; 5 | 6 | import "../../Constants.sol"; 7 | import "../../DataRootTuple.sol"; 8 | import "../../IDAOracle.sol"; 9 | import "../tree/binary/BinaryMerkleProof.sol"; 10 | import "../tree/binary/BinaryMerkleTree.sol"; 11 | import "../tree/namespace/NamespaceMerkleTree.sol"; 12 | import "../tree/Types.sol"; 13 | 14 | /// @notice Contains the necessary parameters to prove that some shares, which were posted to 15 | /// the Celestia network, were committed to by the Blobstream smart contract. 16 | struct SharesProof { 17 | // The shares that were committed to. 18 | bytes[] data; 19 | // The shares proof to the row roots. If the shares span multiple rows, we will have multiple nmt proofs. 20 | NamespaceMerkleMultiproof[] shareProofs; 21 | // The namespace of the shares. 22 | Namespace namespace; 23 | // The rows where the shares belong. If the shares span multiple rows, we will have multiple rows. 24 | NamespaceNode[] rowRoots; 25 | // The proofs of the rowRoots to the data root. 
26 | BinaryMerkleProof[] rowProofs;
27 | // The proof of the data root tuple to the data root tuple root that was posted to the Blobstream contract.
28 | AttestationProof attestationProof;
29 | }
30 | 
31 | /// @notice Contains the necessary parameters needed to verify that a data root tuple
32 | /// was committed to, by the Blobstream smart contract, at some specific nonce.
33 | struct AttestationProof {
34 | // the attestation nonce that commits to the data root tuple.
35 | uint256 tupleRootNonce;
36 | // the data root tuple that was committed to.
37 | DataRootTuple tuple;
38 | // the binary merkle proof of the tuple to the commitment.
39 | BinaryMerkleProof proof;
40 | }
41 | 
42 | /// @title DAVerifier: Celestia -> EVM, Data Availability verifier.
43 | /// @dev The DAVerifier verifies that some shares, which were posted on Celestia, were committed to
44 | /// by the Blobstream smart contract.
45 | library DAVerifier {
46 | /////////////////
47 | // Error codes //
48 | /////////////////
49 | 
50 | enum ErrorCodes {
51 | NoError,
52 | /// @notice The shares to the rows proof is invalid.
53 | InvalidSharesToRowsProof,
54 | /// @notice The rows to the data root proof is invalid.
55 | InvalidRowsToDataRootProof,
56 | /// @notice The row to the data root proof is invalid.
57 | InvalidRowToDataRootProof,
58 | /// @notice The data root tuple to the data root tuple root proof is invalid.
59 | InvalidDataRootTupleToDataRootTupleRootProof,
60 | /// @notice The number of share proofs isn't equal to the number of row roots.
61 | UnequalShareProofsAndRowRootsNumber,
62 | /// @notice The number of row proofs isn't equal to the number of row roots.
63 | UnequalRowProofsAndRowRootsNumber,
64 | /// @notice The verifier data length isn't equal to the number of shares in the shares proofs.
65 | UnequalDataLengthAndNumberOfSharesProofs,
66 | /// @notice The number of leaves in the binary merkle proof is not divisible by 4.
67 | InvalidNumberOfLeavesInProof,
68 | /// @notice The provided range is invalid.
69 | InvalidRange,
70 | /// @notice The provided range is out of bounds.
71 | OutOfBoundsRange
72 | }
73 | 
74 | ///////////////
75 | // Functions //
76 | ///////////////
77 | 
78 | /// @notice Verifies that the shares, which were posted to Celestia, were committed to by the Blobstream smart contract.
79 | /// @param _bridge The Blobstream smart contract instance.
80 | /// @param _sharesProof The proof of the shares to the data root tuple root.
81 | /// @return `true` if the proof is valid, `false` otherwise.
82 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
83 | function verifySharesToDataRootTupleRoot(IDAOracle _bridge, SharesProof memory _sharesProof)
84 | internal
85 | view
86 | returns (bool, ErrorCodes)
87 | {
88 | // checking that the data root was committed to by the Blobstream smart contract.
89 | (bool success, ErrorCodes errorCode) = verifyMultiRowRootsToDataRootTupleRoot(
90 | _bridge, _sharesProof.rowRoots, _sharesProof.rowProofs, _sharesProof.attestationProof
91 | );
92 | if (!success) {
93 | return (false, errorCode);
94 | }
95 | 
96 | (bool valid, ErrorCodes error) = verifySharesToDataRootTupleRootProof(
97 | _sharesProof.data,
98 | _sharesProof.shareProofs,
99 | _sharesProof.namespace,
100 | _sharesProof.rowRoots,
101 | _sharesProof.rowProofs,
102 | _sharesProof.attestationProof.tuple.dataRoot
103 | );
104 | 
105 | return (valid, error);
106 | }
107 | 
108 | /// @notice Verifies the shares to data root tuple root proof.
109 | /// NOTE: This doesn't authenticate the proof to Blobstream. It only verifies if the proof is valid.
110 | /// @param _data The data that needs to be proven.
111 | /// @param _shareProofs The share to the row roots proof.
112 | /// @param _namespace The namespace of the shares.
113 | /// @param _rowRoots The row roots where the shares belong.
114 | /// @param _rowProofs The proofs of the rowRoots to the data root.
115 | /// @param _root The data root of the block that contains the shares.
116 | /// @return `true` if the proof is valid, `false` otherwise.
117 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
118 | function verifySharesToDataRootTupleRootProof(
119 | bytes[] memory _data,
120 | NamespaceMerkleMultiproof[] memory _shareProofs,
121 | Namespace memory _namespace,
122 | NamespaceNode[] memory _rowRoots,
123 | BinaryMerkleProof[] memory _rowProofs,
124 | bytes32 _root
125 | ) internal pure returns (bool, ErrorCodes) {
126 | // verifying the row root to data root tuple root proof.
127 | (bool success, ErrorCodes errorCode) = verifyMultiRowRootsToDataRootTupleRootProof(_rowRoots, _rowProofs, _root);
128 | if (!success) {
129 | return (false, errorCode);
130 | }
131 | 
132 | // checking that the shares were committed to by the row roots.
133 | if (_shareProofs.length != _rowRoots.length) {
134 | return (false, ErrorCodes.UnequalShareProofsAndRowRootsNumber);
135 | }
136 | 
137 | uint256 numberOfSharesInProofs = 0;
138 | for (uint256 i = 0; i < _shareProofs.length; i++) {
139 | numberOfSharesInProofs += _shareProofs[i].endKey - _shareProofs[i].beginKey;
140 | }
141 | 
142 | if (_data.length != numberOfSharesInProofs) {
143 | return (false, ErrorCodes.UnequalDataLengthAndNumberOfSharesProofs);
144 | }
145 | 
146 | uint256 cursor = 0;
147 | for (uint256 i = 0; i < _shareProofs.length; i++) {
148 | uint256 sharesUsed = _shareProofs[i].endKey - _shareProofs[i].beginKey;
149 | (bytes[] memory s, ErrorCodes err) = slice(_data, cursor, cursor + sharesUsed);
150 | if (err != ErrorCodes.NoError) {
151 | return (false, err);
152 | }
153 | if (!NamespaceMerkleTree.verifyMulti(_rowRoots[i], _shareProofs[i], _namespace, s)) {
154 | return (false, ErrorCodes.InvalidSharesToRowsProof);
155 | }
156 | cursor += sharesUsed;
157 | }
158 | 
159 | return (true, ErrorCodes.NoError);
160 | }
161 | 
162 | /// @notice Verifies that a row/column root, from a Celestia block, was committed to by the Blobstream smart contract.
163 | /// @param _bridge The Blobstream smart contract instance.
164 | /// @param _rowRoot The row/column root to be proven.
165 | /// @param _rowProof The proof of the row/column root to the data root.
166 | /// @return `true` if the proof is valid, `false` otherwise.
167 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
168 | function verifyRowRootToDataRootTupleRoot(
169 | IDAOracle _bridge,
170 | NamespaceNode memory _rowRoot,
171 | BinaryMerkleProof memory _rowProof,
172 | AttestationProof memory _attestationProof
173 | ) internal view returns (bool, ErrorCodes) {
174 | // checking that the data root was committed to by the Blobstream smart contract
175 | if (
176 | !_bridge.verifyAttestation(
177 | _attestationProof.tupleRootNonce, _attestationProof.tuple, _attestationProof.proof
178 | )
179 | ) {
180 | return (false, ErrorCodes.InvalidDataRootTupleToDataRootTupleRootProof);
181 | }
182 | 
183 | (bool valid, ErrorCodes error) =
184 | verifyRowRootToDataRootTupleRootProof(_rowRoot, _rowProof, _attestationProof.tuple.dataRoot);
185 | 
186 | return (valid, error);
187 | }
188 | 
189 | /// @notice Verifies a row/column root proof, from a Celestia block, to the data root tuple root.
190 | /// NOTE: This doesn't authenticate the proof to Blobstream. It only verifies if the proof is valid.
191 | /// @param _rowRoot The row/column root to be proven.
192 | /// @param _rowProof The proof of the row/column root to the data root.
193 | /// @param _root The data root of the block that contains the row.
194 | /// @return `true` if the proof is valid, `false` otherwise.
195 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
196 | function verifyRowRootToDataRootTupleRootProof(
197 | NamespaceNode memory _rowRoot,
198 | BinaryMerkleProof memory _rowProof,
199 | bytes32 _root
200 | ) internal pure returns (bool, ErrorCodes) {
201 | bytes memory rowRoot = abi.encodePacked(_rowRoot.min.toBytes(), _rowRoot.max.toBytes(), _rowRoot.digest);
202 | (bool valid,) = BinaryMerkleTree.verify(_root, _rowProof, rowRoot);
203 | if (!valid) {
204 | return (false, ErrorCodes.InvalidRowToDataRootProof);
205 | }
206 | 
207 | return (true, ErrorCodes.NoError);
208 | }
209 | 
210 | /// @notice Verifies that a set of rows/columns, from a Celestia block, were committed to by the Blobstream smart contract.
211 | /// @param _bridge The Blobstream smart contract instance.
212 | /// @param _rowRoots The set of row/column roots to be proved.
213 | /// @param _rowProofs The set of proofs of the _rowRoots in the same order.
214 | /// @return `true` if the proof is valid, `false` otherwise.
215 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
216 | function verifyMultiRowRootsToDataRootTupleRoot(
217 | IDAOracle _bridge,
218 | NamespaceNode[] memory _rowRoots,
219 | BinaryMerkleProof[] memory _rowProofs,
220 | AttestationProof memory _attestationProof
221 | ) internal view returns (bool, ErrorCodes) {
222 | // checking that the data root was committed to by the Blobstream smart contract
223 | if (
224 | !_bridge.verifyAttestation(
225 | _attestationProof.tupleRootNonce, _attestationProof.tuple, _attestationProof.proof
226 | )
227 | ) {
228 | return (false, ErrorCodes.InvalidDataRootTupleToDataRootTupleRootProof);
229 | }
230 | 
231 | // checking that the row roots commit to the data root.
232 | (bool valid, ErrorCodes error) =
233 | verifyMultiRowRootsToDataRootTupleRootProof(_rowRoots, _rowProofs, _attestationProof.tuple.dataRoot);
234 | 
235 | return (valid, error);
236 | }
237 | 
238 | /// @notice Verifies the proof of a set of rows/columns, from a Celestia block, to their corresponding data root.
239 | /// NOTE: This doesn't authenticate the proof to Blobstream. It only verifies if the proof is valid.
240 | /// @param _rowRoots The set of row/column roots to be proved.
241 | /// @param _rowProofs The set of proofs of the _rowRoots in the same order.
242 | /// @param _root The data root of the block that contains the rows.
243 | /// @return `true` if the proof is valid, `false` otherwise.
244 | /// @return an error code if the proof is invalid, ErrorCodes.NoError otherwise.
245 | function verifyMultiRowRootsToDataRootTupleRootProof(
246 | NamespaceNode[] memory _rowRoots,
247 | BinaryMerkleProof[] memory _rowProofs,
248 | bytes32 _root
249 | ) internal pure returns (bool, ErrorCodes) {
250 | // checking that the row roots commit to the data root.
251 | if (_rowProofs.length != _rowRoots.length) {
252 | return (false, ErrorCodes.UnequalRowProofsAndRowRootsNumber);
253 | }
254 | 
255 | for (uint256 i = 0; i < _rowProofs.length; i++) {
256 | bytes memory rowRoot =
257 | abi.encodePacked(_rowRoots[i].min.toBytes(), _rowRoots[i].max.toBytes(), _rowRoots[i].digest);
258 | (bool valid,) = BinaryMerkleTree.verify(_root, _rowProofs[i], rowRoot);
259 | if (!valid) {
260 | return (false, ErrorCodes.InvalidRowsToDataRootProof);
261 | }
262 | }
263 | 
264 | return (true, ErrorCodes.NoError);
265 | }
266 | 
267 | /// @notice computes the Celestia block square size from a row/column root to data root binary merkle proof.
268 | /// The square size is the number of rows of the original square.
269 | /// Note: the provided proof is not authenticated to the Blobstream smart contract. It is the user's responsibility
270 | /// to verify that the proof is valid and was successfully committed to using
271 | /// the `DAVerifier.verifyRowRootToDataRootTupleRoot()` method.
272 | /// Note: the minimum square size is 1. Thus, we don't expect the proof to have a number of leaves equal to 0.
273 | /// @param _proof The proof of the row/column root to the data root.
274 | /// @return The square size of the corresponding block.
275 | /// @return an error code if the _proof is invalid, ErrorCodes.NoError otherwise.
276 | function computeSquareSizeFromRowProof(BinaryMerkleProof memory _proof)
277 | internal
278 | pure
279 | returns (uint256, ErrorCodes)
280 | {
281 | if (_proof.numLeaves % 4 != 0) {
282 | return (0, ErrorCodes.InvalidNumberOfLeavesInProof);
283 | }
284 | // we divide the number of leaves of the proof by 4 because the rows/columns tree is constructed
285 | // from the extended block row roots and column roots.
286 | return (_proof.numLeaves / 4, ErrorCodes.NoError);
287 | }
288 | 
289 | /// @notice computes the Celestia block square size from a shares to row/column root proof.
290 | /// The square size is the number of rows of the original square.
291 | /// Note: the provided proof is not authenticated to the Blobstream smart contract. It is the user's responsibility
292 | /// to verify that the proof is valid and that the shares were successfully committed to using
293 | /// the `DAVerifier.verifySharesToDataRootTupleRoot()` method.
294 | /// Note: the minimum square size is 1. Thus, we don't expect the proof to have an empty side nodes array.
295 | /// @param _proof The proof of the shares to the row/column root.
296 | /// @return The square size of the corresponding block.
297 | function computeSquareSizeFromShareProof(NamespaceMerkleMultiproof memory _proof) internal pure returns (uint256) {
298 | uint256 extendedSquareRowSize = 2 ** _proof.sideNodes.length;
299 | // we divide the extended square row size by 2 because the square size is
300 | // the size of a row of the original square.
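// Worked example of this formula: a share proof carrying 3 side nodes corresponds to an
// extended row of 2^3 = 8 shares, i.e. an original square size of 8 / 2 = 4.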
301 | return extendedSquareRowSize / 2; 302 | } 303 | 304 | /// @notice creates a slice of bytes from the data slice of bytes containing the elements 305 | /// that correspond to the provided range. 306 | /// It selects a half-open range which includes the begin element, but excludes the end one. 307 | /// @param _data The slice that we want to select data from. 308 | /// @param _begin The beginning of the range (inclusive). 309 | /// @param _end The ending of the range (exclusive). 310 | /// @return _ the sliced data. 311 | function slice(bytes[] memory _data, uint256 _begin, uint256 _end) 312 | internal 313 | pure 314 | returns (bytes[] memory, ErrorCodes) 315 | { 316 | if (_begin > _end) { 317 | return (_data, ErrorCodes.InvalidRange); 318 | } 319 | if (_begin > _data.length || _end > _data.length) { 320 | return (_data, ErrorCodes.OutOfBoundsRange); 321 | } 322 | bytes[] memory out = new bytes[](_end - _begin); 323 | for (uint256 i = _begin; i < _end; i++) { 324 | out[i - _begin] = _data[i]; 325 | } 326 | return (out, ErrorCodes.NoError); 327 | } 328 | } 329 | -------------------------------------------------------------------------------- /src/lib/verifier/test/DAVerifier.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "openzeppelin-contracts/contracts/utils/cryptography/ECDSA.sol"; 5 | 6 | import "../../../Constants.sol"; 7 | import "../../../DataRootTuple.sol"; 8 | import "../DAVerifier.sol"; 9 | import "../../../Blobstream.sol"; 10 | import "../../tree/binary/BinaryMerkleProof.sol"; 11 | import "../../tree/namespace/NamespaceMerkleMultiproof.sol"; 12 | import "../../tree/Types.sol"; 13 | 14 | import "ds-test/test.sol"; 15 | 16 | interface CheatCodes { 17 | function addr(uint256 privateKey) external returns (address); 18 | 19 | function sign(uint256 privateKey, bytes32 digest) external returns (uint8 v, bytes32 r, bytes32 s); 20 | } 21 | 22 | /* 23 | The data used to generate the proof: 24 | 25 | The block used contains a single share: 26 | 0x0000000000000000000000000000000000000000000000000000000001010000014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77336676616578657770687237687833333472766b67646b736d636537666b66122f63656c65737469613167616b61646d63386a73667873646c676e6d64643867773736346739796165776e32726d386d1a0e0a0475746961120631303030303012670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313cb624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252eceeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d98a458cd47a565a2bb08cc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 27 | 28 | The extended block is: 29 | 
0x0000000000000000000000000000000000000000000000000000000001010000014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77336676616578657770687237687833333472766b67646b736d636537666b66122f63656c65737469613167616b61646d63386a73667873646c676e6d64643867773736346739796165776e32726d386d1a0e0a0475746961120631303030303012670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313cb624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252eceeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d98a458cd47a565a2bb08cc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 30 | 0x0000000000000000000000000000000000000000000000000000000001010000014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77336676616578657770687237687833333472766b67646b736d636537666b66122f63656c65737469613167616b61646d63386a73667873646c676e6d64643867773736346739796165776e32726d386d1a0e0a0475746961120631303030303012670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313cb624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252eceeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d98a458cd47a565a2bb08cc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 31 | 0x0000000000000000000000000000000000000000000000000000000001010000014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77336676616578657770687237687833333472766b67646b736d636537666b66122f63656c65737469613167616b61646d63386a73667873646c676e6d64643867773736346739796165776e32726d386d1a0e0a0475746961120631303030303012670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313cb624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252eceeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d98a458cd47a565a2bb08cc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 32 | 
0x0000000000000000000000000000000000000000000000000000000001010000014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77336676616578657770687237687833333472766b67646b736d636537666b66122f63656c65737469613167616b61646d63386a73667873646c676e6d64643867773736346739796165776e32726d386d1a0e0a0475746961120631303030303012670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b312e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313cb624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252eceeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d98a458cd47a565a2bb08cc0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 33 | 34 | The row roots: 35 | 0x00000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000001787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69 36 | 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae 37 | 38 | The column roots: 39 | 0x00000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000001787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69 40 | 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae 41 | 42 | The data root: 0x55cfc29fc0cd263906122d5cb859091224495b141fc0c51529612d7ab8962950 43 | 44 | The height: 3 45 | 46 | The blocks data roots used to create the commitment: 47 | 1. 0x3d96b7d238e7e0456f6af8e7cdf0a67bd6cf9c2089ecb559c659dcaa1f880353 48 | 2. 0x3d96b7d238e7e0456f6af8e7cdf0a67bd6cf9c2089ecb559c659dcaa1f880353 49 | 3. 0x55cfc29fc0cd263906122d5cb859091224495b141fc0c51529612d7ab8962950 50 | 4. 0x3d96b7d238e7e0456f6af8e7cdf0a67bd6cf9c2089ecb559c659dcaa1f880353 51 | 52 | The nonce: 2 53 | 54 | The data root tuple root: 0xf89859a09c0f2b1bbb039618d0fe60432b8c247f7ccde97814655f2acffb3434 55 | */ 56 | 57 | contract DAVerifierTest is DSTest { 58 | // Private keys used for test signatures. 59 | uint256 constant testPriv1 = 0x64a1d6f0e760a8d62b4afdde4096f16f51b401eaaecc915740f71770ea76a8ad; 60 | 61 | Blobstream bridge; 62 | TestFixture fixture; 63 | 64 | Validator[] private validators; 65 | uint256 private votingPower = 5000; 66 | 67 | // Set up Foundry cheatcodes. 68 | CheatCodes cheats = CheatCodes(HEVM_ADDRESS); 69 | 70 | // deploy a Blobstream contract and submit the following: 71 | // - initial valset. 72 | // - data root tuple root that commits to the proofs tested below. 
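// The data root tuple root above commits to the four data roots listed at the top of this file:
// each leaf is an encoded tuple, `abi.encode(DataRootTuple(height, dataRoot))`, and the tuple for
// height 3 sits at leaf index 2 out of 4 leaves, which is why `getDataRootTupleProof()` in the
// fixture below returns `BinaryMerkleProof(dataRootProofSideNodes, 2, 4)`.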
73 | function setUp() public { 74 | fixture = new TestFixture(); 75 | 76 | uint256 initialValsetNonce = 1; 77 | 78 | validators.push(Validator(cheats.addr(testPriv1), votingPower)); 79 | bytes32 hash = computeValidatorSetHash(validators); 80 | bytes32 checkpoint = domainSeparateValidatorSetHash(initialValsetNonce, (2 * votingPower) / 3, hash); 81 | bridge = new Blobstream(); 82 | bridge.initialize(initialValsetNonce, (2 * votingPower) / 3, checkpoint); 83 | 84 | bytes32 newDataRootTupleRoot = 85 | domainSeparateDataRootTupleRoot(fixture.dataRootTupleRootNonce(), fixture.dataRootTupleRoot()); 86 | 87 | // Signature for the update. 88 | Signature[] memory sigs = new Signature[](1); 89 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newDataRootTupleRoot); 90 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(testPriv1, digest_eip191); 91 | sigs[0] = Signature(v, r, s); 92 | 93 | Validator[] memory valSet = new Validator[](1); 94 | valSet[0] = Validator(cheats.addr(testPriv1), votingPower); 95 | 96 | bridge.submitDataRootTupleRoot( 97 | fixture.dataRootTupleRootNonce(), initialValsetNonce, fixture.dataRootTupleRoot(), valSet, sigs 98 | ); 99 | 100 | assertEq(bridge.state_eventNonce(), fixture.dataRootTupleRootNonce()); 101 | assertEq(bridge.state_dataRootTupleRoots(fixture.dataRootTupleRootNonce()), fixture.dataRootTupleRoot()); 102 | 103 | assertTrue( 104 | bridge.verifyAttestation( 105 | fixture.dataRootTupleRootNonce(), fixture.getDataRootTuple(), fixture.getDataRootTupleProof() 106 | ) 107 | ); 108 | } 109 | 110 | function testVerifySharesToDataRootTupleRoot() public { 111 | bytes[] memory _data = new bytes[](1); 112 | _data[0] = fixture.shareData(); 113 | 114 | NamespaceMerkleMultiproof[] memory _shareProofs = new NamespaceMerkleMultiproof[](1); 115 | _shareProofs[0] = fixture.getShareToRowRootProof(); 116 | 117 | NamespaceNode[] memory _rowRoots = new NamespaceNode[](1); 118 | _rowRoots[0] = fixture.getFirstRowRootNode(); 119 | 120 | BinaryMerkleProof[] memory _rowProofs = new BinaryMerkleProof[](1); 121 | _rowProofs[0] = fixture.getRowRootToDataRootProof(); 122 | 123 | AttestationProof memory attestationProof = AttestationProof( 124 | fixture.dataRootTupleRootNonce(), fixture.getDataRootTuple(), fixture.getDataRootTupleProof() 125 | ); 126 | SharesProof memory sharesProof = 127 | SharesProof(_data, _shareProofs, fixture.getNamespace(), _rowRoots, _rowProofs, attestationProof); 128 | 129 | (bool valid, DAVerifier.ErrorCodes errorCode) = DAVerifier.verifySharesToDataRootTupleRoot(bridge, sharesProof); 130 | assertTrue(valid); 131 | assertEq(uint8(errorCode), uint8(DAVerifier.ErrorCodes.NoError)); 132 | } 133 | 134 | function testVerifyRowRootToDataRootTupleRoot() public { 135 | AttestationProof memory attestationProof = AttestationProof( 136 | fixture.dataRootTupleRootNonce(), fixture.getDataRootTuple(), fixture.getDataRootTupleProof() 137 | ); 138 | 139 | (bool valid, DAVerifier.ErrorCodes errorCode) = DAVerifier.verifyRowRootToDataRootTupleRoot( 140 | bridge, fixture.getFirstRowRootNode(), fixture.getRowRootToDataRootProof(), attestationProof 141 | ); 142 | assertTrue(valid); 143 | assertEq(uint8(errorCode), uint8(DAVerifier.ErrorCodes.NoError)); 144 | } 145 | 146 | function testVerifyMultiRowRootsToDataRootTupleRoot() public { 147 | NamespaceNode[] memory _rowRoots = new NamespaceNode[](1); 148 | _rowRoots[0] = fixture.getFirstRowRootNode(); 149 | 150 | BinaryMerkleProof[] memory _rowProofs = new BinaryMerkleProof[](1); 151 | _rowProofs[0] = fixture.getRowRootToDataRootProof(); 152 
| 153 | AttestationProof memory attestationProof = AttestationProof( 154 | fixture.dataRootTupleRootNonce(), fixture.getDataRootTuple(), fixture.getDataRootTupleProof() 155 | ); 156 | 157 | (bool valid, DAVerifier.ErrorCodes errorCode) = 158 | DAVerifier.verifyMultiRowRootsToDataRootTupleRoot(bridge, _rowRoots, _rowProofs, attestationProof); 159 | assertTrue(valid); 160 | assertEq(uint8(errorCode), uint8(DAVerifier.ErrorCodes.NoError)); 161 | } 162 | 163 | function testComputeSquareSizeFromRowProof() public { 164 | (bool validMerkleProof, BinaryMerkleTree.ErrorCodes error) = 165 | BinaryMerkleTree.verify(fixture.dataRoot(), fixture.getRowRootToDataRootProof(), fixture.firstRowRoot()); 166 | assertEq(uint256(error), uint256(BinaryMerkleTree.ErrorCodes.NoError)); 167 | assertTrue(validMerkleProof); 168 | 169 | // check that the computed square size is correct 170 | uint256 expectedSquareSize = 1; 171 | (uint256 actualSquareSize, DAVerifier.ErrorCodes errorCode) = 172 | DAVerifier.computeSquareSizeFromRowProof(fixture.getRowRootToDataRootProof()); 173 | assertEq(actualSquareSize, expectedSquareSize); 174 | assertEq(uint8(errorCode), uint8(DAVerifier.ErrorCodes.NoError)); 175 | } 176 | 177 | function testComputeSquareSizeFromShareProof() public { 178 | bytes[] memory _data = new bytes[](1); 179 | _data[0] = fixture.shareData(); 180 | 181 | // check that the merkle proof is valid 182 | bool validMerkleProof = NamespaceMerkleTree.verifyMulti( 183 | fixture.getFirstRowRootNode(), fixture.getShareToRowRootProof(), fixture.getNamespace(), _data 184 | ); 185 | assertTrue(validMerkleProof); 186 | 187 | // check that the computed square size is correct 188 | uint256 expectedSquareSize = 1; 189 | uint256 actualSquareSize = DAVerifier.computeSquareSizeFromShareProof(fixture.getShareToRowRootProof()); 190 | assertEq(actualSquareSize, expectedSquareSize); 191 | } 192 | 193 | function testValidSlice() public { 194 | bytes[] memory data = new bytes[](4); 195 | data[0] = "a"; 196 | data[1] = "b"; 197 | data[2] = "c"; 198 | data[3] = "d"; 199 | 200 | (bytes[] memory result, DAVerifier.ErrorCodes error) = DAVerifier.slice(data, 1, 3); 201 | 202 | assertEq(uint256(error), uint256(DAVerifier.ErrorCodes.NoError)); 203 | assertEq(string(result[0]), string(data[1])); 204 | assertEq(string(result[1]), string(data[2])); 205 | } 206 | 207 | function testInvalidSliceBeginEnd() public { 208 | bytes[] memory data = new bytes[](4); 209 | data[0] = "a"; 210 | data[1] = "b"; 211 | data[2] = "c"; 212 | data[3] = "d"; 213 | 214 | (bytes[] memory result, DAVerifier.ErrorCodes error) = DAVerifier.slice(data, 2, 1); 215 | 216 | assertEq(uint256(error), uint256(DAVerifier.ErrorCodes.InvalidRange)); 217 | } 218 | 219 | function testOutOfBoundsSlice() public { 220 | bytes[] memory data = new bytes[](4); 221 | data[0] = "a"; 222 | data[1] = "b"; 223 | data[2] = "c"; 224 | data[3] = "d"; 225 | 226 | (bytes[] memory result, DAVerifier.ErrorCodes error) = DAVerifier.slice(data, 2, 5); 227 | assertEq(uint256(error), uint256(DAVerifier.ErrorCodes.OutOfBoundsRange)); 228 | 229 | (result, error) = DAVerifier.slice(data, 6, 8); 230 | assertEq(uint256(error), uint256(DAVerifier.ErrorCodes.OutOfBoundsRange)); 231 | } 232 | 233 | function computeValidatorSetHash(Validator[] memory _validators) private pure returns (bytes32) { 234 | return keccak256(abi.encode(_validators)); 235 | } 236 | 237 | function domainSeparateValidatorSetHash(uint256 _nonce, uint256 _powerThreshold, bytes32 _validatorSetHash) 238 | private 239 | pure 240 | returns 
(bytes32) 241 | { 242 | bytes32 c = 243 | keccak256(abi.encode(VALIDATOR_SET_HASH_DOMAIN_SEPARATOR, _nonce, _powerThreshold, _validatorSetHash)); 244 | 245 | return c; 246 | } 247 | 248 | function domainSeparateDataRootTupleRoot(uint256 _nonce, bytes32 _dataRootTupleRoot) 249 | private 250 | pure 251 | returns (bytes32) 252 | { 253 | bytes32 c = keccak256(abi.encode(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, _nonce, _dataRootTupleRoot)); 254 | 255 | return c; 256 | } 257 | } 258 | 259 | /// @title TestFixture contains the necessary information to create proofs for the token 260 | /// transfer transaction that happened on Celestia. It represents the data mentioned in 261 | /// the comment at the beginning of this file. 262 | contract TestFixture { 263 | /// @notice the share containing the token transfer transaction on Celestia. 264 | bytes public shareData = abi.encodePacked( 265 | hex"0000000000000000000000000000000000000000000000000000000001010000", 266 | hex"014500000026c3020a95010a92010a1c2f636f736d6f732e62616e6b2e763162", 267 | hex"657461312e4d736753656e6412720a2f63656c657374696131746b376c776a77", 268 | hex"336676616578657770687237687833333472766b67646b736d636537666b6612", 269 | hex"2f63656c65737469613167616b61646d63386a73667873646c676e6d64643867", 270 | hex"773736346739796165776e32726d386d1a0e0a04757469611206313030303030", 271 | hex"12670a500a460a1f2f636f736d6f732e63727970746f2e736563703235366b31", 272 | hex"2e5075624b657912230a2103f3e16481ff7c9c2a677f08a30a887e5f9c14313c", 273 | hex"b624b8c5f7f955d143c81d9212040a020801180112130a0d0a04757469611205", 274 | hex"323230303010d0e80c1a4068f074601f1bb923f6d6e69d2e3fc3af145c9252ec", 275 | hex"eeb0ac4fba9f661ca0428326f0080478cc969129c0074c3d97ae925de34c5f9d", 276 | hex"98a458cd47a565a2bb08cc000000000000000000000000000000000000000000", 277 | hex"0000000000000000000000000000000000000000000000000000000000000000", 278 | hex"0000000000000000000000000000000000000000000000000000000000000000", 279 | hex"0000000000000000000000000000000000000000000000000000000000000000", 280 | hex"0000000000000000000000000000000000000000000000000000000000000000" 281 | ); 282 | 283 | /// @notice the first EDS row root. 284 | bytes public firstRowRoot = abi.encodePacked( 285 | hex"00000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000001787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69" 286 | ); 287 | 288 | /// @notice the second EDS row root. 289 | bytes public secondRowRoot = abi.encodePacked( 290 | hex"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae" 291 | ); 292 | 293 | /// @notice the first EDS column root. 294 | bytes public firstColumnRoot = abi.encodePacked( 295 | hex"00000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000001787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69" 296 | ); 297 | 298 | /// @notice the second EDS column root. 299 | bytes public secondColumnRoot = abi.encodePacked( 300 | hex"ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae" 301 | ); 302 | 303 | /// @notice the data root of the block containing the token transfer transaction. 
304 | bytes32 public dataRoot = 0x55cfc29fc0cd263906122d5cb859091224495b141fc0c51529612d7ab8962950; 305 | 306 | /// @notice the height of the block containing the submitted token transfer transaction. 307 | uint256 public height = 3; 308 | 309 | /// @notice the data root tuple root committing to the Celestia block. 310 | bytes32 public dataRootTupleRoot = 0xf89859a09c0f2b1bbb039618d0fe60432b8c247f7ccde97814655f2acffb3434; 311 | 312 | /// @notice the data root tuple root nonce in the Blobstream contract. 313 | uint256 public dataRootTupleRootNonce = 2; 314 | 315 | /// @notice the data root tuple to data root tuple root proof side nodes. 316 | bytes32[] public dataRootProofSideNodes = [ 317 | bytes32(0xb5d4d27ec6b206a205bf09dde3371ffba62e5b53d27bbec4255b7f4f27ef5d90), 318 | bytes32(0x406e22ba94989ca721453057a1391fc531edb342c86a0ab4cc722276b54036ec) 319 | ]; 320 | 321 | /// @notice shares to data root proof side nodes. 322 | NamespaceNode[] public shareToDataRootProofSideNodes = [ 323 | NamespaceNode( 324 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 325 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 326 | 0x0ec8148c743a4a4db384f40f487cae2fd1ca0d18442d1f162916bdf1cc61b679 327 | ) 328 | ]; 329 | 330 | /// @notice row root to data root proof side nodes. 331 | bytes32[] public rowRootToDataRootProofSideNodes = [ 332 | bytes32(0x5bc0cf3322dd5c9141a2dcd76947882351690c9aec61015802efc6742992643f), 333 | bytes32(0xff576381b02abadc50e414f6b4efcae31091cd40a5aba75f56be52d1bb2efcae) 334 | ]; 335 | 336 | /// @notice the share's namespace. 337 | function getNamespace() public pure returns (Namespace memory) { 338 | return Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000001); 339 | } 340 | 341 | /// @notice the data root tuple of the block containing the token transfer transaction. 342 | function getDataRootTuple() public view returns (DataRootTuple memory) { 343 | return DataRootTuple(height, dataRoot); 344 | } 345 | 346 | /// @notice the data root tuple to data root tuple root proof. 347 | function getDataRootTupleProof() public view returns (BinaryMerkleProof memory) { 348 | return BinaryMerkleProof(dataRootProofSideNodes, 2, 4); 349 | } 350 | 351 | /// @notice the first EDS row root. 352 | function getFirstRowRootNode() public pure returns (NamespaceNode memory) { 353 | return NamespaceNode( 354 | Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000001), 355 | Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000001), 356 | 0x787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69 357 | ); 358 | } 359 | 360 | /// @notice the second EDS row root. 361 | function getSecondRowRootNode() public pure returns (NamespaceNode memory) { 362 | return NamespaceNode( 363 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 364 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 365 | 0x7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae 366 | ); 367 | } 368 | 369 | /// @notice the first EDS column root. 370 | function getFirstColumnRootNode() public pure returns (NamespaceNode memory) { 371 | return NamespaceNode( 372 | Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000001), 373 | Namespace(0x00, 0x00000000000000000000000000000000000000000000000000000001), 374 | 0x787bf77b567506b6e1d0048bfd89edd352a4fbc102e62f07cc9fe6b4cbe5ee69 375 | ); 376 | } 377 | 378 | /// @notice the second EDS column root. 
379 | function getSecondColumnRootNode() public pure returns (NamespaceNode memory) { 380 | return NamespaceNode( 381 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 382 | Namespace(0xff, 0xffffffffffffffffffffffffffffffffffffffffffffffffffffffff), 383 | 0x7329c7d336d0140840837fc0d8eafa2403f4f6b019b602581cd9f04e28026eae 384 | ); 385 | } 386 | 387 | /// @notice shares to row root proof. 388 | function getShareToRowRootProof() public view returns (NamespaceMerkleMultiproof memory) { 389 | return NamespaceMerkleMultiproof(0, 1, shareToDataRootProofSideNodes); 390 | } 391 | 392 | /// @notice row root to data root proof. 393 | function getRowRootToDataRootProof() public view returns (BinaryMerkleProof memory) { 394 | return BinaryMerkleProof(rowRootToDataRootProofSideNodes, 0, 4); 395 | } 396 | } 397 | -------------------------------------------------------------------------------- /src/test/Blobstream.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.22; 3 | 4 | import "openzeppelin-contracts/contracts/utils/cryptography/ECDSA.sol"; 5 | 6 | import "../Constants.sol"; 7 | import "../DataRootTuple.sol"; 8 | import "../Blobstream.sol"; 9 | import "../lib/tree/binary/BinaryMerkleProof.sol"; 10 | 11 | import "ds-test/test.sol"; 12 | 13 | interface CheatCodes { 14 | function addr(uint256 privateKey) external returns (address); 15 | 16 | function sign(uint256 privateKey, bytes32 digest) external returns (uint8 v, bytes32 r, bytes32 s); 17 | } 18 | 19 | contract RelayerTest is DSTest { 20 | // Private keys used for test signatures. 21 | uint256 constant testPriv1 = 0x64a1d6f0e760a8d62b4afdde4096f16f51b401eaaecc915740f71770ea76a8ad; 22 | uint256 constant testPriv2 = 0x6e8bdfa979ab645b41c4d17cb1329b2a44684c82b61b1b060ea9b6e1c927a4f4; 23 | 24 | Blobstream bridge; 25 | 26 | Validator[] private validators; 27 | uint256 private votingPower = 5000; 28 | 29 | // Set up Foundry cheatcodes. 30 | CheatCodes cheats = CheatCodes(HEVM_ADDRESS); 31 | 32 | function setUp() public { 33 | uint256 initialValsetNonce = 1; 34 | 35 | validators.push(Validator(cheats.addr(testPriv1), votingPower)); 36 | bytes32 hash = computeValidatorSetHash(validators); 37 | bytes32 validatorSetCheckpoint = domainSeparateValidatorSetHash(initialValsetNonce, (2 * votingPower) / 3, hash); 38 | bridge = new Blobstream(); 39 | bridge.initialize(initialValsetNonce, (2 * votingPower) / 3, validatorSetCheckpoint); 40 | } 41 | 42 | function testUpdateValidatorSet() public { 43 | uint256 initialValsetNonce = 1; 44 | 45 | // Save the old test validator set before we add to it. 46 | Validator[] memory oldVS = new Validator[](1); 47 | oldVS[0] = Validator(cheats.addr(testPriv1), votingPower); 48 | 49 | uint256 newNonce = 2; 50 | validators.push(Validator(cheats.addr(testPriv2), votingPower)); 51 | votingPower += votingPower; 52 | uint256 newPowerThreshold = (2 * votingPower) / 3; 53 | bytes32 newVSHash = keccak256(abi.encode(validators)); 54 | bytes32 newCheckpoint = domainSeparateValidatorSetHash(newNonce, newPowerThreshold, newVSHash); 55 | 56 | // Signature for the first validator set update. 
57 | Signature[] memory sigs = new Signature[](1); 58 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newCheckpoint); 59 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(testPriv1, digest_eip191); 60 | sigs[0] = Signature(v, r, s); 61 | 62 | bridge.updateValidatorSet(newNonce, initialValsetNonce, newPowerThreshold, newVSHash, oldVS, sigs); 63 | 64 | assertEq(bridge.state_eventNonce(), newNonce); 65 | assertEq(bridge.state_powerThreshold(), newPowerThreshold); 66 | assertEq(bridge.state_lastValidatorSetCheckpoint(), newCheckpoint); 67 | } 68 | 69 | function testSubmitDataRootTupleRoot() public { 70 | uint256 initialValsetNonce = 1; 71 | uint256 nonce = 2; 72 | // 32 bytes, chosen at random. 73 | bytes32 newTupleRoot = 0x0de92bac0b356560d821f8e7b6f5c9fe4f3f88f6c822283efd7ab51ad56a640e; 74 | 75 | bytes32 newDataRootTupleRoot = domainSeparateDataRootTupleRoot(nonce, newTupleRoot); 76 | 77 | // Signature for the update. 78 | Signature[] memory sigs = new Signature[](1); 79 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newDataRootTupleRoot); 80 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(testPriv1, digest_eip191); 81 | sigs[0] = Signature(v, r, s); 82 | 83 | Validator[] memory valSet = new Validator[](1); 84 | valSet[0] = Validator(cheats.addr(testPriv1), votingPower); 85 | 86 | bridge.submitDataRootTupleRoot(nonce, initialValsetNonce, newTupleRoot, valSet, sigs); 87 | 88 | assertEq(bridge.state_eventNonce(), nonce); 89 | assertEq(bridge.state_dataRootTupleRoots(nonce), newTupleRoot); 90 | } 91 | 92 | function testDeployContractAtCustomNonce() public { 93 | uint256 initialValsetNonce = 1; 94 | uint256 targetNonce = 200; 95 | 96 | Validator[] memory valSet = new Validator[](1); 97 | valSet[0] = Validator(cheats.addr(testPriv1), votingPower); 98 | 99 | bytes32 hash = computeValidatorSetHash(valSet); 100 | bytes32 validatorSetCheckpoint = domainSeparateValidatorSetHash(initialValsetNonce, (2 * votingPower) / 3, hash); 101 | Blobstream newBridge = new Blobstream(); 102 | newBridge.initialize(targetNonce, (2 * votingPower) / 3, validatorSetCheckpoint); 103 | 104 | // 32 bytes, chosen at random. 105 | bytes32 newTupleRoot = 0x0de92bac0b356560d821f8e7b6f5c9fe4f3f88f6c822283efd7ab51ad56a640e; 106 | 107 | bytes32 newDataRootTupleRoot = domainSeparateDataRootTupleRoot(targetNonce + 1, newTupleRoot); 108 | 109 | // Signature for the update. 
110 | Signature[] memory sigs = new Signature[](1); 111 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newDataRootTupleRoot); 112 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(testPriv1, digest_eip191); 113 | sigs[0] = Signature(v, r, s); 114 | 115 | newBridge.submitDataRootTupleRoot(targetNonce + 1, initialValsetNonce, newTupleRoot, valSet, sigs); 116 | 117 | assertEq(newBridge.state_eventNonce(), targetNonce + 1); 118 | assertEq(newBridge.state_dataRootTupleRoots(targetNonce + 1), newTupleRoot); 119 | } 120 | 121 | /* 122 | the values used in the verify attestation test are the block height padded to 32 bytes followed by 123 | the 32-byte data root, i.e. an encoded `abi.encode(DataRootTuple)`: 124 | 125 | 0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 126 | 0x00000000000000000000000000000000000000000000000000000000000000010101010101010101010101010101010101010101010101010101010101010101 127 | 0x00000000000000000000000000000000000000000000000000000000000000020202020202020202020202020202020202020202020202020202020202020202 128 | 0x00000000000000000000000000000000000000000000000000000000000000030303030303030303030303030303030303030303030303030303030303030303 129 | */ 130 | function testVerifyAttestation() public { 131 | uint256 initialValsetNonce = 1; 132 | // data root tuple root nonce. 133 | uint256 nonce = 2; 134 | // commitment to a set of roots. 135 | // these values were generated using the tendermint implementation of binary merkle trees: 136 | // https://github.com/celestiaorg/celestia-core/blob/60310e7aa554bb76b735a010847a6613bcfe06e8/crypto/merkle/proof.go#L33-L48 137 | bytes32 newTupleRoot = 0x82dc1607d84557d3579ce602a45f5872e821c36dbda7ec926dfa17ebc8d5c013; 138 | // a data root committed to by the above tuple root. 139 | bytes32 newTuple = 0x0101010101010101010101010101010101010101010101010101010101010101; 140 | // the height of the data root. 141 | uint256 height = 1; 142 | // the binary merkle proof of the data root to the data root tuple root. 143 | bytes32[] memory sideNodes = new bytes32[](2); 144 | sideNodes[0] = 0x98ce42deef51d40269d542f5314bef2c7468d401ad5d85168bfab4c0108f75f7; 145 | sideNodes[1] = 0x575664048c9e64260eca2304d177b11d1566d0c954f1417fc76a4f9f27350063; 146 | BinaryMerkleProof memory newTupleProof; 147 | newTupleProof.sideNodes = sideNodes; 148 | newTupleProof.key = 1; 149 | newTupleProof.numLeaves = 4; 150 | 151 | bytes32 newDataRootTupleRoot = domainSeparateDataRootTupleRoot(nonce, newTupleRoot); 152 | 153 | // Signature for the update.
154 | Signature[] memory sigs = new Signature[](1); 155 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newDataRootTupleRoot); 156 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(testPriv1, digest_eip191); 157 | sigs[0] = Signature(v, r, s); 158 | 159 | Validator[] memory valSet = new Validator[](1); 160 | valSet[0] = Validator(cheats.addr(testPriv1), votingPower); 161 | 162 | bridge.submitDataRootTupleRoot(nonce, initialValsetNonce, newTupleRoot, valSet, sigs); 163 | 164 | assertEq(bridge.state_eventNonce(), nonce); 165 | assertEq(bridge.state_dataRootTupleRoots(nonce), newTupleRoot); 166 | 167 | DataRootTuple memory t; 168 | t.height = height; 169 | t.dataRoot = newTuple; 170 | 171 | // verify that the tuple was committed to 172 | bool committedTo = bridge.verifyAttestation(nonce, t, newTupleProof); 173 | assertTrue(committedTo); 174 | } 175 | 176 | function computeValidatorSetHash(Validator[] memory _validators) private pure returns (bytes32) { 177 | return keccak256(abi.encode(_validators)); 178 | } 179 | 180 | function domainSeparateValidatorSetHash(uint256 _nonce, uint256 _powerThreshold, bytes32 _validatorSetHash) 181 | private 182 | pure 183 | returns (bytes32) 184 | { 185 | bytes32 c = 186 | keccak256(abi.encode(VALIDATOR_SET_HASH_DOMAIN_SEPARATOR, _nonce, _powerThreshold, _validatorSetHash)); 187 | 188 | return c; 189 | } 190 | 191 | function domainSeparateDataRootTupleRoot(uint256 _nonce, bytes32 _dataRootTupleRoot) 192 | private 193 | pure 194 | returns (bytes32) 195 | { 196 | bytes32 c = keccak256(abi.encode(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, _nonce, _dataRootTupleRoot)); 197 | 198 | return c; 199 | } 200 | } 201 | -------------------------------------------------------------------------------- /src/test/BlobstreamBenchmark.t.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: Apache-2.0 2 | pragma solidity ^0.8.19; 3 | 4 | import "openzeppelin-contracts/contracts/utils/cryptography/ECDSA.sol"; 5 | 6 | import "../Constants.sol"; 7 | import "../DataRootTuple.sol"; 8 | import "../Blobstream.sol"; 9 | import "../lib/tree/binary/BinaryMerkleProof.sol"; 10 | 11 | import "ds-test/test.sol"; 12 | 13 | interface CheatCodes { 14 | function addr(uint256 privateKey) external returns (address); 15 | function sign(uint256 privateKey, bytes32 digest) external returns (uint8 v, bytes32 r, bytes32 s); 16 | function deriveKey(string calldata, string calldata, uint32) external returns (uint256); 17 | } 18 | 19 | /// @notice Example command to run the benchmark: 20 | /// `forge test --match-test testBenchmarkSubmitDataRootTupleRoot -vvvvvv --gas-report`. 21 | /// To change the validator set size, change the `numberOfValidators` constant. 22 | /// To make custom calculations of the gas, you can use the `gasleft()` solidity 23 | /// built-in function. 24 | /// The following answer has some insights on using that: 25 | /// https://ethereum.stackexchange.com/a/132325/65649 26 | /// The gas estimations might not be accurate to the real cost in a real network, 27 | /// and that's because foundry doesn't track calldata cost. source: 28 | /// https://github.com/foundry-rs/foundry/issues/3475#issuecomment-1469940917 29 | /// To have accurate results, make sure to add the following costs: 30 | /// A byte of calldata costs either 4 gas (if it is zero) or 16 gas (if it is any other value). 
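/// As an illustrative sketch (the `calldataGas` helper below is hypothetical and not part of this
/// contract), the calldata adjustment over the ABI-encoded `submitDataRootTupleRoot` call could
/// look like:
///
///     function calldataGas(bytes memory callData) internal pure returns (uint256 total) {
///         for (uint256 i = 0; i < callData.length; i++) {
///             // 4 gas per zero byte and 16 gas per non-zero byte of calldata.
///             total += callData[i] == bytes1(0) ? 4 : 16;
///         }
///     }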
31 | contract Benchmark is DSTest { 32 | uint256 private constant numberOfValidators = 100; 33 | uint256 private constant numberOfSigners = 30; 34 | 35 | // Private keys used for test signatures. 36 | uint256[] private privateKeys; 37 | 38 | Blobstream private bridge; 39 | 40 | Validator[] private validators; 41 | uint256 private totalValidatorPower = 1000000; 42 | uint256 private dataTupleRootNonce = 0; 43 | 44 | // Set up Foundry cheatcodes. 45 | CheatCodes cheats = CheatCodes(HEVM_ADDRESS); 46 | 47 | function setUp() public { 48 | uint256 initialValsetNonce = 0; 49 | privateKeys = derivePrivateKeys(numberOfValidators); 50 | validators = initializeValidators(privateKeys); 51 | 52 | bytes32 hash = computeValidatorSetHash(validators); 53 | bytes32 checkpoint = domainSeparateValidatorSetHash(initialValsetNonce, (2 * totalValidatorPower) / 3, hash); 54 | bridge = new Blobstream(); 55 | bridge.initialize(initialValsetNonce, (2 * totalValidatorPower) / 3, checkpoint); 56 | } 57 | 58 | function testBenchmarkSubmitDataRootTupleRoot() public { 59 | uint256 initialValsetNonce = 0; 60 | uint256 nonce = 1; 61 | 62 | // 32 bytes, chosen at random. 63 | bytes32 newTupleRoot = 0x0de92bac0b356560d821f8e7b6f5c9fe4f3f88f6c822283efd7ab51ad56a640e; 64 | bytes32 newDataRootTupleRoot = domainSeparateDataRootTupleRoot(nonce, newTupleRoot); 65 | 66 | // Signature for the update. 67 | Signature[] memory sigs = new Signature[](numberOfValidators); 68 | bytes32 digest_eip191 = ECDSA.toEthSignedMessageHash(newDataRootTupleRoot); 69 | uint256 threshold = 2 * totalValidatorPower / 3; 70 | uint256 cumulatedPower = 0; 71 | for (uint256 i = 0; i < numberOfValidators; i++) { 72 | if (cumulatedPower > threshold) { 73 | break; 74 | } 75 | (uint8 v, bytes32 r, bytes32 s) = cheats.sign(privateKeys[i], digest_eip191); 76 | sigs[i] = Signature(v, r, s); 77 | cumulatedPower += validators[i].power; 78 | } 79 | 80 | // these are called here so that they're part of the gas report. 
81 | // uint256 currentPowerThreshold = (2 * votingPower * numberOfValidators) / 3; 82 | // bytes32 currentValidatorSetHash = bridge.computeValidatorSetHash(validators); 83 | // bridge.domainSeparateValidatorSetHash(nonce, currentPowerThreshold, currentValidatorSetHash); 84 | // bridge.checkValidatorSignatures(validators, sigs, newDataRootTupleRoot, currentPowerThreshold); 85 | 86 | bridge.submitDataRootTupleRoot(nonce, initialValsetNonce, newTupleRoot, validators, sigs); 87 | } 88 | 89 | function computeValidatorSetHash(Validator[] memory _validators) private pure returns (bytes32) { 90 | return keccak256(abi.encode(_validators)); 91 | } 92 | 93 | function domainSeparateValidatorSetHash(uint256 _nonce, uint256 _powerThreshold, bytes32 _validatorSetHash) 94 | private 95 | pure 96 | returns (bytes32) 97 | { 98 | bytes32 c = 99 | keccak256(abi.encode(VALIDATOR_SET_HASH_DOMAIN_SEPARATOR, _nonce, _powerThreshold, _validatorSetHash)); 100 | 101 | return c; 102 | } 103 | 104 | function domainSeparateDataRootTupleRoot(uint256 _nonce, bytes32 _dataRootTupleRoot) 105 | private 106 | pure 107 | returns (bytes32) 108 | { 109 | bytes32 c = keccak256(abi.encode(DATA_ROOT_TUPLE_ROOT_DOMAIN_SEPARATOR, _nonce, _dataRootTupleRoot)); 110 | 111 | return c; 112 | } 113 | 114 | function derivePrivateKeys(uint256 count) private returns (uint256[] memory) { 115 | string memory mnemonic = "test test test test test test test test test test test junk"; 116 | uint256[] memory keys = new uint256[](count); 117 | for (uint32 i = 0; i < count; i++) { 118 | keys[i] = cheats.deriveKey(mnemonic, "m/44'/60'/0'/0", i); 119 | } 120 | return keys; 121 | } 122 | 123 | function initializeValidators(uint256[] memory keys) private returns (Validator[] memory) { 124 | Validator[] memory vs = new Validator[](keys.length); 125 | uint256 threshold = 2 * totalValidatorPower / 3; 126 | uint256 primaryPower = threshold / (numberOfSigners - 1); 127 | uint256 secondaryPower = (totalValidatorPower - threshold) / (numberOfValidators - numberOfSigners + 1); 128 | for (uint256 i = 0; i < keys.length; i++) { 129 | if (i < numberOfSigners) { 130 | vs[i] = Validator(cheats.addr(keys[i]), primaryPower); 131 | } else { 132 | vs[i] = Validator(cheats.addr(keys[i]), secondaryPower); 133 | } 134 | } 135 | return vs; 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2020", 4 | "module": "commonjs", 5 | "esModuleInterop": true, 6 | "forceConsistentCasingInFileNames": true, 7 | "strict": true, 8 | "skipLibCheck": true, 9 | "resolveJsonModule": true 10 | } 11 | } 12 | --------------------------------------------------------------------------------