├── .dockerignore ├── .env.example ├── .github ├── core.png ├── readme.png └── workflows │ ├── ci.yml │ ├── deploy.yml │ ├── fixtures.yml │ └── release.yml ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── batch.json ├── benches ├── README.md └── provider_benchmark.rs ├── build └── readme.md ├── cli ├── Cargo.toml └── src │ ├── cli.rs │ ├── commands │ ├── mod.rs │ ├── process.rs │ ├── run.rs │ ├── run_datalake.rs │ └── run_module.rs │ ├── interactive.rs │ ├── lib.rs │ └── main.rs ├── clippy.toml ├── config └── config.json ├── docker-compose.yml ├── dry_run_input.json ├── dry_run_output.json ├── examples └── private-input-module │ ├── Cargo.toml │ ├── README.md │ ├── private_module │ ├── Scarb.lock │ ├── Scarb.toml │ └── src │ │ └── lib.cairo │ └── src │ └── main.rs ├── fixtures └── primitives │ ├── local_class.json │ ├── mmr.json │ ├── processed │ ├── account.json │ ├── datalake_compute.json │ ├── header.json │ ├── mpt.json │ ├── receipt.json │ ├── storage.json │ └── transaction.json │ ├── processed_in_felts │ ├── account.json │ ├── datalake_compute.json │ ├── header.json │ ├── receipt.json │ ├── storage.json │ └── transaction.json │ └── uint256.json ├── hdp ├── Cargo.toml └── src │ ├── cairo_runner │ ├── dry_run.rs │ ├── input │ │ ├── dry_run.rs │ │ └── mod.rs │ ├── mod.rs │ └── run.rs │ ├── constant.rs │ ├── hdp_run.rs │ ├── lib.rs │ ├── preprocessor │ ├── compile │ │ ├── config.rs │ │ ├── datalake.rs │ │ ├── mod.rs │ │ ├── module.rs │ │ └── task.rs │ ├── mod.rs │ ├── module_compile.rs │ └── module_registry.rs │ ├── primitives │ ├── aggregate_fn │ │ ├── integer.rs │ │ └── mod.rs │ ├── block │ │ ├── account.rs │ │ ├── header.rs │ │ └── mod.rs │ ├── chain_id.rs │ ├── merkle_tree.rs │ ├── mod.rs │ ├── processed_types │ │ ├── account.rs │ │ ├── block_proofs.rs │ │ ├── cairo_format │ │ │ ├── account.rs │ │ │ ├── block_proofs.rs │ │ │ ├── datalake_compute.rs │ │ │ ├── felt_vec_unit.rs │ │ │ ├── header.rs │ │ │ ├── mod.rs │ │ │ ├── module.rs │ │ │ ├── mpt.rs │ │ │ ├── query.rs │ │ │ ├── receipt.rs │ │ │ ├── storage.rs │ │ │ ├── task.rs │ │ │ ├── traits.rs │ │ │ └── transaction.rs │ │ ├── datalake_compute.rs │ │ ├── header.rs │ │ ├── mmr.rs │ │ ├── mod.rs │ │ ├── module.rs │ │ ├── mpt.rs │ │ ├── processor_output.rs │ │ ├── query.rs │ │ ├── receipt.rs │ │ ├── starknet │ │ │ ├── header.rs │ │ │ ├── mod.rs │ │ │ └── storage.rs │ │ ├── storage.rs │ │ ├── task.rs │ │ ├── transaction.rs │ │ └── uint256.rs │ ├── request.rs │ ├── serde.rs │ ├── solidity_types │ │ ├── datalake_compute │ │ │ ├── compute.rs │ │ │ ├── datalake │ │ │ │ ├── block_sampled.rs │ │ │ │ ├── envelope.rs │ │ │ │ ├── mod.rs │ │ │ │ └── transactions_in_block.rs │ │ │ └── mod.rs │ │ ├── mod.rs │ │ ├── module.rs │ │ └── traits.rs │ ├── task │ │ ├── datalake │ │ │ ├── block_sampled │ │ │ │ ├── collection.rs │ │ │ │ ├── datalake.rs │ │ │ │ ├── mod.rs │ │ │ │ └── rlp_fields.rs │ │ │ ├── compute.rs │ │ │ ├── datalake_type.rs │ │ │ ├── envelope.rs │ │ │ ├── mod.rs │ │ │ └── transactions │ │ │ │ ├── collection.rs │ │ │ │ ├── datalake.rs │ │ │ │ ├── mod.rs │ │ │ │ └── rlp_fields.rs │ │ ├── mod.rs │ │ └── module.rs │ └── utils.rs │ ├── processor.rs │ └── provider │ ├── config.rs │ ├── error.rs │ ├── evm │ ├── datalake │ │ ├── block_sampled.rs │ │ ├── mod.rs │ │ └── transactions.rs │ ├── from_keys.rs │ ├── mod.rs │ ├── provider.rs │ └── rpc.rs │ ├── indexer.rs │ ├── key.rs │ ├── mod.rs │ ├── starknet │ ├── from_keys.rs │ ├── mod.rs │ ├── provider.rs │ ├── rpc.rs │ └── types.rs │ ├── traits.rs │ └── types.rs ├── 
input.json ├── justfile ├── request.json ├── rust-toolchain.toml └── script ├── compile.sh ├── config_to_env.sh ├── fetch_program.sh └── prepare_image_build.sh

/.dockerignore:
--------------------------------------------------------------------------------
# Ignore Dockerfiles
Dockerfile
Dockerfile.*
*.dockerfile

# Ignore Docker Compose files
docker-compose.yml
docker-compose.override.yml

# Ignore Docker volumes
/docker-volumes/
/docker-data/

# Ignore Docker build directories
/docker_build/

# Ignore target directory
target

# Ignore environment variables
.env*

# Ignore Python virtual environments
venv/

# Ignore specific tools directory
tools/

# Ignore specific directories and files
hdp-cairo/
build/
*.pie
*.json

--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
# Note that PROVIDER_URL_{CHAIN_ID} is required for fetching data
PROVIDER_URL_ETHEREUM_SEPOLIA=https://sepolia.infura.io/v3/your-infura-api-key
# This value is optional
PROVIDER_CHUNK_SIZE_ETHEREUM_SEPOLIA=2000

PROVIDER_URL_STARKNET_SEPOLIA= # For Starknet, make sure to use a Pathfinder node

# Optional
DRY_RUN_CAIRO_PATH= # Path to the dry-run Cairo program
SOUND_RUN_CAIRO_PATH= # Path to the sound-run Cairo program
SAVE_FETCH_KEYS_FILE= # Path for the dry-run output file

--------------------------------------------------------------------------------
/.github/core.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HerodotusDev/hdp/492dc32aebea3e002cf731e319bb585cf6768f75/.github/core.png

--------------------------------------------------------------------------------
/.github/readme.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/HerodotusDev/hdp/492dc32aebea3e002cf731e319bb585cf6768f75/.github/readme.png

--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
name: CI

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
      - dev

jobs:
  test:
    runs-on: ubuntu-latest
    env:
      PROVIDER_URL_ETHEREUM_SEPOLIA: ${{ secrets.PROVIDER_URL_ETHEREUM_SEPOLIA }}
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v4
        with:
          python-version: "3.9"
      - name: Display Python version
        run: python -c "import sys; print(sys.version)"
      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable
      - uses: taiki-e/install-action@cargo-llvm-cov
      - uses: taiki-e/install-action@nextest
      - name: Install clippy and rustfmt
        run: |
          rustup component add clippy
          rustup component add rustfmt
      - uses: taiki-e/install-action@just
      - name: Run clippy and formatter checks
        run: just run-ci-flow

--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
name: Deploy

on:
  push:
    tags:
      - "[0-9]+.[0-9]+.[0-9]+"
      - "v[0-9]+.[0-9]+.[0-9]+*"

permissions:
  contents: write

jobs:
  build-and-upload:
    name: Build and upload
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        # You can add more, for any target you'd like!
        include:
          - build: linux
            os: ubuntu-latest
            target: x86_64-unknown-linux-musl

          - build: macos
            os: macos-latest
            target: x86_64-apple-darwin

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Get the release version from the tag
        shell: bash
        run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV

      - name: Install Rust
        # Or @nightly if you want
        uses: dtolnay/rust-toolchain@stable
        # Arguments to pass in
        with:
          # Make Rust compile to our target (defined in the matrix)
          targets: ${{ matrix.target }}

      - name: Build
        uses: actions-rs/cargo@v1
        with:
          use-cross: true
          command: build
          args: --verbose --release --target ${{ matrix.target }}

      - name: Build archive
        shell: bash
        run: |
          # Name of the binary to package (must match [[bin]] in cli/Cargo.toml)
          binary_name="hdp"

          dirname="$binary_name-${{ env.VERSION }}-${{ matrix.target }}"
          mkdir "$dirname"
          if [ "${{ matrix.os }}" = "windows-latest" ]; then
            mv "target/${{ matrix.target }}/release/$binary_name.exe" "$dirname"
          else
            mv "target/${{ matrix.target }}/release/$binary_name" "$dirname"
          fi

          if [ "${{ matrix.os }}" = "windows-latest" ]; then
            7z a "$dirname.zip" "$dirname"
            echo "ASSET=$dirname.zip" >> $GITHUB_ENV
          else
            tar -czf "$dirname.tar.gz" "$dirname"
            echo "ASSET=$dirname.tar.gz" >> $GITHUB_ENV
          fi

      - name: Release
        uses: softprops/action-gh-release@v1
        with:
          files: |
            ${{ env.ASSET }}

--------------------------------------------------------------------------------
/.github/workflows/fixtures.yml:
--------------------------------------------------------------------------------
name: Generate and Merge Fixtures

on:
  workflow_dispatch:
  push:
    branches:
      - dev

jobs:
  generate_fixtures:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout current repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.9"

      - name: Display Python version
        run: python -c "import sys; print(sys.version)"

      - uses: dtolnay/rust-toolchain@master
        with:
          toolchain: stable

      - name: Install CLI binary
        run: |
          cargo install --locked -f --path cli/

      - name: Clone hdp-test repository
        run: |
          git clone -b ${{ github.ref_name }} https://x-access-token:${{ secrets.REPO_ACCESS_TOKEN }}@github.com/HerodotusDev/hdp-test.git hdp-test || git clone https://x-access-token:${{ secrets.REPO_ACCESS_TOKEN }}@github.com/HerodotusDev/hdp-test.git hdp-test
          cd hdp-test
          git checkout -b ${{ github.ref_name }} || git checkout ${{ github.ref_name }}
          git config user.name github-actions
          git config user.email
github-actions@github.com 46 | 47 | - name: Generate .env file 48 | run: | 49 | cd hdp-test 50 | cat << EOF > .env 51 | PROVIDER_URL_ETHEREUM_SEPOLIA=${{ secrets.PROVIDER_URL_ETHEREUM_SEPOLIA }} 52 | PROVIDER_URL_STARKNET_SEPOLIA=${{ secrets.PROVIDER_URL_STARKNET_SEPOLIA }} 53 | PROVIDER_CHUNK_SIZE_ETHEREUM_SEPOLIA=${{ secrets.PROVIDER_CHUNK_SIZE_ETHEREUM_SEPOLIA }} 54 | PROVIDER_CHUNK_SIZE_STARKNET_SEPOLIA=${{ secrets.PROVIDER_CHUNK_SIZE_STARKNET_SEPOLIA }} 55 | DRY_RUN_CAIRO_PATH=${{ secrets.DRY_RUN_CAIRO_PATH }} 56 | SOUND_RUN_CAIRO_PATH=${{ secrets.SOUND_RUN_CAIRO_PATH }} 57 | SAVE_FETCH_KEYS_FILE=${{ secrets.SAVE_FETCH_KEYS_FILE }} 58 | EOF 59 | 60 | - name: Set up and generate fixtures 61 | run: | 62 | cd hdp-cairo 63 | make setup 64 | source venv/bin/activate 65 | cd ../ 66 | cd hdp-test 67 | make generate 68 | 69 | - name: Commit and push new fixtures 70 | run: | 71 | cd hdp-test 72 | git add . 73 | git commit -m "Update fixtures" 74 | git push origin ${{ github.ref_name }} 75 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release Tag 2 | on: 3 | push: 4 | branches: 5 | - main 6 | jobs: 7 | build: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | - name: Extract version name 12 | run: echo "##[set-output name=version;]$(echo '${{ github.event.head_commit.message }}' | egrep -o '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}')" 13 | id: extract_version_name 14 | - name: generate release 15 | uses: actions/create-release@v1 16 | env: 17 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 18 | with: 19 | tag_name: ${{ steps.extract_version_name.outputs.version }} 20 | release_name: ${{ steps.extract_version_name.outputs.version }} 21 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Binaries for programs and libraries 2 | *.exe 3 | *.exe~ 4 | *.dll 5 | *.so 6 | *.dylib 7 | *.dSYM 8 | *.lib 9 | *.a 10 | *.o 11 | *.obj 12 | *.lo 13 | *.slo 14 | 15 | # Intermediate files produced by rustc 16 | *.rlib 17 | 18 | # Cargo artifacts 19 | /target/ 20 | /**/target/ 21 | /Cargo.lock 22 | /.cargo/ 23 | 24 | # Generated by Cargo 25 | Cargo.lock 26 | 27 | # Cairo build artifacts 28 | build/*.json 29 | 30 | # Binaries produced by `cargo install` 31 | bin/ 32 | 33 | # Rust debug files 34 | *.dSYM/ 35 | 36 | # If you're using IntelliJ Rust 37 | .idea/ 38 | 39 | # If you're using Visual Studio Code 40 | .vscode/ 41 | *.code-workspace 42 | 43 | # If you're using Eclipse 44 | .project 45 | .cproject 46 | .settings/ 47 | 48 | # If you're using JetBrains IDEs 49 | .idea/ 50 | *.iml 51 | 52 | # JetBrains related files 53 | *.ipr 54 | *.iws 55 | .idea/ 56 | 57 | # Ignore environment variables 58 | .env 59 | 60 | # Ignore Python virtual environments 61 | venv/ 62 | 63 | # Ignore specific tools directory 64 | tools/ 65 | 66 | # Ignore specific directories and files 67 | hdp-cairo/ 68 | *.pie 69 | *.zip -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | resolver = "2" 3 | members = ["cli", "examples/private-input-module", "hdp"] 4 | 5 | [workspace.package] 6 | edition = "2021" 7 | version = "0.9.0" 8 | license-file = "LICENSE" 9 | authors = ["Herodotus Data Processor Team"] 10 
| repository = "https://github.com/HerodotusDev/hdp" 11 | homepage = "https://herodotus.dev/" 12 | exclude = ["benches/", "fixtures/", "examples/"] 13 | keywords = ["blockchain", "ethereum", "rust", "data-processor", "storage-proof"] 14 | categories = [ 15 | "command-line-interface", 16 | "cryptography::cryptocurrencies", 17 | "compilers", 18 | "asynchronous", 19 | ] 20 | 21 | [workspace.dependencies] 22 | hdp = { path = "hdp" } 23 | hdp-cli = { path = "cli" } 24 | tokio = { version = "1", features = ["full"] } 25 | tempfile = "3.10.1" 26 | alloy-merkle-tree = { version = "0.7.1" } 27 | alloy = { version = "0.4.2", features = ["full"] } 28 | alloy-rlp = { version = "0.3.8", features = ["derive"] } 29 | anyhow = "1.0.79" 30 | serde = { version = "1.0", features = ["derive"] } 31 | serde_with = "2.3.2" 32 | serde_json = "1.0" 33 | tracing = "0.1" 34 | reqwest = { version = "0.11", features = ["json"] } 35 | rand = "0.8.4" 36 | regex = "1" 37 | starknet = "0.11.0" 38 | starknet-crypto = "0.7.1" 39 | starknet-types-core = "0.1.5" 40 | cairo-lang-starknet-classes = "2.7.0" 41 | cairo-vm = "1.0.0-rc6" 42 | futures = "0.3.30" 43 | lazy_static = "1.4.0" 44 | thiserror = "1.0" 45 | sn-trie-proofs = { version = "0.1.1" } 46 | eth-trie-proofs = "0.1.2" 47 | itertools = "0.10" 48 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # This repository is deprecated, project is continued in [this](https://github.com/HerodotusDev/hdp-cairo) repository 2 | 3 | ![](.github/readme.png) 4 | 5 | # Herodotus Data Processor (HDP) 6 | 7 | [![CI](https://github.com/HerodotusDev/hdp/actions/workflows/ci.yml/badge.svg)](https://github.com/HerodotusDev/hdp/actions/workflows/ci.yml) 8 | [![Crates.io][crates-badge]][crates-url] 9 | [![Documentation][docs-badge]][docs-url] 10 | [![GPLv3 licensed][gpl3-badge]][gpl3-url] 11 | 12 | [crates-url]: https://crates.io/crates/hdp 13 | [crates-badge]: https://img.shields.io/crates/v/hdp.svg 14 | [docs-badge]: https://docs.rs/hdp/badge.svg 15 | [docs-url]: https://docs.rs/hdp 16 | [gpl3-badge]: https://img.shields.io/badge/license-GPLv3-blue 17 | [gpl3-url]: LICENSE 18 | 19 | > **Warning:** This codebase is experimental and not audited. Use at your own risk. 20 | 21 | HDP enhances off-chain compute capabilities with zkVMs for verifiable on-chain data integration. For more, visit our [documentation](https://docs.herodotus.dev/herodotus-docs/developers/herodotus-data-processor-hdp). 22 | 23 | ## Introduction 24 | 25 | The Data Processor CLI serves as an essential tool for developers working with Cairo programs and zkVM environments. Its primary function is to translate human-readable requests into a format compatible with Cairo programs, enabling commands to be executed over the Cairo VM and generating executable outputs. This transformation is a crucial preprocessing step that prepares data for off-chain computations in zkVM environments. 26 | 27 | ## Features 28 | 29 | - **Development Tools**: Encode and decode data lakes and computational tasks. 30 | - **Core Processing**: Compile data from various sources and compute aggregate functions. 31 | - **Extensibility**: Support for multiple blockchain integrations and various ZKVM backends is planned. 32 | - **Ease of Use**: Provides a CLI for easy interaction with the system. 

## Install HDP

### Install with cargo

```bash
# Install with cargo
❯ cargo install --git https://github.com/HerodotusDev/hdp/ --tag {TAG} --locked --force hdp-cli
```

### Build from source

```bash
# clone repo
❯ git clone https://github.com/HerodotusDev/hdp.git

# install hdp
❯ cargo install --locked -f --path cli/
```

## Getting Started

To launch the interactive CLI:

```bash
# Start the HDP
❯ hdp start
Welcome to Herodotus Data Processor interactive CLI! 🛰️

 _   _   ____    ____
| | | | |  _ \  |  _ \
| |_| | | | | | | |_) |
|  _  | | |_| | |  __/
|_| |_| |____/  |_|

? Step 1. What's your datalake type?
```

## Usage Examples

First, set up a `.env` file like the one in the [example](./.env.example).

Second, run a command like the one below. Note that this will go through both the pre-process and the process step.

```bash
hdp run -r ${Request file path} -p ${Program input file path} -b ${Batch proof file path} -c ${PIE file after process}
```

For a more comprehensive guide on the options available for `hdp run`:

```console
❯ hdp run --help
Run a batch of tasks based on a request JSON file

Usage: hdp run [OPTIONS] --request-file <REQUEST_FILE> --program-input-file <PROGRAM_INPUT_FILE>

Options:
  -r, --request-file <REQUEST_FILE>
          Pass the request as a JSON file
      --dry-run-cairo-file <DRY_RUN_CAIRO_FILE>
          Dry-run the contract bootloader program. Only used for module tasks
  -p, --program-input-file <PROGRAM_INPUT_FILE>
          Path to save the program input file after pre-processing
      --cairo-format
          Set this boolean to true to generate a cairo format program_input_file
  -b, --batch-proof-file <BATCH_PROOF_FILE>
          Path to save the batch proof file after pre-processing
      --sound-run-cairo-file <SOUND_RUN_CAIRO_FILE>
          Path to the compiled HDP Cairo program (the main entry point)
  -c, --cairo-pie-file <CAIRO_PIE_FILE>
          Path to save the pie file
      --proof-mode
          Flag to run `cairo-run` in proof mode
  -h, --help
          Print help (see more with '--help')
```
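
As a concrete example, a single block-sampled datalake task can also be run without a request file via `hdp run-datalake`. A minimal sketch, assuming the flags defined in `cli/src/commands/run_datalake.rs`; the block range, address, and output path are illustrative placeholders, the chain id may alternatively be expected as a name (e.g. `ETHEREUM_SEPOLIA`) depending on the `ChainId` parser, and the exact argument order follows `hdp run-datalake --help`:

```bash
# avg of an account's balance over three Sepolia blocks (placeholder values)
❯ hdp run-datalake avg \
    --program-input-file input.json \
    --destination-chain-id 11155111 \
    block-sampled 11155111 5186021 5186023 \
    "account.0x13CB6AE34A13a0977F4d7101eBc24B87Bb23F0d5.balance" 1
```

This runs the same pre-process step as `hdp run` and writes the Cairo program input to `input.json`.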

### Integration Test

The core soundness of HDP relies on generating the correct input file and running the Cairo program. To ensure this, a full integration test flow is necessary to link the pre-processor and processor versions. For continuous integration tests, please refer to the [hdp-test](https://github.com/HerodotusDev/hdp-test) repository, as it contains test cases for all supported features.

### Additional Notes

- Please ensure that the data fields to which you apply these functions contain numeric values, to avoid computational errors.
- For details on how these tests are performed or to contribute to the further development of tests, please refer to the [Integration Test Guide](https://github.com/HerodotusDev/hdp-test).

## Development

```sh
# CI check
just run-ci-flow
```

### Local Run

For a full local environment to run in, check out this [hdp module template](https://github.com/HerodotusDev/hdp-module-template).

## License

`hdp` is licensed under the [GNU General Public License v3.0](./LICENSE).

---

Herodotus Dev Ltd - 2024

--------------------------------------------------------------------------------
/batch.json:
--------------------------------------------------------------------------------
{
  "raw_results": [
    "0x0000000000000000000000000000000000000000000000000000000000000000"
  ],
  "results_commitments": [
    "0xee7f5579ffc26c8af7fe984a1f4af55aad0b293f964d009a4218ea8feada8b89"
  ],
  "tasks_commitments": [
    "0xf9439166b16d3ebbc8c047754c9c4cf7c4012c5850f44a81da9c34c32f91cee3"
  ],
  "task_inclusion_proofs": [
    []
  ],
  "results_inclusion_proofs": [
    []
  ],
  "results_root": "0x9ec4f29a895bc781b530beb776070c9e454202fa6c3e296e1581bf42c50a53bb",
  "tasks_root": "0x71269cb70288924fce237c0e6fdcd1735e855480e8dce9e0bb85a8645b1cea03",
  "mmr_metas": [
    {
      "id": 27,
      "root": "0x492627ffa5084ec078f4d461408dfaa50b504a022c5471452d598da0040c066",
      "size": 13024091,
      "peaks": [
        "0x262c4c9b1cb2a036924aecf563dc9952e5f8b41004310adde86f22abb793eb1",
        "0x10b39aed56c8f244a1df559c944ada6f12b7238f8c06a2c243ba4276b8059b0",
        "0x46f45f218ea3aec481f350cda528a6f9f926a2dd53dae302e2cb610e5f152c7",
        "0x1d52a06e6d02569893a1d842c00bb67c044be541c614e88613d7fc7187e18c1",
        "0x770ebf618a589c17e3dc05bda7121acbedc0b48cd25f2943dc43f395f8bf0db",
        "0x7263e878f7deafdc49b47da57f8594d477e572d3ac2bec27bb73860a35b1899",
        "0x7b9e99f008949f9ee33d2965708ac6773a57965514df6383d55de104a39ab8c",
        "0x28f6ccdcd38f6be6c437d100fcd62604c3293e31342a777dc37c712869ab08c",
        "0x13d87197fe5d6f646a57dc918dcbef210737020dca9b89537fd8718ac69da3e",
        "0x7eef4b790b56858c0232b494034d4c8699112d88f358209f71f02d5e93a7084",
        "0x25cd2f0b579c902c41ac26df96ed5b21e16a3127dce2b471973dc86eb4c099f",
        "0x5fdedfd0123b7461d5b3162fe82f7f3172c42fda6209415367870086f7c7918",
        "0x7c0a415d5a6c4c90fd2dde1b340c3be305a72aa3b758dd26b8d7b4a78b53681"
      ],
      "chain_id": 11155111
    }
  ]
}

--------------------------------------------------------------------------------
/benches/README.md:
--------------------------------------------------------------------------------
# `EvmProvider` Benchmark

## Hardware Specifications

- **Processor**: Apple M2
- **Memory**: 32 GB
- **Operating System**: macOS

## RPC Specifications

- Used an Alchemy free-plan RPC URL

## Benchmark Results

| Benchmark                            | Time (ms) | Iterations | Notes          |
| ------------------------------------ | --------- | ---------- | -------------- |
| get_10_header_proofs                 | 200.52 ms | 10         | Block Range 10 |
| get_10_account_proofs                | 243.05 ms | 10         | Block Range 10 |
| get_10_storage_proofs                | 245.14 ms | 10         | Block Range 10 |
| get_tx_with_proof_from_block         | 231.62 ms | 10         | --             |
| get_tx_receipt_with_proof_from_block | 1590.2 ms | 10         | --             |

--------------------------------------------------------------------------------
/benches/provider_benchmark.rs:
--------------------------------------------------------------------------------
use alloy::primitives::{address, B256};
use criterion::{criterion_group, criterion_main, Bencher, Criterion};
use hdp::provider::evm::provider::EvmProvider;
use tokio::runtime::Runtime;

fn benchmark_header(b: &mut Bencher) {
    let provider = EvmProvider::default();
    let rt = Runtime::new().unwrap();

    b.iter(|| {
        rt.block_on(async {
            provider
.get_range_of_header_proofs(6127485, 6127485 + 10, 1) 14 | .await 15 | .unwrap(); 16 | }); 17 | }); 18 | } 19 | 20 | fn benchmark_account(b: &mut Bencher) { 21 | let provider = EvmProvider::default(); 22 | let target_address = address!("7f2c6f930306d3aa736b3a6c6a98f512f74036d4"); 23 | let rt = Runtime::new().unwrap(); 24 | 25 | b.iter(|| { 26 | rt.block_on(async { 27 | provider 28 | .get_range_of_account_proofs(6127485, 6127485 + 10, 1, target_address) 29 | .await 30 | .unwrap(); 31 | }); 32 | }); 33 | } 34 | 35 | fn benchmark_storage(b: &mut Bencher) { 36 | let provider = EvmProvider::default(); 37 | let target_address = address!("75CeC1db9dCeb703200EAa6595f66885C962B920"); 38 | let storage_key = B256::ZERO; 39 | let rt = Runtime::new().unwrap(); 40 | 41 | b.iter(|| { 42 | rt.block_on(async { 43 | provider 44 | .get_range_of_storage_proofs(6127485, 6127485 + 10, 1, target_address, storage_key) 45 | .await 46 | .unwrap(); 47 | }); 48 | }); 49 | } 50 | 51 | fn benchmark_transaction(b: &mut Bencher) { 52 | let provider = EvmProvider::default(); 53 | let rt = Runtime::new().unwrap(); 54 | 55 | b.iter(|| { 56 | rt.block_on(async { 57 | provider 58 | .get_tx_with_proof_from_block(6127485, 0, 23, 1) 59 | .await 60 | .unwrap(); 61 | }); 62 | }); 63 | } 64 | 65 | fn benchmark_transaction_receipt(b: &mut Bencher) { 66 | let provider = EvmProvider::default(); 67 | let rt = Runtime::new().unwrap(); 68 | 69 | b.iter(|| { 70 | rt.block_on(async { 71 | provider 72 | .get_tx_receipt_with_proof_from_block(6127485, 0, 23, 1) 73 | .await 74 | .unwrap(); 75 | }); 76 | }); 77 | } 78 | 79 | fn criterion_benchmark(c: &mut Criterion) { 80 | c.bench_function("get_10_header_proofs", benchmark_header); 81 | c.bench_function("get_10_account_proofs", benchmark_account); 82 | c.bench_function("get_10_storage_proofs", benchmark_storage); 83 | c.bench_function("get_tx_with_proof_from_block", benchmark_transaction); 84 | c.bench_function( 85 | "get_tx_receipt_with_proof_from_block", 86 | benchmark_transaction_receipt, 87 | ); 88 | } 89 | 90 | criterion_group! 
{ 91 | name = benches; 92 | config = Criterion::default().sample_size(10).measurement_time(std::time::Duration::new(10, 0)); 93 | targets = criterion_benchmark 94 | } 95 | 96 | criterion_main!(benches); 97 | -------------------------------------------------------------------------------- /build/readme.md: -------------------------------------------------------------------------------- 1 | ### Pedersen Program Hash 2 | 3 | ```json 4 | { 5 | "HDP_PROGRAM_HASH": "0x7f4e04ae49045719567040fd49a42283c63f50c9241abdadd23e96f7d9bda8c", 6 | "DRY_RUN_PROGRAM_HASH": "0x48ac124e876e38ec61c5cd1543930e8211d17be84fd37e6c65da472f6801529" 7 | } 8 | ``` 9 | 10 | ### Solidity Contract 11 | 12 | ``` 13 | 0x17e6E8e650e96B0cE39FB389B372E122C68F5a41 14 | ``` 15 | -------------------------------------------------------------------------------- /cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hdp-cli" 3 | description = "Interactive Herodotus Data Processor via CLI" 4 | edition.workspace = true 5 | license-file.workspace = true 6 | version.workspace = true 7 | repository.workspace = true 8 | homepage.workspace = true 9 | exclude.workspace = true 10 | keywords.workspace = true 11 | categories.workspace = true 12 | authors.workspace = true 13 | readme = "../README.md" 14 | 15 | 16 | [[bin]] 17 | name = "hdp" 18 | path = "src/main.rs" 19 | 20 | [dependencies] 21 | tracing-subscriber = { version = "0.3.0", features = ["env-filter"] } 22 | hdp = { workspace = true } 23 | anyhow = { workspace = true } 24 | tokio = { workspace = true } 25 | tracing = { workspace = true } 26 | serde_json = { workspace = true } 27 | serde = { workspace = true } 28 | starknet = { workspace = true } 29 | clap = { version = "4.4.4", features = ["derive"] } 30 | dotenv = "0.15.0" 31 | inquire = "0.7.4" 32 | alloy = { workspace = true } 33 | -------------------------------------------------------------------------------- /cli/src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::commands::run::RunArgs; 2 | use clap::{command, Parser, Subcommand}; 3 | use process::ProcessArgs; 4 | use run_datalake::RunDatalakeArgs; 5 | use run_module::RunModuleArgs; 6 | 7 | pub mod process; 8 | pub mod run; 9 | pub mod run_datalake; 10 | pub mod run_module; 11 | 12 | #[derive(Debug, Parser)] 13 | #[command(name = "hdp")] 14 | #[command(version, about, long_about = None)] 15 | pub struct HDPCli { 16 | #[command(subcommand)] 17 | pub command: HDPCliCommands, 18 | } 19 | 20 | #[derive(Debug, Subcommand)] 21 | pub enum HDPCliCommands { 22 | /// New to the HDP CLI? Start here! 
    #[command(name = "start")]
    Start,
    /// Run a single datalake compute task
    #[command(name = "run-datalake", arg_required_else_help = true)]
    RunDatalake(RunDatalakeArgs),

    /// Run a single module with either a program hash or a local class path
    #[command(name = "run-module", arg_required_else_help = true)]
    RunModule(RunModuleArgs),
    /// Run a batch of tasks based on a request JSON file
    #[command(name = "run", arg_required_else_help = true)]
    Run(RunArgs),

    /// Process the output of the pre-processor
    #[command(name = "process", arg_required_else_help = true)]
    Process(ProcessArgs),
}

--------------------------------------------------------------------------------
/cli/src/commands/process.rs:
--------------------------------------------------------------------------------
use std::path::PathBuf;

use clap::Parser;

#[derive(Parser, Debug)]
pub struct ProcessArgs {
    /// Path to the program input file produced by pre-processing.
    #[arg(short, long)]
    pub input_file: PathBuf,

    /// Path to the compiled HDP Cairo program (the main entry point)
    #[arg(long)]
    pub sound_run_cairo_file: Option<PathBuf>,

    /// Path to save the pie file
    ///
    /// This will trigger the processing (= pie generation) step
    #[arg(short, long, requires("input_file"), conflicts_with = "proof_mode")]
    pub cairo_pie_file: Option<PathBuf>,

    /// Flag to run `cairo-run` in proof mode
    ///
    /// This will trigger the processing (= pie generation) step.
    /// By default, it will run in non-proof mode to generate a pie.
    /// Note that if this flag is set, it cannot be combined with `cairo_pie_file`.
    #[arg(long, default_value_t = false, conflicts_with = "cairo_pie_file")]
    pub proof_mode: bool,
}

--------------------------------------------------------------------------------
/cli/src/commands/run.rs:
--------------------------------------------------------------------------------
use crate::commands::Parser;
use std::path::PathBuf;

#[derive(Parser, Debug)]
pub struct RunArgs {
    /// Pass the request as a JSON file
    #[arg(short, long)]
    pub request_file: PathBuf,

    /// Dry-run the contract bootloader program.
    /// Only used for module tasks
    #[arg(long)]
    pub dry_run_cairo_file: Option<PathBuf>,

    /// Path to save the program input file after pre-processing.
    ///
    /// This will be the input data for the Cairo program
    #[arg(short, long)]
    pub program_input_file: PathBuf,

    /// Set this boolean to true to generate a cairo format program_input_file
    ///
    /// By default, program_input_file is generated in cairo format. If you don't want that, set this to false.
    #[arg(long, default_value_t = true)]
    pub cairo_format: bool,

    /// Path to save the batch proof file after pre-processing.
    ///
    /// This will be used to verify the batch proof on-chain
    #[arg(short, long, requires("program_input_file"))]
    pub batch_proof_file: Option<PathBuf>,

    /// Path to the compiled HDP Cairo program (the main entry point)
    #[arg(long)]
    pub sound_run_cairo_file: Option<PathBuf>,

    /// Path to save the pie file
    ///
    /// This will trigger the processing (= pie generation) step
    #[arg(
        short,
        long,
        requires("program_input_file"),
        conflicts_with = "proof_mode"
    )]
    pub cairo_pie_file: Option<PathBuf>,

    /// Flag to run `cairo-run` in proof mode
    ///
    /// This will trigger the processing (= pie generation) step.
    /// By default, it will run in non-proof mode to generate a pie.
    /// Note that if this flag is set, it cannot be combined with `cairo_pie_file`.
    #[arg(long, default_value_t = false, conflicts_with = "cairo_pie_file")]
    pub proof_mode: bool,
}

--------------------------------------------------------------------------------
/cli/src/commands/run_datalake.rs:
--------------------------------------------------------------------------------
use std::path::PathBuf;

use alloy::primitives::{BlockNumber, TxIndex};
use clap::{arg, command, Parser, Subcommand};
use hdp::primitives::{
    aggregate_fn::{AggregationFunction, FunctionContext},
    task::datalake::{
        block_sampled::BlockSampledCollection,
        transactions::{IncludedTypes, TransactionsCollection},
    },
    ChainId,
};

#[derive(Parser, Debug)]
pub struct RunDatalakeArgs {
    /// The aggregate function id, e.g. "sum", "min", "avg"
    pub aggregate_fn_id: AggregationFunction,
    /// Optional context for applying conditions on the aggregate function "count".
    /// Format: "{operator}.{value}" (e.g., "eq.100" for equality, "gt.100" for greater-than).
    /// Supported operators are in the Operator enum.
    pub aggregate_fn_ctx: Option<FunctionContext>,

    #[command(subcommand)]
    pub datalake: DataLakeCommands,

    /// Path to save the program input file after pre-processing.
    ///
    /// This will be the input data for the Cairo program
    #[arg(short, long)]
    pub program_input_file: PathBuf,

    /// Set this boolean to true to generate a cairo format program_input_file
    ///
    /// By default, program_input_file is generated in cairo format. If you don't want that, set this to false.
    #[arg(long, default_value_t = true)]
    pub cairo_format: bool,

    /// Path to save the batch proof file after pre-processing.
    ///
    /// This will be used to verify the batch proof on-chain
    #[arg(short, long, requires("program_input_file"))]
    pub batch_proof_file: Option<PathBuf>,

    /// Path to the compiled HDP Cairo program (the main entry point)
    #[arg(long)]
    pub sound_run_cairo_file: Option<PathBuf>,

    /// Path to save the pie file
    ///
    /// This will trigger the processing (= pie generation) step
    #[arg(
        short,
        long,
        requires("program_input_file"),
        conflicts_with = "proof_mode"
    )]
    pub cairo_pie_file: Option<PathBuf>,

    /// Flag to run `cairo-run` in proof mode
    ///
    /// This will trigger the processing (= pie generation) step.
    /// By default, it will run in non-proof mode to generate a pie.
    /// Note that if this flag is set, it cannot be combined with `cairo_pie_file`.
    #[arg(long, default_value_t = false, conflicts_with = "cairo_pie_file")]
    pub proof_mode: bool,

    /// Destination chain id
    #[arg(long)]
    pub destination_chain_id: ChainId,
}

#[derive(Subcommand, Clone, Debug, PartialEq, Eq)]
pub enum DataLakeCommands {
    #[command(arg_required_else_help = true)]
    #[command(short_flag = 's')]
    BlockSampled {
        /// Chain id
        chain_id: ChainId,
        /// Block number range start (inclusive)
        block_range_start: BlockNumber,
        /// Block number range end (inclusive)
        block_range_end: BlockNumber,
        /// Sampled property, e.g. "header.number", "account.0xaccount.balance", "storage.0xcontract.0xstoragekey"
        sampled_property: BlockSampledCollection,
        /// Increment number of the given range of blocks
        #[arg(default_value_t = 1)]
        increment: u64,
    },

    #[command(arg_required_else_help = true)]
    #[command(short_flag = 't')]
    TransactionsInBlock {
        /// Chain id
        chain_id: ChainId,
        /// Target block number
        target_block: BlockNumber,
        /// Sampled property.
        /// Fields from transaction: "chain_id", "gas_price", etc.
        /// Fields from transaction receipt: "cumulative_gas_used", etc.
        sampled_property: TransactionsCollection,
        /// Start index of transactions range (inclusive)
        start_index: TxIndex,
        /// End index of transactions range (exclusive)
        end_index: TxIndex,
        /// Increment number of transaction indexes in the block
        increment: u64,
        /// Filter out the specific types of Txs.
        /// Each byte represents a type of transaction to be included in the datalake,
        /// e.g. 1,0,1,0 -> include legacy, exclude eip2930, include eip1559, exclude eip4844
        included_types: IncludedTypes,
    },
}

--------------------------------------------------------------------------------
/cli/src/commands/run_module.rs:
--------------------------------------------------------------------------------
use clap::{arg, Parser};
use hdp::primitives::ChainId;
use std::path::PathBuf;

#[derive(Parser, Debug)]
pub struct RunModuleArgs {
    /// Input field elements for the module contract.
    /// The input field elements should be separated by commas.
    /// In each entry, the first part is the visibility and the second part is the value.
    ///
    /// e.g. "private.0x1234,public.0xabcd"
    #[arg(long, use_value_delimiter = true)]
    pub module_inputs: Option<Vec<String>>,

    /// Program hash of the contract class.
    /// (Note: either program_hash or local_class_path should be provided)
    #[arg(long, group = "class_source")]
    pub program_hash: Option<String>,

    /// Local path of the contract class file.
    /// Make sure its structure matches [CasmContractClass](https://github.com/starkware-libs/cairo/blob/53f7a0d26d5c8a99a8ad6ba07207a762678f2931/crates/cairo-lang-starknet-classes/src/casm_contract_class.rs)
    ///
    /// (Note: either program_hash or local_class_path should be provided)
    #[arg(long, group = "class_source")]
    pub local_class_path: Option<PathBuf>,

    /// Optionally save the fetch keys for the module task to a file
    #[arg(long)]
    pub save_fetch_keys_file: Option<PathBuf>,

    /// Dry-run the contract bootloader program.
    /// Only used for module tasks
    #[arg(long)]
    pub dry_run_cairo_file: Option<PathBuf>,

    /// Path to save the program input file after pre-processing.
    ///
    /// This will be the input data for the Cairo program
    #[arg(short, long)]
    pub program_input_file: PathBuf,

    /// Set this boolean to true to generate a cairo format program_input_file
    ///
    /// By default, program_input_file is generated in cairo format. If you don't want that, set this to false.
    #[arg(long, default_value_t = true)]
    pub cairo_format: bool,

    /// Path to save the batch proof file after pre-processing.
    ///
    /// This will be used to verify the batch proof on-chain
    #[arg(short, long, requires("program_input_file"))]
    pub batch_proof_file: Option<PathBuf>,

    /// Path to the compiled HDP Cairo program (the main entry point)
    #[arg(long)]
    pub sound_run_cairo_file: Option<PathBuf>,

    /// Path to save the pie file
    ///
    /// This will trigger the processing (= pie generation) step
    #[arg(
        short,
        long,
        requires("program_input_file"),
        conflicts_with = "proof_mode"
    )]
    pub cairo_pie_file: Option<PathBuf>,

    /// Flag to run `cairo-run` in proof mode
    ///
    /// This will trigger the processing (= pie generation) step.
    /// By default, it will run in non-proof mode to generate a pie.
    /// Note that if this flag is set, it cannot be combined with `cairo_pie_file`.
    #[arg(long, default_value_t = false, conflicts_with = "cairo_pie_file")]
    pub proof_mode: bool,

    /// Destination chain id
    #[arg(long)]
    pub destination_chain_id: ChainId,
}

--------------------------------------------------------------------------------
/cli/src/lib.rs:
--------------------------------------------------------------------------------
pub mod cli;
pub mod commands;
pub mod interactive;

--------------------------------------------------------------------------------
/cli/src/main.rs:
--------------------------------------------------------------------------------
#[tokio::main]
async fn main() -> anyhow::Result<()> {
    hdp_cli::cli::hdp_cli_run().await
}

--------------------------------------------------------------------------------
/clippy.toml:
--------------------------------------------------------------------------------
too-many-arguments-threshold = 11

--------------------------------------------------------------------------------
/config/config.json:
--------------------------------------------------------------------------------
{
  "HDP_PROGRAM_HASH": "0x01f77febac1a08346546520d148ca8beb99cbb9b26f2f3030d14934003f1f88b",
  "DRY_RUN_PROGRAM_HASH": "0x048ac124e876e38ec61c5cd1543930e8211d17be84fd37e6c65da472f6801529"
}

--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
services:
  runner:
    build:
      context: .
      dockerfile: ./runner.dockerfile
      args:
        HDP_PROGRAM_HASH: 0x0581f639627ec3af482532a1847f59b6521418b59cbe5e2a5657196e3ed886ea
        DRY_RUN_PROGRAM_HASH: 0x062a6397fa977a8e25887000cef90053ca599e3615830dfeb4cc18f79e534afc
    env_file:
      - .env
    volumes:
      - ./input.json:/hdp-runner/input.json
      - ./output.json:/hdp-runner/output.json
      - ./cairo.pie:/hdp-runner/cairo.pie
      - ./request.json:/hdp-runner/request.json

--------------------------------------------------------------------------------
/dry_run_output.json:
--------------------------------------------------------------------------------
[{"fetch_keys": [{"type": "AccountMemorizerKey", "key": {"chain_id": 11155111, "block_number": 5186021, "address": "0x13CB6AE34A13a0977F4d7101eBc24B87Bb23F0d5"}}, {"type": "AccountMemorizerKey", "key": {"chain_id": 11155111, "block_number": 5186022, "address": "0x13CB6AE34A13a0977F4d7101eBc24B87Bb23F0d5"}}, {"type": "AccountMemorizerKey", "key": {"chain_id": 11155111, "block_number": 5186023, "address": "0x13CB6AE34A13a0977F4d7101eBc24B87Bb23F0d5"}}], "result": {"low": "0x0", "high": "0x0"}, "program_hash": "0xaf1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca"}]

--------------------------------------------------------------------------------
/examples/private-input-module/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "private-input-module"
version.workspace = true
edition.workspace = true
license-file.workspace = true
authors.workspace = true
repository.workspace = true
homepage.workspace = true
exclude.workspace = true
keywords.workspace = true
categories.workspace = true

[dependencies]
hdp = { workspace = true, features = ["test_utils"] }
tokio = { workspace = true }
serde_json = { workspace = true }

--------------------------------------------------------------------------------
/examples/private-input-module/README.md:
--------------------------------------------------------------------------------
# Private input and module

HDP provides privacy, not only provability.

- private module: If you don't want to share a custom module's computation logic (e.g. the logic contains a sensitive strategy), you can run HDP locally, as in the example in [private_module](./private_module/), and point to the module's contract class locally to generate a PIE (see the CLI sketch below).

- private input: If you don't want to reveal some inputs of the module function (e.g. a private key), you can change their visibility to private so that they are excluded from the task commitment construction.
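
The same module can also be run through the CLI instead of the library API shown in `src/main.rs` below. A minimal sketch, assuming the flags defined in `cli/src/commands/run_module.rs`; the paths and input values mirror `src/main.rs` and are illustrative, and the chain id format depends on the `ChainId` parser:

```bash
# Run the private module locally from its compiled contract class
❯ hdp run-module \
    --local-class-path ./private_module/target/dev/private_module_get_balance.compiled_contract_class.json \
    --module-inputs "private.0x5222a4,public.0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5" \
    --program-input-file input.json \
    --destination-chain-id 11155111 \
    --cairo-pie-file pie.zip
```

Because `--local-class-path` is used, the contract class stays local, and the `private.*` input is kept out of the task commitment.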
--------------------------------------------------------------------------------
/examples/private-input-module/private_module/Scarb.lock:
--------------------------------------------------------------------------------
# Code generated by scarb DO NOT EDIT.
version = 1

[[package]]
name = "hdp_cairo"
version = "0.1.0"
source = "git+https://github.com/HerodotusDev/hdp-cairo.git#4e00d58246e3f5ea5d1495e914c28c738c6d540f"

[[package]]
name = "private_module"
version = "0.1.0"
dependencies = [
    "hdp_cairo",
]

--------------------------------------------------------------------------------
/examples/private-input-module/private_module/Scarb.toml:
--------------------------------------------------------------------------------
[package]
name = "private_module"
version = "0.1.0"
edition = "2023_11"

# See more keys and their definitions at https://docs.swmansion.com/scarb/docs/reference/manifest.html

[dependencies]
starknet = "2.6.3"
hdp_cairo = { git = "https://github.com/HerodotusDev/hdp-cairo.git" }

[[target.starknet-contract]]
sierra = true
casm = true
casm-add-pythonic-hints = true

--------------------------------------------------------------------------------
/examples/private-input-module/private_module/src/lib.cairo:
--------------------------------------------------------------------------------
#[starknet::contract]
mod get_balance {
    use hdp_cairo::memorizer::account_memorizer::AccountMemorizerTrait;
    use hdp_cairo::{HDP, memorizer::account_memorizer::{AccountKey, AccountMemorizerImpl}};
    use starknet::syscalls::call_contract_syscall;
    use starknet::{ContractAddress, SyscallResult, SyscallResultTrait};

    #[storage]
    struct Storage {}

    #[external(v0)]
    pub fn main(ref self: ContractState, hdp: HDP, block_number: u32, address: felt252) -> u256 {
        hdp
            .account_memorizer
            .get_balance(
                AccountKey { chain_id: 11155111, block_number: block_number.into(), address }
            )
    }
}

--------------------------------------------------------------------------------
/examples/private-input-module/src/main.rs:
--------------------------------------------------------------------------------
use hdp::{
    hdp_run::HdpRunConfig,
    preprocessor::module_registry::ModuleRegistry,
    primitives::task::{module::Module, TaskEnvelope},
};

#[tokio::main]
async fn main() {
    // Set RUST_LOG to debug
    std::env::set_var("RUST_LOG", "debug");

    let module_registry = ModuleRegistry::new();
    let module = Module::new_from_str(
        None,
        Some(
            "./private_module/target/dev/private_module_get_balance.compiled_contract_class.json"
                .into(),
        ),
        vec![
            "private.0x5222a4".to_string(),
            "public.0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5".to_string(),
        ],
    )
    .unwrap();
    let module = module_registry.get_extended_module(module).await.unwrap();
    let tasks = vec![TaskEnvelope::Module(module)];
    let pre_processor_output_file = "input.json";
    let output_file = "output.json";
    let cairo_pie_file = "pie.zip";

    let hdp_run_config = HdpRunConfig {
        dry_run_program_path: "../../build/contract_dry_run.json".into(),
        sound_run_program_path: "../../build/hdp.json".into(),
        program_input_file: pre_processor_output_file.into(),
        batch_proof_file: Some(output_file.into()),
        cairo_pie_file: Some(cairo_pie_file.into()),
        ..Default::default()
    };

    hdp::run(&hdp_run_config, tasks).await.unwrap();
}

--------------------------------------------------------------------------------
/fixtures/primitives/mmr.json:
-------------------------------------------------------------------------------- 1 | { 2 | "id": 26, 3 | "root": "0x18e672dd525cd5eacc5f6b15e5d89451bce65177881304d5200af38e350ebdc", 4 | "size": 12273288, 5 | "peaks": [ 6 | "0x262c4c9b1cb2a036924aecf563dc9952e5f8b41004310adde86f22abb793eb1", 7 | "0x72f553aac8690d09c95fe6220fdd5a073440631e4ca0a161a92b655d2ac9478", 8 | "0x6c68dfa085af40218620038d05f477fba52c4b12b812b64902663abf78bca62", 9 | "0x52a50beb6cbeffbd5db875d77e4d3917fdee5f723165f139dc04fe20cd4d69a", 10 | "0x5c4814bbd601bffb5ac9980977a79bf100d4c1ad4f2caa410f7a7c4249a2fd4", 11 | "0x668035a3620690024dac08a8db46e3316619e4c2a634daaa3175ab16af72deb", 12 | "0x67cff2a39ca6fb235decefaf5bb63f54c550b97b57e9873751eb9dae35cfcd4", 13 | "0x2a7d9ca4745f200dd2c66d2dfd6374a21f7092452287696c395f62afc22c805", 14 | "0x37511dd8cc41503f6c08879d18f15b9ae649d6b2cdd91bcaa3990aeb87ba8c6", 15 | "0x55112088a2f7dfaf5d88ce949f3aad7c7d05d6e4eaff4053aebfbed3af885af", 16 | "0x66c82fce8bfc291095c6c9255b1f7ccf725a1e91e8ae8cd8c43ceb111c21480", 17 | "0x2e5274895f9cd556bb8dee5b2551e9cda9aa3caa23532f9824abcc62d5ad273" 18 | ], 19 | "chain_id": 11155111 20 | } 21 | -------------------------------------------------------------------------------- /fixtures/primitives/processed/datalake_compute.json: -------------------------------------------------------------------------------- 1 | { 2 | "encoded_task": "0xb2a2ee8b2a785947ecf6b7705a3e7175c8dbfc1f9dc4448e27f5260fc875f84b000000000000000000000000000000000000000000000000000000000000000300000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", 3 | "task_commitment": "0x80f222ba154043dc142836cd20cdf612f6ad8a49cd8974097d9456e470e9bef8", 4 | "compiled_result": "0x63fa", 5 | "result_commitment": "0x529b81a8ce99f7498e0662f024e40b909d5b3b6f607feb74b083dd6ba2f7dedb", 6 | "task_proof": [], 7 | "result_proof": [], 8 | "encoded_datalake": "0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000005006da00000000000000000000000000000000000000000000000000000000005006e0000000000000000000000000000000000000000000000000000000000000000400000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000016027f2c6f930306d3aa736b3a6c6a98f512f74036d40000000000000000000000", 9 | "datalake_type": 0, 10 | "property_type": 2 11 | } 12 | -------------------------------------------------------------------------------- /fixtures/primitives/processed/header.json: -------------------------------------------------------------------------------- 1 | { 2 | "rlp": 
"f90267a05c1f73059dd60f0201ca7b8aa270ca5424a8f076db426d017c54a74a82ef56cea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794ff58d746a67c2e42bcc07d6b3f58406e8837e883a057728119bd76e9af2c58df2f47c47f543f432172a37043c2351fc34d38702436a0f9cab9849d35d85b85809dafbe3b60d5b4918be47cffc1e3c0864c13b41c31a6a00a209f052f990c4e3a65efe0a271eb51b4b95d076e38ffa307c7346d0832b57ab901008b2a934ea0084cf024510ca3f9b10796b9f58a5e00d1248332b6deac42efa8326246a17ea45804658bb1897166ad8ec34a84102622a7ab23253014a1ae300460b08e918a82b70e94c1aa4e5b025d00c9b4e47ea48dd5645c22daf86d4a9e5439cf7d8a6a2643433ab4a8f22d2462a93da00825cc213fc74e0b00f35d80dc6d145355c0b48d963925bc0d53039a00b0c6e835e1a543494b1af1f1d2988c7c1f406f1926b62ae6f5be6ae024e42015ca1e1d6648940668bca42df3e12a07fc146341921c873aea51086703ce29bfd6b086b292511417e6203586eef9c31194b70da47c7084e678b02c25613c30a7824a87ad5104b6aac78073826a0bc723347e1580835006de8401c9c3808401708c938465c4bc289a476f65726c69205365706f6c69612d4265706f6c696120513966a06f2a21ccd01c17585b697e92dd754a0f1095f8becbcdbae0cd6487fd441f7ac388000000000000000085121cb8724ba02b3ea7bdc5dea8a5634d941c929b7ab4b3e61ea482fdf6967da806a80f5648df830c00008404b40000a04421327c2e3610774a4e10b476dba017a00d3fcdd4e2f2d13825893dd8571dd5", 3 | "proof": { 4 | "leaf_idx": 1404667, 5 | "mmr_path": [ 6 | "0x1d550d316e3449e6fe1d1ef0105eb0b1dc68c1fe0aa7b98e790c0d9b21e3b64", 7 | "0x38a8e581a709de93232b8467ce4ce5a5a43354ec821373fd17b177d14322d10", 8 | "0x12d5be3a4a2b486c988b9a71704afd3ddd65ae34763c76354dca0e60b20c543", 9 | "0x17cbff91a6ccd55c7330b0644d56fe93f8b57f1781945419739030d57630db1", 10 | "0x18e768d8767f82b6c57d53c1ba52246554223aacaf685024514dc5f50863f9f", 11 | "0x524b0cd8cdb612b77155a918475e84d7145406c4315ff58ca929d92d9a9a238", 12 | "0x1a565dd0d008e9257f901eaceae589d1b3e5b7f1c953aa87ab7d2475d8a6739", 13 | "0x45d834b9e55575482c591cffd59166614f42f66717a320d8487e771ab4041e8", 14 | "0x020e956066dfc48621f6a59652347e7bc52005b6b1f247fb922b084a7015ecf", 15 | "0x7afb8f6d52239a4b00890b863c469b5234d30c15326eaa0cf957ee73c0e4060", 16 | "0x57092b41cd50870322e982f98aebae94bc16481b9e34d9d410f6037fe043f8a", 17 | "0x2daa26a7fe21fdd33b1a736131e3941a82dfcca6a5b82c6bd28e319828a68e0", 18 | "0x4c8253608c4056ec0d20726cfa02e03ee2671a8ee0fa22fa5f7e99a179752c6", 19 | "0x3b478af9e081ec6e82a6ca7723e16937cfbfacdb3385ae55e4c24aa49ebd124", 20 | "0x7c442caa8134c3d87c44a140f9b97d146425681a67f5a48c25fd3299ef17e8b", 21 | "0x48f0b4c63aa260df87defb488e12a9636e018b2e19521c5290930efe57f5c5e", 22 | "0x5f6c44fe6070dafde8e98c1728aca5aca87e9e1ae86c23c68e3cb042bc99331", 23 | "0x4b2dfec0bb9b57c4e7ac16cedcd3ff50f381f7ab2df50bcfa3596308f79135a", 24 | "0x7aab79afb3e980f9731d2c86c342c1d89533ed23d08912d0bf41e88103a5b79", 25 | "0x27d5dc0fe31c9125f2ecd34018ab6f31d62b3838592881c96449e3369b001f8", 26 | "0x546455f57f4ee848d3952148e3b94700f387ee2c36730bfeda09379ce8fa509", 27 | "0x08808a106dc9e09c29afd24be7cee31edd9f0d27ce0a3469839ef3d09ddfb43" 28 | ] 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /fixtures/primitives/processed/receipt.json: -------------------------------------------------------------------------------- 1 | { 2 | "key": "0x3d", 3 | "block_number": 5253873, 4 | "proof": [ 5 | 
"0xf8f1a0d6d2f0c5fe4d6e7fdd12d8e7c54fc0adcf1f6ea7760d257cf6ec4c5981ee633ca03542aeb359759f7c229df933b7eeef249766954db3e1aa9f540c098b551d6cb8a06e2b6c1f59bf775a8870c99c9b4b09741e110ee923c75b4d27ad45edcccbe304a0bb3c1c5fdc4e756878af26c10d9cf51f37e28fcb1792fb69642a72b0eaf797a0a0d1c61702ef0e6220e6e58ccaf6fa467d68fc66de7b27170b0a9f3340fe2df621a011a072d1a15faa12da3bd895a2b1c32bfed8e367a5180cc74ce35ecf04aaf1248080a0e58215be848c1293dd381210359d84485553000a82b67410406d183b42adbbdd8080808080808080", 6 | "0xf90211a0e530205ccaf27c0d8789f957e49096e297fe5981a63b04e5b79b23ab2c36b32ea016e81a7e2479e1689cbd6b08af5dc5d36f835bd4950a466803d115afd3a5d354a02b4f82e6cefed7f28e00637d8a14b94caff157ff9f1822b684feaf4275fcb793a0375489e2d9f075c6cfdc81662e16979aec51a1c61e5f864c5ad5dab0751f1612a02a0cd86e40ee0c985634ff5051cf282e5135b4676d53aeb67714fff6b7ae17c7a0b24145c0b3768da491875b981a06b53f0015808059afcee309fba03167fd77e7a020d6c75b171dfcede424073cbc7b112f85ed873c84c08fe1d6416a54af22bc5ba0d5c35c85e465e1770aca637f32621d7d14b45b21bdb824cff993b88fdf27e992a00a372cd2505e5e3ca07a4efe92be0cbd8f77955bd44d42c82df4404a699ddbb9a0ce5cf55a12f3787cd7db5c62ba67720603653e91ccdc50f3096a4d61d9dfcd9ba042481e21c20c93d9516e7f5d61b1f8cb09ca38cf3657968bea6790f07a6f4c21a01843978051fc3d1db3117f020d4eb9f6fecfd179a0e049dba91661742065990ca076e073454d772911f9efd3eb11666f40db58751738f7ae496673f01a0d9829fca0351475bd27c1f155ed7dcf2cd9ff53ba37a0e3d6ab045d508e54dbbd166e0f61a0b9f6d03b51bf2d664d22a27c86336fca7a104142dbc17e6780aaf9c14216b4aba07aa21af16eda12b0d5d48f1d379a06c5c2718530959e65d960a6c27efbf1d47a80", 7 | "0xf901f020b901ec02f901e80184012a70e5b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000000000000000000000000400000000000000000000000000000000000002000000000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000010000000000000200000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200008000400000000000000000000000000000000000000000000f8ddf8db94ab260022803e6735a81256604d73115d663c6b82f842a0ce6d38800f185a3ea05a466d1da359c3aedecb48740391666def25626b8e71caa0000000000000000000000000c7e4a4429b0ead84ced03a02d7a1fea59650c7fcb880000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000124bb779cb647cf9f0bb2c23dd38016ae56601c9a2d0082c0ae9ef4ee7033b8b1a256dbfa98b" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /fixtures/primitives/processed/transaction.json: -------------------------------------------------------------------------------- 1 | { 2 | "key": "0x41", 3 | "block_number": 5253873, 4 | "proof": [ 5 | "0xf8f1a056960f81748c98c602a1c1d27f3657e83f1acdbd4d1e7736b0f918a716cffef9a0dda7c16b6d5d578a983f363db8c09b834ad3d29a04811a60a0e68b31df18cadda006fc877c73828199c0b9376d55b8352f796c8caa684e8516de8490ac4bdf8263a02b5d42a4d4274acaff91416d85cc20c1e173b0f2f2f4567bc06365a88ddd5b97a029e8a23f68b26e50447a7b82d3def519446eea60a69bd9875cb6b6aa664916cda0fb981f417d97ed371fc5502bcb4325e6ad24d1d8bbd3c41f44f3a7916030a70a8080a0bc05fdbc292e7ab5ef20f5124d8cc2978a70bac74f639b552922452020f69fc08080808080808080", 6 | 
"0xf90211a0d826fd0292582b4143469464578eecd69a65c33404090987dfc4040f62c5cb25a06b361c9cf5fc8e9842e5a12748776fb86fc025ff0a97ac5dcf99e31da8d5f3c9a0c34c3c903aebcf9f0a416d8013bc8bb05debcff63306442a4c379b9cb24dcbbca0e95edf64906941e37cd01915e4ea9e892a715d08341de517160f67866d96e793a0c6a17cb7df023443def0c965544a78c31477d3e8aebcf07d1bb05f838b91f5f2a0de500d6585c624d2b1b0915deaac15a8c2ba4388616b8c179eb9d350e16d8b77a071b3368f3ba35dbc32b2421bbee60e88f8f1afdb1faa81e463a7169a0ed13766a01f5deaf61f31e9eeb18dfc9b20838016b532f02557b416c6f85b247ea5a760d2a06b1446941a73e67efa495125c9c8010c80fb497c20f70fb91170bed687187b56a0859419da74ac255585221eb827b48aff2f5c53e202ebefba649cca5e409d1662a0755493d08d1485495c1c11150afd25339a06c4dd9de74c525d21e1e2c8371fcea0752fc30974832441fb836d51d5e1e944ddf7759969f8fc85e79d9c1bc6858d79a0645092e58426159ede0b6000d7638a0b80269cc5e3a355c48021c780d991828ca058ed650822e857f5f1d98f2b5ec59b9da6f71a2819813f342c43d7aba807671aa00ebca32b2a38945099bd47934f7a6b107a34bfd9b8706bac722d14d576108f78a0aee412498109533eef7647b83917747e4431399f681cbf41cc1f8c5751e72fa680", 7 | "0xf9081d20b9081902f9081583aa36a7218459682f0085056dac9303830b3cfd94d3ec28ad6d777f5aa92377294b9b6522c719307902b907a4259198e30000000000000000000000000000000000000000000000a2a15d09519be000000000000000000000000000000000000000000000000000a1836a097e47c3254000000000000000000000000000000000000000000000000000000000000000c00000000000000000000000004c9beb652114b06ff97496563dbff07fba15fe7f000000000000000000000000000000000000000005846b6a0fc27a7e90cc000000000000000000000000000000000000000000000000000000000000000001200000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c3cc76ec127deb0ced031901f1d7ad52ec66f66c00000000000000000000000041d4ef2279fee5a76163ee2a200d42750548ee13000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000000000000000603504e41550100000003b801000000030d020320d4712c53ea67c66d8aa9316be07ad9f7caa4ea6347d5e1716df6ac305c89392d09e89e42ad16490c1d33b3679ec83f8de9dcb3f8768aee4b2a408df44d7101038d088776cd30242e3b6b715364caf99cc7d451b0782b8c8b3114c56a330a33176a6e065340665045a11de616fe67a40471369a7c419eb117b6548576a73495d90104fc8882e80f3976b46a4ffe7eb4e93d2051accc048c47143edbddb36c145e28026f856ecbfa9cc891b959175c7a2a19933e83996598b6ae26db7933d2260354d6010772b12fe71910dde1f817739293981cc94965756a462a7ae0e88987b5577ac21523e2b2b48ad6bc3d5436237b315035edd5ea9831bb7d763fe29ceb17a44bbd7101083c151a7ee7958c47ad3babdcc8b4e7173b96e1ea05b378a155b3f4f89b8643232a0980c711e8d948fd9150cd5a02f2d473a8e4b7c2df641f285fca9f805939c2000ac0cc90d3205495e1851bf3808661512df7f1517873c28f9f2423c10863fcae2054443233ea58c2d4bdb5dc4a4c877215bf8d5c386cd8671096e3995bcb213cc2000bbd1221be004aa1d6bef7f27dee58c32e7ad32035c3981418b4d6848b64284095254fc173c03ad133547dfeb21a81ab595b0586b332d116d0ae3c8c8d1c5774e4010cfbfbef6bef1ba735dc1ee505d47b3e5141202574e51797af7aeda600608874f36ee4d13890fc0b05fd4d836dc36af387b3ded6a8c28cb83b336cf9157db62b74010df038251cca2e24c732af6aef82f5da938943a89fbe2ca6f41807e3c6975279080c5c77b31d1ccff7e28dd6bcb4f9be3f4bc95b194e4990071cf88b5d60a771e0000eef5d4543d82c355c0125bc0049a73e8678ad392f35c9aead131317010483fb751491096e1bb4210fe60338490737348dc3f812a191cd55d10bf48c7705e90a0c000fa7ac8fab36da04bc4af0bbc3f8663b697a122ab57f4456fdd7c98a7612ccbc6426d184a22bca34c77ad9d357422d485d63ecf143f383f5b71830b12280d615d50010f4c93e02f2ba7210940b02d56b3770819a1dbdc080d1222a5924eede8a8e1258611c50bb20671a420f0d95c367d1ec03268d21fe
9adfa5b8cb97bcaea3c7b52301126bda7f2c40d63c601e27601d4681997c42e901f38005ff4e221a2e8b7170cb427f978805618cbc5cdfa0890a09091735fdc49e0cd5c66111daa3c320f2aefc1a0165c683d600000000001ae101faedac5851e32b9b23b5f9411a8c2bac4aae3ed4dd7b811dd1a72ea4aa71000000000263982d01415557560000000000076b8e8d0000271040eb117b31bb068ce3eb3b57c2239137545222a802005500b0948a5e5313200c632b51bb5ca32f6de0d36e9950a942d19751e833f70dabfd0000000005f5f492000000000001387bfffffff80000000065c683d60000000065c683d60000000005f5e84400000000000115070af5b975d38a40cd7111215ae86b307b8c5296159eb04a8769391ba37516bf44f8882566bead65827dff08b10eea8a9ec85d4d648e86dfa7702199a9acdb12ca589afb5b409aafd8fdb174882943e61c13d2e20d716b2f873d9263be5d05a0e9adcc4beab4f4c2f8ea9d9f89c0cb95b9c4bd96c8bd98d86d403b70ac53fa5f40e61e436be7328c2cdf07fa50664a9a21175a02f491cef0234b3115cb9b08f442e82f07596aa89f4a08e4c10e37aa660a21bf01329998a668f706144f4cbeb144dc29c909d5e6a8d01c005500eaa020c61cc479712813461ce153894a96a6c00b21ed0cfc2798d1f9a9e9c94a0000000005f5a6cb000000000000fe3ffffffff80000000065c683d60000000065c683d60000000005f5d84e00000000000091d20a435fe117d09a532ca8e8c41220fa5a62158136fc67bf52c0aa6f7a45343a5a39028037ab0c52e211010a8c52c1ff84681becc14556919d99bb030498c7b9c3606291e6ad9ab9f2c1df3252965949c6942da9f8f7adfa7e49aaa7c45c4bb7daf3c104b6c12e4cc05be7134eaf3c4ee68786bf310a9360b398350cef324cfd75c41eb0d36f9ef38a23d93ac888643d943e5082ac29f4b33d3cf71360ded66117782f07596aa89f4a08e4c10e37aa660a21bf01329998a668f706144f4cbeb144dc29c909d5e6a8d01c0000000000000000000000000000000000000000000000000000000000c001a085b4e7aa9c15b83f4731238354f0b8567bbf3967bc538b0706bfc870fd3419dca07c7d8cae0dd7c35125673c2823c164a2b5f790bcd4288d297a0627777bbacb68" 8 | ] 9 | } 10 | -------------------------------------------------------------------------------- /fixtures/primitives/processed_in_felts/datalake_compute.json: -------------------------------------------------------------------------------- 1 | { 2 | "task_bytes_len": 128, 3 | "encoded_task": [ 4 | "0x4759782a8beea2b2", 5 | "0x75713e5a70b7f6ec", 6 | "0x8e44c49d1ffcdbc8", 7 | "0x4bf875c80f26f527", 8 | "0x0", 9 | "0x0", 10 | "0x0", 11 | "0x300000000000000", 12 | "0x0", 13 | "0x0", 14 | "0x0", 15 | "0x0", 16 | "0x0", 17 | "0x0", 18 | "0x0", 19 | "0x0" 20 | ], 21 | "datalake_bytes_len": 224, 22 | "encoded_datalake": [ 23 | "0x0", 24 | "0x0", 25 | "0x0", 26 | "0x0", 27 | "0x0", 28 | "0x0", 29 | "0x0", 30 | "0xda06500000000000", 31 | "0x0", 32 | "0x0", 33 | "0x0", 34 | "0xe006500000000000", 35 | "0x0", 36 | "0x0", 37 | "0x0", 38 | "0x400000000000000", 39 | "0x0", 40 | "0x0", 41 | "0x0", 42 | "0xa000000000000000", 43 | "0x0", 44 | "0x0", 45 | "0x0", 46 | "0x1600000000000000", 47 | "0xd30603936f2c7f02", 48 | "0xf5986a6c3a6b73aa", 49 | "0xd43640f712", 50 | "0x0" 51 | ], 52 | "datalake_type": 0, 53 | "property_type": 2 54 | } 55 | -------------------------------------------------------------------------------- /fixtures/primitives/processed_in_felts/header.json: -------------------------------------------------------------------------------- 1 | { 2 | "rlp": [ 3 | "0x5731f5ca06702f9", 4 | "0x8a7bca01020fd69d", 5 | "0x76f0a82454ca70a2", 6 | "0x4aa7547c016d42db", 7 | "0x4dcc1da0ce56ef82", 8 | "0xb585ab7a5dc7dee8", 9 | "0x4512d31ad4ccb667", 10 | "0x42a1f013748a941b", 11 | "0x58ff944793d440fd", 12 | "0xc0bc422e7ca646d7", 13 | "0x37886e40583f6b7d", 14 | "0xbd19817257a083e8", 15 | "0x472fdf582cafe976", 16 | "0xa37221433f547fc4", 17 | "0x384dc31f35c24370", 18 | "0x84b9caf9a0362470", 19 | "0xaf9d80855bd8359d", 20 | "0xe48b91b4d5603bbe", 21 | 
"0x134c86c0e3c1ff7c", 22 | "0x9f200aa0a6311cb4", 23 | "0xef653a4e0c992f05", 24 | "0x5db9b451eb71a2e0", 25 | "0x34c707a3ff386e07", 26 | "0x1b97ab532086d", 27 | "0xf04c08a04e932a8b", 28 | "0x9607b1f9a30c5124", 29 | "0x8324d1005e8af5b9", 30 | "0x32a8ef42acdeb632", 31 | "0x650458a47ea14662", 32 | "0xc38ead667189b18b", 33 | "0x23aba7222610844a", 34 | "0x600430aea1143025", 35 | "0x940eb7828a918eb0", 36 | "0xc9005d025b4eaac1", 37 | "0x5c64d58da47ee4b4", 38 | "0x39549e4a6df8da22", 39 | "0x3a4343266a8a7dcf", 40 | "0x3da962242df2a8b4", 41 | "0x4ec73f21cc2508a0", 42 | "0x146ddc805df3000b", 43 | "0x2539968db4c05553", 44 | "0xc6b0009a03530dbc", 45 | "0x1a4b4943a5e135e8", 46 | "0x401f7c8c98d2f1f1", 47 | "0xbef5e62ab626196f", 48 | "0x1eca1520e424e06a", 49 | "0xa4bc68069448661d", 50 | "0x6314fc072ae1f32d", 51 | "0x851ea3a871c9241", 52 | "0x86b0d6bf29ce0367", 53 | "0x3520e617145192b2", 54 | "0xdb79411c3f9ee86", 55 | "0x2cb078e684707ca4", 56 | "0x874a82a7303c6125", 57 | "0x7380c7aab60451ad", 58 | "0x157e3423c70b6a82", 59 | "0xc90184de06508380", 60 | "0x84938c70018480c3", 61 | "0x656f479a28bcc465", 62 | "0x6f70655320696c72", 63 | "0x6f7065422d61696c", 64 | "0xa06639512061696c", 65 | "0x58171cd0cc212a6f", 66 | "0xf4a75dd927e695b", 67 | "0xe0bacdcbbef89510", 68 | "0xc37a1f44fd8764cd", 69 | "0x88", 70 | "0xa04b72b81c128500", 71 | "0xa5a8dec5bda73e2b", 72 | "0xb47a9b921c944d63", 73 | "0x96f6fd82a41ee6b3", 74 | "0xdf48560fa806a87d", 75 | "0xb4048400000c83", 76 | "0x362e7c322144a000", 77 | "0xdb76b4104e4a7710", 78 | "0xe2d4cd3f0da017a0", 79 | "0x57d83d892538d1f2", 80 | "0xd51d" 81 | ], 82 | "rlp_bytes_len": 618, 83 | "proof": { 84 | "leaf_idx": 1404667, 85 | "mmr_path": [ 86 | "0x1d550d316e3449e6fe1d1ef0105eb0b1dc68c1fe0aa7b98e790c0d9b21e3b64", 87 | "0x38a8e581a709de93232b8467ce4ce5a5a43354ec821373fd17b177d14322d10", 88 | "0x12d5be3a4a2b486c988b9a71704afd3ddd65ae34763c76354dca0e60b20c543", 89 | "0x17cbff91a6ccd55c7330b0644d56fe93f8b57f1781945419739030d57630db1", 90 | "0x18e768d8767f82b6c57d53c1ba52246554223aacaf685024514dc5f50863f9f", 91 | "0x524b0cd8cdb612b77155a918475e84d7145406c4315ff58ca929d92d9a9a238", 92 | "0x1a565dd0d008e9257f901eaceae589d1b3e5b7f1c953aa87ab7d2475d8a6739", 93 | "0x45d834b9e55575482c591cffd59166614f42f66717a320d8487e771ab4041e8", 94 | "0x020e956066dfc48621f6a59652347e7bc52005b6b1f247fb922b084a7015ecf", 95 | "0x7afb8f6d52239a4b00890b863c469b5234d30c15326eaa0cf957ee73c0e4060", 96 | "0x57092b41cd50870322e982f98aebae94bc16481b9e34d9d410f6037fe043f8a", 97 | "0x2daa26a7fe21fdd33b1a736131e3941a82dfcca6a5b82c6bd28e319828a68e0", 98 | "0x4c8253608c4056ec0d20726cfa02e03ee2671a8ee0fa22fa5f7e99a179752c6", 99 | "0x3b478af9e081ec6e82a6ca7723e16937cfbfacdb3385ae55e4c24aa49ebd124", 100 | "0x7c442caa8134c3d87c44a140f9b97d146425681a67f5a48c25fd3299ef17e8b", 101 | "0x48f0b4c63aa260df87defb488e12a9636e018b2e19521c5290930efe57f5c5e", 102 | "0x5f6c44fe6070dafde8e98c1728aca5aca87e9e1ae86c23c68e3cb042bc99331", 103 | "0x4b2dfec0bb9b57c4e7ac16cedcd3ff50f381f7ab2df50bcfa3596308f79135a", 104 | "0x7aab79afb3e980f9731d2c86c342c1d89533ed23d08912d0bf41e88103a5b79", 105 | "0x27d5dc0fe31c9125f2ecd34018ab6f31d62b3838592881c96449e3369b001f8", 106 | "0x546455f57f4ee848d3952148e3b94700f387ee2c36730bfeda09379ce8fa509", 107 | "0x08808a106dc9e09c29afd24be7cee31edd9f0d27ce0a3469839ef3d09ddfb43" 108 | ] 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /fixtures/primitives/processed_in_felts/receipt.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "key": "0x3d", 3 | "block_number": 5253873, 4 | "proof_bytes_len": [243, 532, 499], 5 | "proof": [ 6 | [ 7 | "0xfec5f0d2d6a0f1f8", 8 | "0xc5e7d812dd7f6e4d", 9 | "0x76a76e1fcfadc04f", 10 | "0x81594cecf67c250d", 11 | "0xb3ae4235a03c63ee", 12 | "0x33f99d227c9f7559", 13 | "0x4d95669724efeeb7", 14 | "0x8b090c549faae1b3", 15 | "0x6c2b6ea0b86c1d55", 16 | "0xc970885a77bf591f", 17 | "0xe111e74094b9b9c", 18 | "0x45ad274d5bc723e9", 19 | "0x3cbba004e3cbcced", 20 | "0xaf7868754edc5f1c", 21 | "0xe2371ff59c0dc126", 22 | "0x2a6469fb9217cb8f", 23 | "0xd1a0a097f7eab072", 24 | "0xe620620eef0217c6", 25 | "0x687d46faf6ca8ce5", 26 | "0xa0b17277bde66fc", 27 | "0xa021f62dfe40339f", 28 | "0x12aa5fa1d172a011", 29 | "0x2bc3b1a295d83bda", 30 | "0xc70c18a567e3d8fe", 31 | "0x24f1aa04cf5ee34c", 32 | "0x84be1582e5a08080", 33 | "0x35101238dd93128c", 34 | "0x820a00535548849d", 35 | "0x423b186d401074b6", 36 | "0x8080808080ddbbad", 37 | "0x808080" 38 | ], 39 | [ 40 | "0x5c2030e5a01102f9", 41 | "0x57f989870d7cf2ca", 42 | "0x8159fe97e29690e4", 43 | "0xab239bb7e5043ba6", 44 | "0x1ae816a02eb3362c", 45 | "0x6bbd9c68e179247e", 46 | "0x5b836fd3c55daf08", 47 | "0x15d10368460a95d4", 48 | "0x4f2ba054d3a5d3af", 49 | "0x8ef2d7fecee682", 50 | "0xf1af4cb9148a7d63", 51 | "0xfe84b622189fff57", 52 | "0x37a093b7fc7542af", 53 | "0xcfc675f0d9e28954", 54 | "0xec9a97162e6681dc", 55 | "0x5a4c865f1ec6a151", 56 | "0xa012161f75b0dad5", 57 | "0x980cee406ed80c2a", 58 | "0x2e28cf5150ff3456", 59 | "0xb6ae536d67b43551", 60 | "0xc717aeb7f6ff1477", 61 | "0x8d76b3c04541b2a0", 62 | "0xb5061a985b8791a4", 63 | "0xceaf59808015003f", 64 | "0x77fd6731a0fb09e3", 65 | "0x1d175bc7d620a0e7", 66 | "0x7bbc3c0724e4edfc", 67 | "0xc0843c87ed852f11", 68 | "0x22af546a41d6e18f", 69 | "0xe4855cc3d5a05bbc", 70 | "0x327f63ca0a77e165", 71 | "0xbd215bb4147d1d62", 72 | "0xdf8fb893f9cf24b8", 73 | "0xd22c370aa092e927", 74 | "0xfe4e7aa03c5e5e50", 75 | "0x5b95778fbd0cbe92", 76 | "0x4a40f42dc8424dd4", 77 | "0xf55ccea0b9db9d69", 78 | "0x5cdbd77c78f3125a", 79 | "0x3e6503067267ba62", 80 | "0x4d6a09f350dccc91", 81 | "0x4842a09bcddfd961", 82 | "0x6e51d9930cc2211e", 83 | "0xca09cbf8b1615d7f", 84 | "0x67ea8b965736cf38", 85 | "0x18a0214c6f7af090", 86 | "0xb31d3dfc51809743", 87 | "0xfef6b94e0d027f11", 88 | "0xa9db49e0a079d1cf", 89 | "0xa00c996520746116", 90 | "0x1129774d4573e076", 91 | "0x406f6611ebd3eff9", 92 | "0x49aef738177558db", 93 | "0xfc29980d1af07366", 94 | "0xf1c127bd751435a0", 95 | "0x53ffd92ccf7ded55", 96 | "0x5d04abd6e3a037ba", 97 | "0xf6e16bddb548e50", 98 | "0xbf513bd0f6b9a061", 99 | "0x33867ca2224d662d", 100 | "0xc1db4241107aca6f", 101 | "0x1642c1f9aa80677e", 102 | "0x6ef11aa27aa0abb4", 103 | "0x371d8fd4d5b012da", 104 | "0x95308571c2c5069a", 105 | "0xfb7ec2a660d9659e", 106 | "0x807ad4f1" 107 | ], 108 | [ 109 | "0x2ec01b920f001f9", 110 | "0x702a018401e801f9", 111 | "0x1b9e5", 112 | "0x0", 113 | "0x0", 114 | "0x0", 115 | "0x0", 116 | "0x0", 117 | "0x0", 118 | "0x800000", 119 | "0x0", 120 | "0x0", 121 | "0x400000000", 122 | "0x0", 123 | "0x200000000000000", 124 | "0x0", 125 | "0x0", 126 | "0x20000", 127 | "0x0", 128 | "0x0", 129 | "0x0", 130 | "0x100000000000", 131 | "0x2000000000", 132 | "0x0", 133 | "0x0", 134 | "0x0", 135 | "0x0", 136 | "0x0", 137 | "0x0", 138 | "0x0", 139 | "0x0", 140 | "0x40008002000", 141 | "0x0", 142 | "0x0", 143 | "0xdbf8ddf800000000", 144 | "0x673e80220026ab94", 145 | "0x11734d605612a835", 146 | "0xa042f8826b3c665d", 147 | "0x3e5a180f80386dce", 148 | 
"0xc359a31d6d465aa0", 149 | "0x6691037448cbdeae", 150 | "0xca718e6b6225ef6d", 151 | "0xa0", 152 | "0xa4e4c70000000000", 153 | "0x3ad0ce84ad0e9b42", 154 | "0xc75096a5fea1d702", 155 | "0x80b8fc", 156 | "0x0", 157 | "0x0", 158 | "0x0", 159 | "0x10000", 160 | "0x0", 161 | "0x0", 162 | "0x0", 163 | "0x0", 164 | "0x0", 165 | "0x0", 166 | "0xb74b120000000000", 167 | "0x2cbbf0f97c64cb79", 168 | "0x166e56a0138dd23", 169 | "0xefe90a2c08d0a2c9", 170 | "0x6d251a8b3b03e74e", 171 | "0x8ba9bf" 172 | ] 173 | ] 174 | } 175 | -------------------------------------------------------------------------------- /fixtures/primitives/uint256.json: -------------------------------------------------------------------------------- 1 | { 2 | "low": "0x53416ee9311f9a8f7efbc5984b3bceed", 3 | "high": "0x476ce5db235461146bb610521645999" 4 | } -------------------------------------------------------------------------------- /hdp/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hdp" 3 | description = "All Herodotus Data Processor" 4 | version.workspace = true 5 | edition.workspace = true 6 | license-file.workspace = true 7 | authors.workspace = true 8 | repository.workspace = true 9 | homepage.workspace = true 10 | exclude.workspace = true 11 | keywords.workspace = true 12 | categories.workspace = true 13 | readme = "../README.md" 14 | 15 | [dependencies] 16 | anyhow = { workspace = true } 17 | tempfile = { workspace = true } 18 | tracing = { workspace = true } 19 | regex = { workspace = true } 20 | alloy = { workspace = true } 21 | alloy-rlp = { workspace = true } 22 | cairo-lang-starknet-classes = { workspace = true } 23 | serde = { workspace = true } 24 | serde_with = { workspace = true } 25 | serde_json = { workspace = true } 26 | starknet-crypto = { workspace = true } 27 | starknet-types-core = { workspace = true } 28 | starknet = { workspace = true } 29 | thiserror.workspace = true 30 | alloy-merkle-tree = { workspace = true } 31 | tokio = { workspace = true } 32 | futures = { workspace = true } 33 | reqwest = { workspace = true } 34 | lazy_static = { workspace = true } 35 | eth-trie-proofs = { workspace = true } 36 | itertools = { workspace = true } 37 | sn-trie-proofs = { workspace = true } 38 | 39 | [features] 40 | default = [] 41 | test_utils = [] 42 | 43 | [dev-dependencies] 44 | criterion = { version = "0.4", features = [ 45 | "async", 46 | "async_futures", 47 | "html_reports", 48 | ] } 49 | dotenv = "0.15.0" 50 | 51 | [[bench]] 52 | name = "provider_benchmark" 53 | path = "../benches/provider_benchmark.rs" 54 | harness = false 55 | required-features = ["test_utils"] 56 | -------------------------------------------------------------------------------- /hdp/src/cairo_runner/input/dry_run.rs: -------------------------------------------------------------------------------- 1 | //! The input for the dry-runner. 2 | //! This serialized struct will be passed to the dry-runner(cairo-run) as input.json file. 
3 | 4 | use crate::primitives::processed_types::cairo_format; 5 | use serde::Serialize; 6 | use serde_with::serde_as; 7 | use std::path::PathBuf; 8 | 9 | #[serde_as] 10 | #[derive(Serialize)] 11 | pub struct DryRunnerProgramInput { 12 | pub dry_run_output_path: PathBuf, 13 | pub modules: Vec<cairo_format::DryRunProcessedModule>, 14 | } 15 | 16 | impl DryRunnerProgramInput { 17 | pub fn new( 18 | dry_run_output_path: PathBuf, 19 | modules: Vec<cairo_format::DryRunProcessedModule>, 20 | ) -> Self { 21 | // TODO: temporary check to ensure only one module is passed 22 | if modules.len() != 1 { 23 | panic!("Currently DryRunnerProgramInput only supports a single module"); 24 | } 25 | Self { 26 | dry_run_output_path, 27 | modules, 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /hdp/src/cairo_runner/input/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dry_run; 2 | -------------------------------------------------------------------------------- /hdp/src/cairo_runner/mod.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | use thiserror::Error; 3 | 4 | pub mod dry_run; 5 | pub mod input; 6 | pub mod run; 7 | 8 | #[derive(Error, Debug)] 9 | pub enum CairoRunnerError { 10 | #[error("Error while running the cairo program")] 11 | CairoRunError, 12 | 13 | #[error("Error while parsing json: {0}")] 14 | ParseError(#[from] serde_json::Error), 15 | 16 | #[error("Input file is empty")] 17 | EmptyInput, 18 | 19 | #[error("Error during temp file creation: {0}")] 20 | TempFileError(#[from] std::io::Error), 21 | 22 | #[error("Error while converting to alloy type: {0}")] 23 | ConvertToAlloyError(#[from] alloy::primitives::ruint::ParseError), 24 | 25 | #[error("Error while parsing int: {0}")] 26 | ParseIntError(#[from] std::num::ParseIntError), 27 | 28 | #[error("Result root not found")] 29 | ResultRootNotFound, 30 | 31 | #[error("General error: {0}")] 32 | GeneralError(#[from] anyhow::Error), 33 | } 34 | 35 | /// Compatible with the cairo-run command 36 | pub fn cairo_run( 37 | program_path: &Path, 38 | input_string: String, 39 | pie_file_path: Option<&PathBuf>, 40 | is_proof_mode: bool, 41 | ) -> Result<run::RunResult, CairoRunnerError> { 42 | let cairo_runner = run::Runner::new(program_path); 43 | cairo_runner.run(input_string, pie_file_path, is_proof_mode) 44 | } 45 | 46 | /// Compatible with the cairo-run command, performs a dry run 47 | pub fn cairo_dry_run( 48 | program_path: PathBuf, 49 | input_string: String, 50 | output_file_path: Option<PathBuf>, 51 | ) -> Result<dry_run::DryRunResult, CairoRunnerError> { 52 | let dry_runner = dry_run::DryRunner::new(program_path, output_file_path); 53 | dry_runner.run(input_string) 54 | } 55 | -------------------------------------------------------------------------------- /hdp/src/cairo_runner/run.rs: -------------------------------------------------------------------------------- 1 | use crate::constant::SOUND_CAIRO_RUN_OUTPUT_FILE; 2 | use alloy::primitives::{B256, U256}; 3 | use regex::Regex; 4 | use serde::{Deserialize, Serialize}; 5 | use std::fs; 6 | use std::path::{Path, PathBuf}; 7 | use std::process::{Command, Stdio}; 8 | use tempfile::NamedTempFile; 9 | use tracing::info; 10 | 11 | use crate::cairo_runner::CairoRunnerError; 12 | 13 | /// Result of a run: the optional PIE output path and the parsed program output 14 | #[derive(Debug)] 15 | pub struct RunResult { 16 | pub pie_path: Option<PathBuf>, 17 | pub cairo_run_output: CairoRunOutput, 18 | } 19 | 20 | #[derive(Debug, Serialize, Deserialize)] 21 | pub struct CairoRunOutput { 22 | pub tasks_root: B256, 23 | pub results_root: B256, 24 | pub results: Vec<U256>, 25 | } 26 | 27 |
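// Editor's usage sketch for the `cairo_run` helper defined in the parent module,
// showing how `RunResult` and `CairoRunOutput` surface to a caller. Paths are
// hypothetical and not part of the crate:
//
// use hdp::cairo_runner::{cairo_run, CairoRunnerError};
// use std::path::PathBuf;
//
// fn example() -> Result<(), CairoRunnerError> {
//     let program = PathBuf::from("build/hdp.json"); // DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE
//     let input = std::fs::read_to_string("input.json")?; // pre-serialized program input
//     let pie_out = PathBuf::from("cairo.pie"); // PIE output, required in non-proof mode
//     let run_result = cairo_run(&program, input, Some(&pie_out), false)?;
//     println!("results_root: {}", run_result.cairo_run_output.results_root);
//     Ok(())
// }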
pub struct Runner { 28 | program_path: PathBuf, 29 | } 30 | 31 | impl Runner { 32 | pub fn new(program_path: &Path) -> Self { 33 | Self { 34 | program_path: program_path.to_path_buf(), 35 | } 36 | } 37 | 38 | fn _run( 39 | &self, 40 | input_file_path: &Path, 41 | cairo_pie_file_path: Option<&PathBuf>, 42 | is_proof_mode: bool, 43 | ) -> Result<String, CairoRunnerError> { 44 | let task = if is_proof_mode { 45 | Command::new("cairo-run") 46 | .arg("--program") 47 | .arg(&self.program_path) 48 | .arg("--layout") 49 | .arg("starknet_with_keccak") 50 | .arg("--program_input") 51 | .arg(input_file_path) 52 | .arg("--proof_mode") 53 | .arg("--print_output") 54 | .arg("--print_info") 55 | .stdout(Stdio::piped()) 56 | .spawn()? 57 | } else { 58 | Command::new("cairo-run") 59 | .arg("--program") 60 | .arg(&self.program_path) 61 | .arg("--layout") 62 | .arg("starknet_with_keccak") 63 | .arg("--program_input") 64 | .arg(input_file_path) 65 | .arg("--cairo_pie_output") 66 | .arg(cairo_pie_file_path.expect("pie file should be specified in non-proof mode")) 67 | .arg("--print_output") 68 | .arg("--print_info") 69 | .stdout(Stdio::piped()) 70 | .spawn()? 71 | }; 72 | 73 | let output = task.wait_with_output().expect("Failed to read stdout"); 74 | let output_str = String::from_utf8_lossy(&output.stdout); 75 | Ok(output_str.to_string()) 76 | } 77 | 78 | /// Run the cairo program, returning the PIE object and the results of the process 79 | pub fn run( 80 | &self, 81 | input_string: String, 82 | pie_file_path: Option<&PathBuf>, 83 | is_proof_mode: bool, 84 | ) -> Result<RunResult, CairoRunnerError> { 85 | if input_string.is_empty() { 86 | return Err(CairoRunnerError::EmptyInput); 87 | } 88 | 89 | let input_file = NamedTempFile::new()?; 90 | let input_file_path = input_file.path(); 91 | fs::write(input_file_path, input_string).expect("Failed to write input file"); 92 | 93 | let output = self._run(input_file_path, pie_file_path, is_proof_mode)?; 94 | let cairo_run_output = 95 | self.parse_run(output, &PathBuf::from(SOUND_CAIRO_RUN_OUTPUT_FILE))?; 96 | info!("cairo run output: {:#?}", cairo_run_output); 97 | 98 | fs::remove_file(SOUND_CAIRO_RUN_OUTPUT_FILE) 99 | .expect("Failed to remove cairo run output file"); 100 | 101 | Ok(RunResult { 102 | pie_path: pie_file_path.cloned(), 103 | cairo_run_output, 104 | }) 105 | } 106 | 107 | /// Parse the output of the run command 108 | fn parse_run( 109 | &self, 110 | output: String, 111 | cairo_run_output_path: &PathBuf, 112 | ) -> Result<CairoRunOutput, CairoRunnerError> { 113 | let number_of_steps = 114 | Regex::new(r"Number of steps: (\d+)").expect("Failed to create regex"); 115 | if let Some(number_of_steps_caps) = number_of_steps.captures(&output) { 116 | let number_of_steps = number_of_steps_caps[1].parse::<usize>()?; 117 | info!("number of steps: {:#?}", number_of_steps); 118 | let cairo_run_output_from_file = fs::read_to_string(cairo_run_output_path) 119 | .expect("Failed to read cairo run output file"); 120 | let cairo_run_output: CairoRunOutput = 121 | serde_json::from_str(&cairo_run_output_from_file) 122 | .expect("Failed to parse cairo run output"); 123 | Ok(cairo_run_output) 124 | } else { 125 | Err(CairoRunnerError::CairoRunError) 126 | } 127 | } 128 | } 129 | 130 | #[cfg(test)] 131 | mod tests { 132 | use super::*; 133 | 134 | #[test] 135 | fn test_cairo_run_output() { 136 | let cairo_run_output_str = r#"{"tasks_root": "0x25bdd48e6c00a86eef6c08afb935635652d246bdf07f54e3ef7c81c29e763fe2", "results_root": "0xbe7bb3e8d053273c753c752107b0b528a24a03058ae989c1e0a9d920c96da753", "results": ["0x0000000000000000000000000000000000000000000000103557b1b802c24c17"]}"#; 137 |
let cairo_run_output: CairoRunOutput = 138 | serde_json::from_str(cairo_run_output_str).expect("Failed to parse cairo run output"); 139 | println!("Cairo run output: {:#?}", cairo_run_output); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /hdp/src/constant.rs: -------------------------------------------------------------------------------- 1 | pub const DRY_CAIRO_RUN_OUTPUT_FILE: &str = "dry_run_output.json"; 2 | pub const SOUND_CAIRO_RUN_OUTPUT_FILE: &str = "cairo_run_output.json"; 3 | pub const DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE: &str = "build/contract_dry_run.json"; 4 | pub const DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE: &str = "build/hdp.json"; 5 | 6 | // Internal Herodotus services URLs 7 | pub const HERODOTUS_RS_INDEXER_URL: &str = 8 | "https://rs-indexer.api.herodotus.cloud/accumulators/proofs"; 9 | pub const HERODOTUS_RS_INDEXER_STAGING_URL: &str = 10 | "https://staging.rs-indexer.api.herodotus.cloud/accumulators/proofs"; 11 | pub const HERODOTUS_PROGRAM_REGISTRY_URL: &str = 12 | "http://program-registery.api.herodotus.cloud/get-program?program_hash"; 13 | -------------------------------------------------------------------------------- /hdp/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! The Data Processor CLI serves as an essential tool for developers working with Cairo programs and zkVM environments. 2 | //! Its primary function is to translate human-readable requests into a format compatible with Cairo programs, 3 | //! enabling commands to be executed over the Cairo VM and generating executable outputs. 4 | //! This transformation is a crucial preprocessing step that prepares data for off-chain computations in zkVM environments. 5 | 6 | pub mod cairo_runner; 7 | pub mod constant; 8 | pub mod hdp_run; 9 | pub mod preprocessor; 10 | pub mod primitives; 11 | pub mod processor; 12 | pub mod provider; 13 | 14 | pub use hdp_run::run; 15 | -------------------------------------------------------------------------------- /hdp/src/preprocessor/compile/config.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, path::PathBuf}; 2 | 3 | #[cfg(feature = "test_utils")] 4 | use crate::constant::DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE; 5 | use crate::{primitives::ChainId, provider::config::ProviderConfig}; 6 | 7 | pub struct CompilerConfig { 8 | // dry-run program path 9 | pub dry_run_program_path: PathBuf, 10 | pub save_fetch_keys_file: Option<PathBuf>, 11 | // chain_id => provider config 12 | pub provider_config: HashMap<ChainId, ProviderConfig>, 13 | } 14 | 15 | impl CompilerConfig { 16 | pub fn with_dry_run_program_path(self, dry_run_program_path: PathBuf) -> Self { 17 | Self { 18 | dry_run_program_path, 19 | provider_config: self.provider_config, 20 | save_fetch_keys_file: self.save_fetch_keys_file, 21 | } 22 | } 23 | } 24 | 25 | // Default config for the compiler, used only for testing 26 | #[cfg(feature = "test_utils")] 27 | impl Default for CompilerConfig { 28 | fn default() -> Self { 29 | CompilerConfig { 30 | dry_run_program_path: PathBuf::from(DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE), 31 | provider_config: [( 32 | ProviderConfig::default().chain_id, 33 | ProviderConfig::default(), 34 | )] 35 | .into(), 36 | save_fetch_keys_file: None, 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /hdp/src/preprocessor/compile/module.rs: -------------------------------------------------------------------------------- 1 | //!
Preprocessor is responsible for identifying the required values. 2 | //! This will be the most abstract layer of the preprocessor. 3 | 4 | use crate::cairo_runner::dry_run::DryRunResult; 5 | use crate::cairo_runner::{cairo_dry_run, input::dry_run::DryRunnerProgramInput}; 6 | use crate::constant::DRY_CAIRO_RUN_OUTPUT_FILE; 7 | use crate::primitives::processed_types::cairo_format; 8 | use crate::primitives::task::ExtendedModule; 9 | use crate::provider::key::categorize_fetch_keys; 10 | use crate::provider::traits::new_provider_from_config; 11 | use core::panic; 12 | 13 | use std::collections::{HashMap, HashSet}; 14 | use std::path::PathBuf; 15 | use tracing::info; 16 | 17 | use super::config::CompilerConfig; 18 | use super::{Compilable, CompilationResult, CompileError}; 19 | 20 | pub type ModuleVec = Vec<ExtendedModule>; 21 | 22 | impl Compilable for ModuleVec { 23 | async fn compile( 24 | &self, 25 | compile_config: &CompilerConfig, 26 | ) -> Result<CompilationResult, CompileError> { 27 | // Log the target task for debugging purposes 28 | info!("target task: {:#?}", self[0].task); 29 | let dry_run_program_path = compile_config.dry_run_program_path.clone(); 30 | 31 | // 1. Generate input for the dry run based on the extended modules 32 | let dry_run_input = 33 | generate_input(self.to_vec(), PathBuf::from(DRY_CAIRO_RUN_OUTPUT_FILE)).await?; 34 | let input_string = 35 | serde_json::to_string_pretty(&dry_run_input).expect("Failed to serialize module class"); 36 | 37 | // 2. Run the dry run and retrieve the fetch points 38 | info!("2. Running dry-run... "); 39 | let dry_run_results: DryRunResult = cairo_dry_run( 40 | dry_run_program_path, 41 | input_string, 42 | compile_config.save_fetch_keys_file.clone(), 43 | )?; 44 | 45 | // Check if the program hash matches the expected hash 46 | if dry_run_results[0].program_hash != self[0].task.program_hash { 47 | return Err(CompileError::ClassHashMismatch); 48 | } 49 | 50 | // Ensure only one module is supported 51 | if dry_run_results.len() != 1 { 52 | panic!("Multiple Modules are not supported"); 53 | } 54 | 55 | // Extract the dry run module result 56 | let dry_run_module = dry_run_results.into_iter().next().unwrap(); 57 | let commit_results = vec![dry_run_module.result.into()]; 58 | 59 | // 3. Categorize fetch keys by chain ID 60 | let categorized_keys = categorize_fetch_keys(dry_run_module.fetch_keys); 61 | 62 | // Initialize maps to store fetched proofs grouped by chain ID 63 | let mut accounts_map = HashMap::new(); 64 | let mut storages_map = HashMap::new(); 65 | let mut transactions_map = HashMap::new(); 66 | let mut transaction_receipts_map = HashMap::new(); 67 | let mut mmr_header_map = HashMap::new(); 68 | 69 | info!("3. Fetching proofs from provider..."); 70 | // Loop through each chain ID and fetch proofs 71 | for (chain_id, keys) in categorized_keys { 72 | info!("target provider chain id: {}", chain_id); 73 | let target_provider_config = compile_config 74 | .provider_config 75 | .get(&chain_id) 76 | .expect("target task's chain has not been configured"); 77 | let provider = new_provider_from_config(target_provider_config); 78 | 79 | // TODO: handle starknet 80 | let results = provider 81 | .fetch_proofs_from_keys(keys) 82 | .await?
83 | .get_evm_proofs() 84 | .unwrap(); 85 | 86 | // Update the maps with fetched results 87 | mmr_header_map.insert( 88 | chain_id.to_numeric_id(), 89 | HashSet::from_iter(results.mmr_with_headers.into_iter()), 90 | ); 91 | accounts_map.insert( 92 | chain_id.to_numeric_id(), 93 | HashSet::from_iter(results.accounts.into_iter()), 94 | ); 95 | storages_map.insert( 96 | chain_id.to_numeric_id(), 97 | HashSet::from_iter(results.storages.into_iter()), 98 | ); 99 | transactions_map.insert( 100 | chain_id.to_numeric_id(), 101 | HashSet::from_iter(results.transactions.into_iter()), 102 | ); 103 | transaction_receipts_map.insert( 104 | chain_id.to_numeric_id(), 105 | HashSet::from_iter(results.transaction_receipts.into_iter()), 106 | ); 107 | } 108 | 109 | // Create and return the compilation result containing all relevant proofs 110 | let compiled_result = CompilationResult::new( 111 | commit_results, 112 | mmr_header_map, 113 | accounts_map, 114 | storages_map, 115 | transactions_map, 116 | transaction_receipts_map, 117 | ); 118 | Ok(compiled_result) 119 | } 120 | } 121 | 122 | /// Generate the input structure for the preprocessor that needs to be passed to the runner 123 | async fn generate_input( 124 | extended_modules: Vec<ExtendedModule>, 125 | identified_keys_file: PathBuf, 126 | ) -> Result<DryRunnerProgramInput, CompileError> { 127 | // Convert each extended module into a dry-run input module 128 | let mut collected_results = Vec::new(); 129 | for module in extended_modules { 130 | let input_module = 131 | cairo_format::DryRunProcessedModule::new(module.task.inputs, module.module_class); 132 | collected_results.push(input_module); 133 | } 134 | 135 | Ok(DryRunnerProgramInput::new( 136 | identified_keys_file, 137 | collected_results, 138 | )) 139 | } 140 | -------------------------------------------------------------------------------- /hdp/src/preprocessor/compile/task.rs: -------------------------------------------------------------------------------- 1 | use crate::primitives::task::TaskEnvelope; 2 | 3 | use super::{config::CompilerConfig, Compilable, CompilationResult, CompileError}; 4 | 5 | impl Compilable for Vec<TaskEnvelope> { 6 | async fn compile( 7 | &self, 8 | compile_config: &CompilerConfig, 9 | ) -> Result<CompilationResult, CompileError> { 10 | let (datalakes, modules) = TaskEnvelope::divide_tasks(self.to_vec()); 11 | let mut compiled_result = if !datalakes.is_empty() { 12 | datalakes.compile(compile_config).await? 13 | } else { 14 | CompilationResult::default() 15 | }; 16 | 17 | let module_compile_result = if !modules.is_empty() { 18 | modules.compile(compile_config).await? 19 | } else { 20 | CompilationResult::default() 21 | }; 22 | compiled_result.extend(module_compile_result); 23 | if compiled_result == CompilationResult::default() { 24 | Err(CompileError::CompilationFailed) 25 | } else { 26 | Ok(compiled_result) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /hdp/src/preprocessor/module_compile.rs: -------------------------------------------------------------------------------- 1 | //! This file sketches out compilation logic that considers only module tasks. 2 | //! We have already confirmed the direction of deprecating datalakes; the current ./compile file 3 | //! causes too much overhead in terms of abstraction around the legacy types we supported. 4 | //!
Ideally, this file will later replace ./compile. 5 | 6 | use alloy::primitives::U256; 7 | use std::collections::HashMap; 8 | use std::path::PathBuf; 9 | use tracing::info; 10 | 11 | use crate::{ 12 | cairo_runner::{cairo_dry_run, dry_run::DryRunResult, input::dry_run::DryRunnerProgramInput}, 13 | constant::DRY_CAIRO_RUN_OUTPUT_FILE, 14 | primitives::{ 15 | processed_types::{block_proofs::ProcessedBlockProofs, cairo_format}, 16 | task::ExtendedModule, 17 | ChainId, 18 | }, 19 | provider::{key::categorize_fetch_keys, traits::new_provider_from_config}, 20 | }; 21 | 22 | use super::compile::{config::CompilerConfig, CompileError}; 23 | 24 | #[derive(Debug, Default, PartialEq)] 25 | pub struct ModuleCompilationResult { 26 | /// results of tasks 27 | pub task_results: Vec<U256>, 28 | /// proofs, grouped by chain id 29 | pub proofs: HashMap<ChainId, ProcessedBlockProofs>, 30 | } 31 | 32 | pub async fn module_compile( 33 | task: ExtendedModule, 34 | compile_config: &CompilerConfig, 35 | ) -> Result<ModuleCompilationResult, CompileError> { 36 | // Log the target task for debugging purposes 37 | info!("target task: {:#?}", task.task); 38 | let dry_run_program_path = compile_config.dry_run_program_path.clone(); 39 | 40 | // 1. Generate input for the dry run based on the extended modules 41 | let dry_run_input = DryRunnerProgramInput::new( 42 | PathBuf::from(DRY_CAIRO_RUN_OUTPUT_FILE), 43 | vec![cairo_format::DryRunProcessedModule::new( 44 | task.task.inputs, 45 | task.module_class, 46 | )], 47 | ); 48 | let input_string = 49 | serde_json::to_string_pretty(&dry_run_input).expect("Failed to serialize module class"); 50 | 51 | // 2. Run the dry run and retrieve the fetch points 52 | info!("2. Running dry-run... "); 53 | let dry_run_results: DryRunResult = cairo_dry_run( 54 | dry_run_program_path, 55 | input_string, 56 | compile_config.save_fetch_keys_file.clone(), 57 | )?; 58 | 59 | // TODO: once we deprecate the datalake path, this check will no longer be needed 60 | // Check if the program hash matches the expected hash 61 | if dry_run_results[0].program_hash != task.task.program_hash { 62 | return Err(CompileError::ClassHashMismatch); 63 | } 64 | // Ensure only one module is supported 65 | if dry_run_results.len() != 1 { 66 | panic!("Multiple Modules are not supported"); 67 | } 68 | 69 | // Extract the dry run module result 70 | let dry_run_module = dry_run_results.into_iter().next().unwrap(); 71 | let task_results = vec![dry_run_module.result.into()]; 72 | 73 | // 3. Categorize fetch keys by chain ID 74 | let categorized_keys = categorize_fetch_keys(dry_run_module.fetch_keys); 75 | 76 | let mut proofs_map: std::collections::HashMap<ChainId, ProcessedBlockProofs> = HashMap::new(); 77 | info!("3. Fetching proofs from provider..."); 78 | // Loop through each chain ID and fetch proofs 79 | for (chain_id, keys) in categorized_keys { 80 | info!("target provider chain id: {}", chain_id); 81 | let target_provider_config = compile_config 82 | .provider_config 83 | .get(&chain_id) 84 | .expect("target task's chain has not been configured"); 85 | let provider = new_provider_from_config(target_provider_config); 86 | let results = provider.fetch_proofs_from_keys(keys).await?; 87 | // Update the map with fetched results 88 | proofs_map.insert(chain_id, results); 89 | } 90 | 91 | Ok(ModuleCompilationResult { 92 | task_results, 93 | proofs: proofs_map, 94 | }) 95 | } 96 | -------------------------------------------------------------------------------- /hdp/src/preprocessor/module_registry.rs: -------------------------------------------------------------------------------- 1 | //!
Module registry is a service that provides the ability to fetch modules from the StarkNet network. 2 | //! It fetches the contract class from the StarkNet network and compiles it to CASM. 3 | 4 | use cairo_lang_starknet_classes::casm_contract_class::{ 5 | CasmContractClass, StarknetSierraCompilationError, 6 | }; 7 | use starknet_crypto::Felt; 8 | 9 | use crate::{ 10 | constant::HERODOTUS_PROGRAM_REGISTRY_URL, 11 | primitives::task::{module::Module, ExtendedModule}, 12 | }; 13 | use reqwest::Client; 14 | use std::path::PathBuf; 15 | use thiserror::Error; 16 | use tracing::info; 17 | 18 | #[derive(Error, Debug)] 19 | pub enum ModuleRegistryError { 20 | #[error("Serialize error: {0}")] 21 | SerializeError(#[from] serde_json::Error), 22 | 23 | #[error("StarkNet error: {0}")] 24 | StarkNetSierraCompileError(#[from] StarknetSierraCompilationError), 25 | 26 | #[error("StarkNet Provider error: {0}")] 27 | StarkNetProviderError(#[from] starknet::providers::ProviderError), 28 | 29 | #[error("Cairo1 module should have sierra as class")] 30 | SierraNotFound, 31 | 32 | #[error("Tokio join error: {0}")] 33 | TokioJoinError(#[from] tokio::task::JoinError), 34 | 35 | #[error("Module class source error: {0}")] 36 | ClassSourceError(String), 37 | 38 | #[error("Type conversion error: {0}")] 39 | TypeConversionError(String), 40 | } 41 | 42 | pub struct ModuleRegistry { 43 | client: Client, 44 | } 45 | 46 | impl Default for ModuleRegistry { 47 | fn default() -> Self { 48 | Self::new() 49 | } 50 | } 51 | 52 | impl ModuleRegistry { 53 | pub fn new() -> Self { 54 | let client = Client::new(); 55 | Self { client } 56 | } 57 | 58 | pub async fn get_extended_module( 59 | &self, 60 | module: Module, 61 | ) -> Result<ExtendedModule, ModuleRegistryError> { 62 | let casm = if let Some(ref local_class_path) = module.local_class_path { 63 | self.get_module_class_from_local_path(local_class_path) 64 | .await? 65 | } else { 66 | self.get_module_class_from_program_hash(module.program_hash) 67 | .await? 68 | }; 69 | 70 | Ok(ExtendedModule { 71 | task: module, 72 | module_class: casm, 73 | }) 74 | } 75 | 76 | async fn get_module_class_from_local_path( 77 | &self, 78 | local_class_path: &PathBuf, 79 | ) -> Result<CasmContractClass, ModuleRegistryError> { 80 | let casm: CasmContractClass = 81 | serde_json::from_str(&std::fs::read_to_string(local_class_path).map_err(|_| { 82 | ModuleRegistryError::ClassSourceError( 83 | "Local class path is not a valid JSON file".to_string(), 84 | ) 85 | })?)?; 86 | 87 | info!( 88 | "contract class fetched successfully from local path: {:?}", 89 | local_class_path 90 | ); 91 | Ok(casm) 92 | } 93 | 94 | async fn get_module_class_from_program_hash( 95 | &self, 96 | program_hash: Felt, 97 | ) -> Result<CasmContractClass, ModuleRegistryError> { 98 | let program_hash_hex = format!("{:#x}", program_hash); 99 | 100 | info!( 101 | "fetching contract class from module registry...
program_hash: {}", 102 | program_hash_hex 103 | ); 104 | 105 | let api_url = format!("{}={}", HERODOTUS_PROGRAM_REGISTRY_URL, program_hash_hex); 106 | 107 | let response = self 108 | .client 109 | .get(&api_url) 110 | .header("User-Agent", "request") 111 | .send() 112 | .await 113 | .expect("response is failed"); 114 | 115 | // Check if the response status is successful 116 | if response.status().is_success() { 117 | let response_text = response.text().await.expect("cannot get response"); 118 | let casm: CasmContractClass = serde_json::from_str(&response_text)?; 119 | info!( 120 | "contract class fetched successfully from program_hash: {:?}", 121 | program_hash 122 | ); 123 | Ok(casm) 124 | } else { 125 | Err(ModuleRegistryError::ClassSourceError( 126 | "failed to fetch contract class".to_string(), 127 | )) 128 | } 129 | } 130 | } 131 | 132 | #[cfg(test)] 133 | mod tests { 134 | 135 | use starknet_crypto::Felt; 136 | 137 | use super::*; 138 | 139 | fn init() -> (ModuleRegistry, Felt) { 140 | let module_registry = ModuleRegistry::new(); 141 | // This is test contract class hash 142 | let program_hash = 143 | Felt::from_hex("0x64041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412") 144 | .unwrap(); 145 | 146 | (module_registry, program_hash) 147 | } 148 | 149 | #[tokio::test] 150 | async fn test_get_module() { 151 | let (module_registry, program_hash) = init(); 152 | let _casm_from_rpc = module_registry 153 | .get_module_class_from_program_hash(program_hash) 154 | .await 155 | .unwrap(); 156 | } 157 | 158 | #[tokio::test] 159 | async fn test_get_multiple_module_classes() { 160 | let (module_registry, program_hash) = init(); 161 | 162 | let module = Module::new(program_hash, vec![], None); 163 | 164 | let extended_modules = module_registry.get_extended_module(module).await.unwrap(); 165 | 166 | assert_eq!( 167 | extended_modules.task.program_hash, 168 | Felt::from_hex("0x64041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412") 169 | .unwrap() 170 | ); 171 | assert_eq!(extended_modules.task.inputs, vec![]); 172 | } 173 | } 174 | -------------------------------------------------------------------------------- /hdp/src/primitives/block/account.rs: -------------------------------------------------------------------------------- 1 | //! 
Account struct and its associated methods 2 | 3 | use alloy::{primitives::B256, primitives::U256, rpc::types::EIP1186AccountProofResponse}; 4 | use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; 5 | 6 | #[derive(Debug, RlpDecodable, RlpEncodable, PartialEq)] 7 | pub struct Account { 8 | pub nonce: u64, 9 | pub balance: U256, 10 | pub storage_root: B256, 11 | pub code_hash: B256, 12 | } 13 | 14 | impl Account { 15 | pub fn new(nonce: u64, balance: U256, storage_root: B256, code_hash: B256) -> Self { 16 | Account { 17 | nonce, 18 | balance, 19 | storage_root, 20 | code_hash, 21 | } 22 | } 23 | 24 | pub fn rlp_encode(&self) -> Vec<u8> { 25 | let mut buffer = Vec::<u8>::new(); 26 | self.encode(&mut buffer); 27 | buffer 28 | } 29 | 30 | pub fn rlp_decode(mut rlp: &[u8]) -> Self { 31 | <Account as Decodable>::decode(&mut rlp).expect("rlp decode failed.") 32 | } 33 | } 34 | 35 | impl From<&EIP1186AccountProofResponse> for Account { 36 | fn from(account_from_rpc: &EIP1186AccountProofResponse) -> Self { 37 | Account { 38 | nonce: account_from_rpc.nonce, 39 | balance: account_from_rpc.balance, 40 | storage_root: account_from_rpc.storage_hash, 41 | code_hash: account_from_rpc.code_hash, 42 | } 43 | } 44 | } 45 | 46 | #[cfg(test)] 47 | mod tests { 48 | use super::*; 49 | use alloy::hex; 50 | use alloy::primitives::{b256, U256}; 51 | 52 | #[test] 53 | fn test_get_account_rlp() { 54 | let account = Account::new( 55 | 1, 56 | U256::from(0), 57 | b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"), 58 | b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"), 59 | ); 60 | let account_rlp = account.rlp_encode(); 61 | assert_eq!( 62 | hex::encode(account_rlp), 63 | "f8440180a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" 64 | ); 65 | 66 | let account = Account::new( 67 | 2, 68 | U256::from(0), 69 | b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"), 70 | b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"), 71 | ); 72 | let account_rlp = account.rlp_encode(); 73 | assert_eq!( 74 | hex::encode(account_rlp), 75 | "f8440280a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" 76 | ); 77 | 78 | let account = Account::new( 79 | 2, 80 | U256::from(0x1), 81 | b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"), 82 | b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"), 83 | ); 84 | let account_rlp = account.rlp_encode(); 85 | assert_eq!( 86 | hex::encode(account_rlp), 87 | "f8440201a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c" 88 | ); 89 | } 90 | 91 | #[test] 92 | fn test_decode_account_rlp() { 93 | let account_rlp = "f8440180a01c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185a0cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c"; 94 | let account = Account::rlp_decode(hex::decode(account_rlp).unwrap().as_slice()); 95 | assert_eq!( 96 | account, 97 | Account::new( 98 | 1, 99 | U256::from(0), 100 | b256!("1c35dfde2b62d99d3a74fda76446b60962c4656814bdd7815eb6e5b8be1e7185"), 101 | b256!("cd4f25236fff0ccac15e82bf4581beb08e95e1b5ba89de6031c75893cd91245c") 102 | ) 103 | ); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /hdp/src/primitives/block/mod.rs:
-------------------------------------------------------------------------------- 1 | pub mod account; 2 | pub mod header; 3 | -------------------------------------------------------------------------------- /hdp/src/primitives/merkle_tree.rs: -------------------------------------------------------------------------------- 1 | use alloy::{ 2 | dyn_abi::DynSolValue, 3 | primitives::{Keccak256, B256, U256}, 4 | }; 5 | use alloy_merkle_tree::standard_binary_tree::StandardMerkleTree; 6 | 7 | /// Commitment for a single result: keccak256(task_commitment || result as big-endian bytes) 8 | fn raw_result_to_result_commitment(task_commitment: &B256, compiled_result: &U256) -> B256 { 9 | let mut hasher = Keccak256::new(); 10 | hasher.update(task_commitment); 11 | hasher.update(compiled_result.to_be_bytes_vec()); 12 | hasher.finalize() 13 | } 14 | 15 | /// Build the result merkle tree from task commitments and task results, 16 | /// returning the tree structure and the result commitments 17 | pub fn build_result_merkle_tree( 18 | tasks_commitments: &[B256], 19 | task_results: &[U256], 20 | ) -> (StandardMerkleTree, Vec<B256>) { 21 | if tasks_commitments.len() != task_results.len() { 22 | panic!("tasks commitments and task results have to be the same length to construct the result merkle tree") 23 | } 24 | let mut results_leaves = Vec::new(); 25 | let mut results_commitments = Vec::new(); 26 | for (task_commitment, task_result) in tasks_commitments.iter().zip(task_results.iter()) { 27 | let result_commitment = raw_result_to_result_commitment(task_commitment, task_result); 28 | results_commitments.push(result_commitment); 29 | results_leaves.push(DynSolValue::FixedBytes(result_commitment, 32)); 30 | } 31 | let tree = StandardMerkleTree::of(&results_leaves); 32 | (tree, results_commitments) 33 | } 34 | 35 | /// Build the task merkle tree from the given task commitments 36 | pub fn build_task_merkle_tree(tasks_commitments: &[B256]) -> StandardMerkleTree { 37 | let mut task_leaves = Vec::new(); 38 | tasks_commitments 39 | .iter() 40 | .for_each(|tc| task_leaves.push(DynSolValue::FixedBytes(*tc, 32))); 41 | StandardMerkleTree::of(&task_leaves) 42 | } 43 | 44 | #[cfg(test)] 45 | mod tests { 46 | use alloy::primitives::b256; 47 | 48 | use super::*; 49 | 50 | #[test] 51 | fn test_build_result_merkle_tree() { 52 | let tasks_commitments = vec![B256::ZERO]; 53 | let task_results = vec![U256::from(10)]; 54 | let (tree, results_commitments) = 55 | build_result_merkle_tree(&tasks_commitments, &task_results); 56 | let result_root = tree.root(); 57 | assert_eq!( 58 | result_root, 59 | b256!("deddf91dc7d95dba2b7698201b4571eaa5bfec0a9ff4276e836f98e3a40a77e9") 60 | ); 61 | assert_eq!( 62 | results_commitments, 63 | vec![b256!( 64 | "13da86008ba1c6922daee3e07db95305ef49ebced9f5467a0b8613fcc6b343e3" 65 | )] 66 | ) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /hdp/src/primitives/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod aggregate_fn; 2 | pub mod block; 3 | pub mod chain_id; 4 | pub mod merkle_tree; 5 | pub mod processed_types; 6 | pub mod request; 7 | pub mod serde; 8 | pub mod solidity_types; 9 | pub mod task; 10 | pub mod utils; 11 | 12 | pub use chain_id::ChainId; 13 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/account.rs: -------------------------------------------------------------------------------- 1 | //! Processed account type 2 | //! This contains the processed account type and its conversion to cairo format.
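//! The `account_key` below is the keccak256 hash of the account address, i.e. the key of the
//! account's leaf in the state trie. Editor's sketch (the address literal is arbitrary and
//! illustrative, not taken from this codebase):
//! ```
//! use alloy::primitives::{address, keccak256};
//! let account_key = keccak256(address!("ff58d746a67c2e42bcc07d6b3f58406e8837e883")).to_string();
//! ```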
3 | 4 | use super::mpt::ProcessedMPTProof; 5 | use alloy::primitives::{keccak256, Address}; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 9 | pub struct ProcessedAccount { 10 | pub address: Address, 11 | pub account_key: String, 12 | pub proofs: Vec<ProcessedMPTProof>, 13 | } 14 | 15 | impl ProcessedAccount { 16 | pub fn new(address: Address, proofs: Vec<ProcessedMPTProof>) -> Self { 17 | // TODO: to be more accurate, this is actually the account trie leaf key 18 | let account_trie_leaf = keccak256(address).to_string(); 19 | ProcessedAccount { 20 | address, 21 | account_key: account_trie_leaf, 22 | proofs, 23 | } 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/block_proofs.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use super::{ 6 | account::ProcessedAccount, header::ProcessedHeader, mmr::MMRMeta, receipt::ProcessedReceipt, 7 | starknet, storage::ProcessedStorage, transaction::ProcessedTransaction, 8 | }; 9 | 10 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 11 | #[serde(untagged)] 12 | pub enum ProcessedBlockProofs { 13 | Evm(EvmBlockProofs), 14 | StarkNet(StarkNetBlockProofs), 15 | } 16 | 17 | impl ProcessedBlockProofs { 18 | pub fn get_chain_id(&self) -> String { 19 | match self { 20 | ProcessedBlockProofs::Evm(evm) => evm.chain_id.to_owned(), 21 | ProcessedBlockProofs::StarkNet(starknet) => starknet.chain_id.to_owned(), 22 | } 23 | } 24 | 25 | pub fn get_evm_proofs(self) -> Option<EvmBlockProofs> { 26 | match self { 27 | ProcessedBlockProofs::Evm(evm) => Some(evm), 28 | ProcessedBlockProofs::StarkNet(_) => None, 29 | } 30 | } 31 | 32 | pub fn get_starknet_proofs(self) -> Option<StarkNetBlockProofs> { 33 | match self { 34 | ProcessedBlockProofs::Evm(_) => None, 35 | ProcessedBlockProofs::StarkNet(starknet) => Some(starknet), 36 | } 37 | } 38 | 39 | pub fn get_mmr_meta(self) -> Vec<MMRMeta> { 40 | match self { 41 | ProcessedBlockProofs::Evm(evm_proofs) => evm_proofs 42 | .mmr_with_headers 43 | .into_iter() 44 | .map(|m| m.mmr_meta) 45 | .collect(), 46 | ProcessedBlockProofs::StarkNet(sn_proofs) => sn_proofs 47 | .mmr_with_headers 48 | .into_iter() 49 | .map(|m| m.mmr_meta) 50 | .collect(), 51 | } 52 | } 53 | } 54 | 55 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 56 | pub struct StarkNetBlockProofs { 57 | pub chain_id: String, 58 | pub mmr_with_headers: Vec<MMRWithHeaderStarkNet>, 59 | pub storages: Vec<starknet::ProcessedStorage>, 60 | } 61 | 62 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 63 | pub struct MMRWithHeaderStarkNet { 64 | pub mmr_meta: MMRMeta, 65 | pub headers: Vec<starknet::ProcessedHeader>, 66 | } 67 | 68 | /// Provider should fetch all the proofs and rlp values from the given keys.
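/// Editor's illustration of the serialized shape of `EvmBlockProofs` below
/// (values are placeholders, not actual fixture data):
/// ```json
/// { "chain_id": "...",
///   "mmr_with_headers": [{ "mmr_meta": { ... }, "headers": [ ... ] }],
///   "accounts": [ ... ], "storages": [ ... ],
///   "transactions": [ ... ], "transaction_receipts": [ ... ] }
/// ```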
69 | 70 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 71 | pub struct EvmBlockProofs { 72 | pub chain_id: String, 73 | pub mmr_with_headers: Vec<MMRWithHeader>, 74 | pub accounts: Vec<ProcessedAccount>, 75 | pub storages: Vec<ProcessedStorage>, 76 | pub transactions: Vec<ProcessedTransaction>, 77 | pub transaction_receipts: Vec<ProcessedReceipt>, 78 | } 79 | 80 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 81 | pub struct MMRWithHeader { 82 | pub mmr_meta: MMRMeta, 83 | pub headers: Vec<ProcessedHeader>, 84 | } 85 | 86 | pub fn mmr_with_header_vec_to_map( 87 | target: Vec<MMRWithHeader>, 88 | ) -> HashMap<MMRMeta, HashSet<ProcessedHeader>> { 89 | let mut map = HashMap::new(); 90 | for target_item in target { 91 | map.entry(target_item.mmr_meta) 92 | .and_modify(|existing_headers: &mut HashSet<ProcessedHeader>| { 93 | existing_headers.extend(target_item.headers.iter().cloned()); 94 | }) 95 | .or_insert_with(|| target_item.headers.into_iter().collect()); 96 | } 97 | map 98 | } 99 | 100 | impl MMRWithHeader { 101 | pub fn to_map(self) -> HashMap<MMRMeta, HashSet<ProcessedHeader>> { 102 | let mut map = HashMap::new(); 103 | map.insert(self.mmr_meta, HashSet::from_iter(self.headers)); 104 | map 105 | } 106 | 107 | pub fn extend(self, other: MMRWithHeader) -> Vec<MMRWithHeader> { 108 | let mut self_map = self.to_map(); 109 | let other_map = other.to_map(); 110 | for (mmr, headers) in other_map { 111 | self_map 112 | .entry(mmr) 113 | .and_modify(|existing_headers| { 114 | existing_headers.extend(headers.iter().cloned()); 115 | }) 116 | .or_insert_with(|| headers.into_iter().collect()); 117 | } 118 | convert_to_mmr_with_headers(self_map) 119 | } 120 | } 121 | 122 | pub fn convert_to_mmr_with_headers( 123 | map: HashMap<MMRMeta, HashSet<ProcessedHeader>>, 124 | ) -> Vec<MMRWithHeader> { 125 | map.into_iter() 126 | .map(|(mmr_meta, headers)| MMRWithHeader { 127 | mmr_meta, 128 | headers: headers.into_iter().collect(), 129 | }) 130 | .collect() 131 | } 132 | 133 | pub fn convert_to_mmr_with_sn_headers( 134 | map: HashMap<MMRMeta, HashSet<starknet::ProcessedHeader>>, 135 | ) -> Vec<MMRWithHeaderStarkNet> { 136 | map.into_iter() 137 | .map(|(mmr_meta, headers)| MMRWithHeaderStarkNet { 138 | mmr_meta, 139 | headers: headers.into_iter().collect(), 140 | }) 141 | .collect() 142 | } 143 | 144 | pub fn convert_to_mmr_meta_set( 145 | mmr_with_headers: Vec<MMRWithHeader>, 146 | ) -> HashMap<MMRMeta, HashSet<ProcessedHeader>> { 147 | mmr_with_headers 148 | .into_iter() 149 | .map(|mmr_with_header| { 150 | ( 151 | mmr_with_header.mmr_meta, 152 | mmr_with_header.headers.into_iter().collect::<HashSet<_>>(), 153 | ) 154 | }) 155 | .collect() 156 | } 157 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/cairo_format/account.rs: -------------------------------------------------------------------------------- 1 | //! Processed account type 2 | //! This contains the processed account type and its conversion to cairo format.
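//! When converted to the Cairo format, the 20-byte address is chunked into three
//! little-endian u64 felts (8 + 8 + 4 bytes). Editor's worked example with an
//! arbitrary address: 0x1234567890abcdef1122334455667788deadbeef becomes
//! ["0xefcdab9078563412", "0x8877665544332211", "0xefbeadde"].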
3 | 4 | use serde::{Deserialize, Serialize}; 5 | use serde_with::serde_as; 6 | use starknet::core::serde::unsigned_field_element::UfeHex; 7 | use starknet_types_core::felt::Felt; 8 | 9 | use crate::primitives::processed_types::account::ProcessedAccount as BaseProcessedAccount; 10 | 11 | use super::{felt_vec_unit::FieldElementVectorUnit, mpt::ProcessedMPTProof, AsCairoFormat}; 12 | 13 | impl AsCairoFormat for BaseProcessedAccount { 14 | type Output = ProcessedAccount; 15 | 16 | fn as_cairo_format(&self) -> Self::Output { 17 | let address_chunk_result = FieldElementVectorUnit::from_bytes(self.address.as_ref()); 18 | let account_key = &self.account_key; 19 | let proofs = self 20 | .proofs 21 | .iter() 22 | .map(|proof| proof.as_cairo_format()) 23 | .collect(); 24 | ProcessedAccount { 25 | address: address_chunk_result.felts, 26 | account_key: account_key.into(), 27 | proofs, 28 | } 29 | } 30 | } 31 | 32 | #[serde_as] 33 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 34 | pub struct ProcessedAccount { 35 | #[serde_as(as = "Vec<UfeHex>")] 36 | pub address: Vec<Felt>, 37 | pub account_key: String, 38 | pub proofs: Vec<ProcessedMPTProof>, 39 | } 40 | 41 | #[cfg(test)] 42 | mod tests { 43 | use super::*; 44 | 45 | #[test] 46 | fn test_account_to_serde() { 47 | let processed_string = 48 | include_str!("../../../../../fixtures/primitives/processed/account.json"); 49 | let accounts: BaseProcessedAccount = serde_json::from_str(processed_string).unwrap(); 50 | let accounts_in_felts: ProcessedAccount = accounts.as_cairo_format(); 51 | let string = serde_json::to_string_pretty(&accounts_in_felts).unwrap(); 52 | 53 | let json_file = 54 | include_str!("../../../../../fixtures/primitives/processed_in_felts/account.json"); 55 | let expected: ProcessedAccount = serde_json::from_str(json_file).unwrap(); 56 | let expected_string = serde_json::to_string_pretty(&expected).unwrap(); 57 | 58 | assert_eq!(string, expected_string); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/cairo_format/block_proofs.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::primitives::processed_types::{ 4 | block_proofs::{ProcessedBlockProofs as BaseProcessedBlockProofs, StarkNetBlockProofs}, 5 | mmr::MMRMeta, 6 | }; 7 | 8 | use super::{ 9 | AsCairoFormat, ProcessedAccount, ProcessedHeader, ProcessedReceipt, ProcessedStorage, 10 | ProcessedTransaction, 11 | }; 12 | 13 | impl AsCairoFormat for BaseProcessedBlockProofs { 14 | type Output = ProcessedBlockProofs; 15 | 16 | fn as_cairo_format(&self) -> Self::Output { 17 | match self { 18 | BaseProcessedBlockProofs::Evm(evm_proof) => ProcessedBlockProofs::Evm(EvmBlockProofs { 19 | chain_id: evm_proof.chain_id.clone(), 20 | mmr_with_headers: evm_proof 21 | .mmr_with_headers 22 | .iter() 23 | .map(|mmr_with_header| MMRWithHeader { 24 | mmr_meta: mmr_with_header.mmr_meta.clone(), 25 | headers: mmr_with_header 26 | .headers 27 | .iter() 28 | .map(|header| header.as_cairo_format()) 29 | .collect(), 30 | }) 31 | .collect(), 32 | accounts: evm_proof 33 | .accounts 34 | .iter() 35 | .map(|account| account.as_cairo_format()) 36 | .collect(), 37 | storages: evm_proof 38 | .storages 39 | .iter() 40 | .map(|storage| storage.as_cairo_format()) 41 | .collect(), 42 | transactions: evm_proof 43 | .transactions 44 | .iter() 45 | .map(|transaction| transaction.as_cairo_format()) 46 | .collect(), 47 | transaction_receipts: evm_proof
49 |                     .iter()
50 |                     .map(|receipt| receipt.as_cairo_format())
51 |                     .collect(),
52 |             }),
53 |             BaseProcessedBlockProofs::StarkNet(sn_proofs) => {
54 |                 ProcessedBlockProofs::StarkNet(sn_proofs.clone())
55 |             }
56 |         }
57 |     }
58 | }
59 | 
60 | #[derive(Serialize, Deserialize)]
61 | #[serde(untagged)]
62 | pub enum ProcessedBlockProofs {
63 |     Evm(EvmBlockProofs),
64 |     StarkNet(StarkNetBlockProofs),
65 | }
66 | 
67 | #[derive(Serialize, Deserialize)]
68 | pub struct EvmBlockProofs {
69 |     pub chain_id: String,
70 |     pub mmr_with_headers: Vec<MMRWithHeader>,
71 |     pub accounts: Vec<ProcessedAccount>,
72 |     pub storages: Vec<ProcessedStorage>,
73 |     pub transactions: Vec<ProcessedTransaction>,
74 |     pub transaction_receipts: Vec<ProcessedReceipt>,
75 | }
76 | 
77 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
78 | pub struct MMRWithHeader {
79 |     pub mmr_meta: MMRMeta,
80 |     pub headers: Vec<ProcessedHeader>,
81 | }
82 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/datalake_compute.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::processed_types::datalake_compute::ProcessedDatalakeCompute as BaseProcessedDatalakeCompute;
2 | 
3 | use super::{felt_vec_unit::FieldElementVectorUnit, traits::AsCairoFormat};
4 | use serde::{Deserialize, Serialize};
5 | use serde_with::serde_as;
6 | use starknet::core::serde::unsigned_field_element::UfeHex;
7 | use starknet_types_core::felt::Felt;
8 | 
9 | impl AsCairoFormat for BaseProcessedDatalakeCompute {
10 |     type Output = ProcessedDatalakeCompute;
11 | 
12 |     fn as_cairo_format(&self) -> Self::Output {
13 |         let computational_task_felts = FieldElementVectorUnit::from_bytes(&self.encoded_task);
14 |         let datalake_felts = FieldElementVectorUnit::from_bytes(&self.encoded_datalake);
15 |         ProcessedDatalakeCompute {
16 |             task_bytes_len: computational_task_felts.bytes_len,
17 |             encoded_task: computational_task_felts.felts,
18 |             datalake_bytes_len: datalake_felts.bytes_len,
19 |             encoded_datalake: datalake_felts.felts,
20 |             datalake_type: self.datalake_type,
21 |             property_type: self.property_type,
22 |         }
23 |     }
24 | }
25 | 
26 | #[serde_as]
27 | #[derive(Serialize, Debug, Clone, PartialEq, Deserialize)]
28 | pub struct ProcessedDatalakeCompute {
29 |     pub task_bytes_len: u64,
30 |     #[serde_as(as = "Vec<UfeHex>")]
31 |     pub encoded_task: Vec<Felt>,
32 |     pub datalake_bytes_len: u64,
33 |     #[serde_as(as = "Vec<UfeHex>")]
34 |     pub encoded_datalake: Vec<Felt>,
35 |     pub datalake_type: u8,
36 |     pub property_type: u8,
37 | }
38 | 
39 | #[cfg(test)]
40 | mod tests {
41 | 
42 |     use super::*;
43 | 
44 |     #[test]
45 |     fn test_datalake_compute_to_serde() {
46 |         let processed_string =
47 |             include_str!("../../../../../fixtures/primitives/processed/datalake_compute.json");
48 |         let datalake_computes: BaseProcessedDatalakeCompute =
49 |             serde_json::from_str(processed_string).unwrap();
50 |         let datalake_computes_in_felts: ProcessedDatalakeCompute =
51 |             datalake_computes.as_cairo_format();
52 |         let string = serde_json::to_string_pretty(&datalake_computes_in_felts).unwrap();
53 | 
54 |         let json_file = include_str!(
55 |             "../../../../../fixtures/primitives/processed_in_felts/datalake_compute.json"
56 |         );
57 |         let expected: ProcessedDatalakeCompute = serde_json::from_str(json_file).unwrap();
58 |         let expected_string = serde_json::to_string_pretty(&expected).unwrap();
59 | 
60 |         assert_eq!(string, expected_string);
61 |     }
62 | }
63 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/felt_vec_unit.rs:
--------------------------------------------------------------------------------
1 | use serde::Serialize;
2 | use serde_with::serde_as;
3 | use starknet::core::serde::unsigned_field_element::UfeHex;
4 | use starknet_types_core::felt::Felt;
5 | 
6 | #[serde_as]
7 | #[derive(Serialize, Debug)]
8 | pub struct FieldElementVectorUnit {
9 |     /// Chunked vector of field elements
10 |     #[serde_as(as = "Vec<UfeHex>")]
11 |     pub felts: Vec<Felt>,
12 |     /// Length of the original byte array before chunking into field elements
13 |     pub bytes_len: u64,
14 | }
15 | 
16 | impl FieldElementVectorUnit {
17 |     /// Converts a byte slice into a `FieldElementVectorUnit`.
18 |     ///
19 |     /// This function takes a slice of bytes and converts it into a `FieldElementVectorUnit`,
20 |     /// which consists of a vector of [`Felt`]s and the length of the original byte slice.
21 |     ///
22 |     /// # Panics
23 |     ///
24 |     /// This function will panic if the input byte slice is empty.
25 |     pub fn from_bytes(bytes: &[u8]) -> Self {
26 |         if bytes.is_empty() {
27 |             panic!("Cannot convert to FieldElementVectorUnit from empty bytes")
28 |         }
29 |         let bytes_len = bytes.len() as u64;
30 |         let felts = bytes
31 |             .chunks(8)
32 |             .map(|chunk| {
33 |                 let mut arr = [0u8; 8];
34 |                 let len = chunk.len();
35 |                 arr[..len].copy_from_slice(chunk);
36 |                 let le_int = u64::from_le_bytes(arr);
37 |                 Felt::from_dec_str(&le_int.to_string()).expect("Invalid to convert FieldElement")
38 |             })
39 |             .collect();
40 | 
41 |         Self { felts, bytes_len }
42 |     }
43 | }
44 | 
45 | #[cfg(test)]
46 | mod tests {
47 |     use alloy::hex;
48 | 
49 |     use super::*;
50 | 
51 |     #[test]
52 |     #[should_panic(expected = "Cannot convert to FieldElementVectorUnit from empty bytes")]
53 |     fn test_empty_bytes() {
54 |         let bytes = hex::decode("").unwrap();
55 |         FieldElementVectorUnit::from_bytes(&bytes);
56 |     }
57 | 
58 |     #[test]
59 |     fn test_single_byte_bytes() {
60 |         let bytes = hex::decode("0x01").unwrap();
61 |         let result = FieldElementVectorUnit::from_bytes(&bytes);
62 |         assert_eq!(result.bytes_len, 1);
63 |         assert_eq!(result.felts.len(), 1);
64 |         assert_eq!(result.felts[0], Felt::from_hex("0x1").unwrap());
65 |     }
66 | 
67 |     #[test]
68 |     fn test_single_chunk_bytes() {
69 |         let bytes = hex::decode("0x1234567890abcdef").unwrap();
70 |         let result = FieldElementVectorUnit::from_bytes(&bytes);
71 |         assert_eq!(result.bytes_len, 8);
72 |         assert_eq!(result.felts.len(), 1);
73 |         assert_eq!(result.felts[0], Felt::from_hex("efcdab9078563412").unwrap());
74 |     }
75 | 
76 |     #[test]
77 |     fn test_multiple_chunks_bytes() {
78 |         let bytes = hex::decode("0x1234567890abcdef1122334455667788").unwrap();
79 |         let result = FieldElementVectorUnit::from_bytes(&bytes);
80 |         assert_eq!(result.bytes_len, 16);
81 |         assert_eq!(result.felts.len(), 2);
82 |         assert_eq!(result.felts[0], Felt::from_hex("efcdab9078563412").unwrap());
83 |         assert_eq!(result.felts[1], Felt::from_hex("8877665544332211").unwrap());
84 |     }
85 | }
86 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/header.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use serde_with::serde_as;
3 | use starknet::core::serde::unsigned_field_element::UfeHex;
4 | use starknet_types_core::felt::Felt;
5 | 
6 | use crate::primitives::processed_types::header::{
7 |     ProcessedHeader as BaseProcessedHeader, ProcessedHeaderProof as BasedProcessedHeaderProof,
8 | };
9 | 
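// Editor's note (illustrative, not part of the source): `FieldElementVectorUnit::from_bytes`
// (defined above in `felt_vec_unit.rs`) chunks the RLP-encoded header into
// 8-byte little-endian felts; e.g. a 500-byte header RLP would become
// ceil(500 / 8) = 63 felts, with `rlp_bytes_len = 500` kept alongside so the
// zero padding in the final chunk can be ignored downstream.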
10 | use super::{felt_vec_unit::FieldElementVectorUnit, traits::AsCairoFormat};
11 | 
12 | impl AsCairoFormat for BaseProcessedHeader {
13 |     type Output = ProcessedHeader;
14 | 
15 |     fn as_cairo_format(&self) -> Self::Output {
16 |         let felts_unit = FieldElementVectorUnit::from_bytes(&self.rlp);
17 |         let proof = self.proof.clone();
18 |         ProcessedHeader {
19 |             rlp: felts_unit.felts,
20 |             rlp_bytes_len: felts_unit.bytes_len,
21 |             proof: BasedProcessedHeaderProof {
22 |                 leaf_idx: proof.leaf_idx,
23 |                 mmr_path: proof.mmr_path,
24 |             },
25 |         }
26 |     }
27 | }
28 | 
29 | /// HeaderFormatted is the formatted version of Header
30 | #[serde_as]
31 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
32 | pub struct ProcessedHeader {
33 |     #[serde_as(as = "Vec<UfeHex>")]
34 |     pub rlp: Vec<Felt>,
35 |     /// rlp_bytes_len is the byte (8-bit) length of the rlp string
36 |     pub rlp_bytes_len: u64,
37 |     pub proof: BasedProcessedHeaderProof,
38 | }
39 | 
40 | #[cfg(test)]
41 | mod tests {
42 |     use super::*;
43 | 
44 |     #[test]
45 |     fn test_header_serde() {
46 |         let processed_string =
47 |             include_str!("../../../../../fixtures/primitives/processed/header.json");
48 |         let headers: BaseProcessedHeader = serde_json::from_str(processed_string).unwrap();
49 |         let headers_in_felts: ProcessedHeader = headers.as_cairo_format();
50 |         let string = serde_json::to_string_pretty(&headers_in_felts).unwrap();
51 | 
52 |         let json_file =
53 |             include_str!("../../../../../fixtures/primitives/processed_in_felts/header.json");
54 |         let expected: ProcessedHeader = serde_json::from_str(json_file).unwrap();
55 |         let expected_string = serde_json::to_string_pretty(&expected).unwrap();
56 | 
57 |         assert_eq!(string, expected_string);
58 |     }
59 | }
60 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/mod.rs:
--------------------------------------------------------------------------------
1 | //! This module contains the processed types for the Cairo format.
2 | //! Used to serialize the processed types to the Cairo Program's input format.
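//!
//! Editor's summary: each base processed type implements [`AsCairoFormat`]
//! (see `traits.rs`), which re-encodes byte-oriented fields as 8-byte
//! little-endian felt chunks plus the original byte length, e.g. (values taken
//! from the `felt_vec_unit.rs` tests):
//!
//! ```text
//! bytes 0x1234567890abcdef1122334455667788
//!   -> felts: [0xefcdab9078563412, 0x8877665544332211], bytes_len: 16
//! ```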
3 | 
4 | pub mod account;
5 | pub mod block_proofs;
6 | pub mod datalake_compute;
7 | pub mod felt_vec_unit;
8 | pub mod header;
9 | pub mod module;
10 | pub mod mpt;
11 | pub mod query;
12 | pub mod receipt;
13 | pub mod storage;
14 | pub mod task;
15 | pub mod traits;
16 | pub mod transaction;
17 | 
18 | pub use account::*;
19 | pub use block_proofs::*;
20 | pub use datalake_compute::*;
21 | pub use felt_vec_unit::*;
22 | pub use header::*;
23 | pub use module::*;
24 | pub use mpt::*;
25 | pub use query::*;
26 | pub use receipt::*;
27 | pub use storage::*;
28 | pub use task::*;
29 | pub use traits::*;
30 | pub use transaction::*;
31 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/module.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::{
2 |     processed_types::module::ProcessedModule as BaseProcessedModule, task::module::ModuleInput,
3 | };
4 | use cairo_lang_starknet_classes::casm_contract_class::CasmContractClass;
5 | use serde::{Deserialize, Serialize};
6 | use serde_with::serde_as;
7 | use starknet::core::serde::unsigned_field_element::UfeHex;
8 | use starknet_types_core::felt::Felt;
9 | 
10 | use super::{AsCairoFormat, FieldElementVectorUnit};
11 | 
12 | impl AsCairoFormat for BaseProcessedModule {
13 |     type Output = ProcessedModule;
14 | 
15 |     fn as_cairo_format(&self) -> Self::Output {
16 |         let module_task_felts = FieldElementVectorUnit::from_bytes(&self.encoded_task);
17 |         ProcessedModule {
18 |             module_class: self.module_class.clone(),
19 |             encoded_task: module_task_felts.felts,
20 |             inputs: self.inputs.clone(),
21 |             task_bytes_len: module_task_felts.bytes_len,
22 |         }
23 |     }
24 | }
25 | 
26 | impl BaseProcessedModule {
27 |     pub fn as_dry_run_cairo_format(&self) -> DryRunProcessedModule {
28 |         DryRunProcessedModule {
29 |             inputs: self.inputs.clone(),
30 |             module_class: self.module_class.clone(),
31 |         }
32 |     }
33 | }
34 | 
35 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
36 | pub struct DryRunProcessedModule {
37 |     pub inputs: Vec<ModuleInput>,
38 |     /// Detail class code of the module.
39 |     /// This will be loaded to bootloader.
40 |     pub module_class: CasmContractClass,
41 | }
42 | 
43 | impl DryRunProcessedModule {
44 |     pub fn new(inputs: Vec<ModuleInput>, module_class: CasmContractClass) -> Self {
45 |         Self {
46 |             inputs,
47 |             module_class,
48 |         }
49 |     }
50 | }
51 | 
52 | #[serde_as]
53 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
54 | pub struct ProcessedModule {
55 |     #[serde_as(as = "Vec<UfeHex>")]
56 |     pub encoded_task: Vec<Felt>,
57 |     pub task_bytes_len: u64,
58 |     pub inputs: Vec<ModuleInput>,
59 |     /// Detail class code of the module.
60 |     /// This will be loaded to bootloader.
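    /// (Editor's note: contrast with [`DryRunProcessedModule`] above, which
    /// omits `encoded_task`/`task_bytes_len` since a dry run only needs the
    /// inputs and this class.)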
61 |     pub module_class: CasmContractClass,
62 | }
63 | 
64 | impl ProcessedModule {
65 |     pub fn new(
66 |         encoded_task: Vec<Felt>,
67 |         task_bytes_len: u64,
68 |         inputs: Vec<ModuleInput>,
69 |         module_class: CasmContractClass,
70 |     ) -> Self {
71 |         Self {
72 |             encoded_task,
73 |             task_bytes_len,
74 |             inputs,
75 |             module_class,
76 |         }
77 |     }
78 | }
79 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/mpt.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::processed_types::mpt::ProcessedMPTProof as BaseProcessedMPTProof;
2 | 
3 | use super::{felt_vec_unit::FieldElementVectorUnit, traits::AsCairoFormat};
4 | use serde::{Deserialize, Serialize};
5 | use serde_with::serde_as;
6 | use starknet::core::serde::unsigned_field_element::UfeHex;
7 | use starknet_types_core::felt::Felt;
8 | 
9 | impl AsCairoFormat for BaseProcessedMPTProof {
10 |     type Output = ProcessedMPTProof;
11 | 
12 |     fn as_cairo_format(&self) -> ProcessedMPTProof {
13 |         let proof_felts: Vec<FieldElementVectorUnit> = self
14 |             .proof
15 |             .iter()
16 |             .map(|proof| FieldElementVectorUnit::from_bytes(proof))
17 |             .collect();
18 | 
19 |         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
20 |         let proof_result: Vec<Vec<Felt>> = proof_felts.iter().map(|f| f.felts.clone()).collect();
21 |         ProcessedMPTProof {
22 |             block_number: self.block_number,
23 |             proof_bytes_len,
24 |             proof: proof_result,
25 |         }
26 |     }
27 | }
28 | 
29 | #[serde_as]
30 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
31 | pub struct ProcessedMPTProof {
32 |     pub block_number: u64,
33 |     /// proof_bytes_len is the byte (8-bit) length of each proof string
34 |     pub proof_bytes_len: Vec<u64>,
35 |     #[serde_as(as = "Vec<Vec<UfeHex>>")]
36 |     pub proof: Vec<Vec<Felt>>,
37 | }
38 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/query.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 | 
3 | use crate::primitives::processed_types::query::ProcessorInput as BasedProcessorInput;
4 | use ::serde::Serialize;
5 | use alloy::primitives::B256;
6 | use serde::Deserialize;
7 | 
8 | use super::{AsCairoFormat, ProcessedBlockProofs, ProcessedTask};
9 | 
10 | impl AsCairoFormat for BasedProcessorInput {
11 |     type Output = ProcessorInput;
12 | 
13 |     fn as_cairo_format(&self) -> Self::Output {
14 |         ProcessorInput {
15 |             cairo_run_output_path: self.cairo_run_output_path.clone(),
16 |             task_root: self.tasks_root,
17 |             result_root: self.results_root,
18 |             proofs: self
19 |                 .proofs
20 |                 .iter()
21 |                 .map(|proof| proof.as_cairo_format())
22 |                 .collect(),
23 |             tasks: self
24 |                 .tasks
25 |                 .iter()
26 |                 .map(|task| task.as_cairo_format())
27 |                 .collect(),
28 |         }
29 |     }
30 | }
31 | 
32 | #[derive(Serialize, Deserialize)]
33 | pub struct ProcessorInput {
34 |     /// Path to the directory where the cairo-run output will be stored.
35 |     pub cairo_run_output_path: PathBuf,
36 |     /// Batched tasks root of all tasks.
37 |     pub task_root: B256,
38 |     /// Batched results root of all tasks.
39 |     pub result_root: B256,
40 |     /// Fetched proofs per each fetch point.
41 |     pub proofs: Vec<ProcessedBlockProofs>,
42 |     /// tasks to be executed.
43 |     pub tasks: Vec<ProcessedTask>,
44 | }
45 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/receipt.rs:
--------------------------------------------------------------------------------
1 | //! This module defines the `ProcessedReceipt` struct and its corresponding `ProcessedReceiptInFelts` struct.
2 | 
3 | use super::{felt_vec_unit::FieldElementVectorUnit, traits::AsCairoFormat};
4 | use crate::primitives::processed_types::receipt::ProcessedReceipt as BaseProcessedReceipt;
5 | use serde::{Deserialize, Serialize};
6 | use serde_with::serde_as;
7 | use starknet::core::serde::unsigned_field_element::UfeHex;
8 | use starknet_types_core::felt::Felt;
9 | 
10 | impl AsCairoFormat for BaseProcessedReceipt {
11 |     type Output = ProcessedReceipt;
12 | 
13 |     fn as_cairo_format(&self) -> Self::Output {
14 |         let key = self.key.clone();
15 |         let proof_felts: Vec<FieldElementVectorUnit> = self
16 |             .proof
17 |             .iter()
18 |             .map(|proof| FieldElementVectorUnit::from_bytes(proof))
19 |             .collect();
20 | 
21 |         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
22 |         let proof_result: Vec<Vec<Felt>> = proof_felts.iter().map(|f| f.felts.clone()).collect();
23 |         ProcessedReceipt {
24 |             key,
25 |             block_number: self.block_number,
26 |             proof_bytes_len,
27 |             proof: proof_result,
28 |         }
29 |     }
30 | }
31 | 
32 | #[serde_as]
33 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
34 | #[serde(rename = "transaction_receipts")]
35 | pub struct ProcessedReceipt {
36 |     pub key: String,
37 |     pub block_number: u64,
38 |     /// proof_bytes_len is the byte (8-bit) length of each proof string
39 |     pub proof_bytes_len: Vec<u64>,
40 |     #[serde_as(as = "Vec<Vec<UfeHex>>")]
41 |     pub proof: Vec<Vec<Felt>>,
42 | }
43 | 
44 | #[cfg(test)]
45 | mod tests {
46 | 
47 |     use super::*;
48 | 
49 |     #[test]
50 |     fn test_receipt_serde() {
51 |         let processed_string =
52 |             include_str!("../../../../../fixtures/primitives/processed/receipt.json");
53 |         let receipts: BaseProcessedReceipt = serde_json::from_str(processed_string).unwrap();
54 |         let receipts_in_felts: ProcessedReceipt = receipts.as_cairo_format();
55 |         let string = serde_json::to_string_pretty(&receipts_in_felts).unwrap();
56 | 
57 |         let json_file =
58 |             include_str!("../../../../../fixtures/primitives/processed_in_felts/receipt.json");
59 |         let expected: ProcessedReceipt = serde_json::from_str(json_file).unwrap();
60 |         let expected_string = serde_json::to_string_pretty(&expected).unwrap();
61 | 
62 |         assert_eq!(string, expected_string);
63 |     }
64 | }
65 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/storage.rs:
--------------------------------------------------------------------------------
1 | //! This module defines the `ProcessedStorage` struct and its corresponding `ProcessedStorageInFelts` struct.
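//!
//! Editor's note: as the impl below shows, both the account address and the
//! storage slot are chunked into little-endian felts, while `storage_key` is
//! passed through unchanged.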
2 | 
3 | use alloy::primitives::StorageKey;
4 | use serde::{Deserialize, Serialize};
5 | use serde_with::serde_as;
6 | use starknet::core::serde::unsigned_field_element::UfeHex;
7 | use starknet_types_core::felt::Felt;
8 | 
9 | use crate::primitives::processed_types::storage::ProcessedStorage as BaseProcessedStorage;
10 | 
11 | use super::{felt_vec_unit::FieldElementVectorUnit, mpt::ProcessedMPTProof, traits::AsCairoFormat};
12 | 
13 | impl AsCairoFormat for BaseProcessedStorage {
14 |     type Output = ProcessedStorage;
15 | 
16 |     fn as_cairo_format(&self) -> Self::Output {
17 |         let address_chunk_result = FieldElementVectorUnit::from_bytes(self.address.as_ref());
18 |         let slot_chunk_result = FieldElementVectorUnit::from_bytes(self.slot.as_ref());
19 |         let storage_key = self.storage_key;
20 |         let proofs = self
21 |             .proofs
22 |             .iter()
23 |             .map(|proof| proof.as_cairo_format())
24 |             .collect();
25 |         ProcessedStorage {
26 |             address: address_chunk_result.felts,
27 |             slot: slot_chunk_result.felts,
28 |             storage_key,
29 |             proofs,
30 |         }
31 |     }
32 | }
33 | 
34 | #[serde_as]
35 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
36 | pub struct ProcessedStorage {
37 |     // chunked address
38 |     #[serde_as(as = "Vec<UfeHex>")]
39 |     pub address: Vec<Felt>,
40 |     // chunked storage slot
41 |     #[serde_as(as = "Vec<UfeHex>")]
42 |     pub slot: Vec<Felt>,
43 |     pub storage_key: StorageKey,
44 |     pub proofs: Vec<ProcessedMPTProof>,
45 | }
46 | 
47 | #[cfg(test)]
48 | mod tests {
49 |     use super::*;
50 | 
51 |     #[test]
52 |     fn test_storage_serde() {
53 |         let processed_string =
54 |             include_str!("../../../../../fixtures/primitives/processed/storage.json");
55 |         let storages: BaseProcessedStorage = serde_json::from_str(processed_string).unwrap();
56 |         let storages_in_felts: ProcessedStorage = storages.as_cairo_format();
57 |         let string = serde_json::to_string_pretty(&storages_in_felts).unwrap();
58 | 
59 |         let json_file =
60 |             include_str!("../../../../../fixtures/primitives/processed_in_felts/storage.json");
61 |         let expected: ProcessedStorage = serde_json::from_str(json_file).unwrap();
62 |         let expected_string = serde_json::to_string_pretty(&expected).unwrap();
63 | 
64 |         assert_eq!(string, expected_string);
65 |     }
66 | }
67 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/task.rs:
--------------------------------------------------------------------------------
1 | use super::{module::ProcessedModule, AsCairoFormat, ProcessedDatalakeCompute};
2 | use crate::primitives::processed_types::task::ProcessedTask as BaseProcessedTask;
3 | use ::serde::Serialize;
4 | use serde::Deserialize;
5 | 
6 | impl AsCairoFormat for BaseProcessedTask {
7 |     type Output = ProcessedTask;
8 | 
9 |     fn as_cairo_format(&self) -> Self::Output {
10 |         match self {
11 |             BaseProcessedTask::DatalakeCompute(datalake_compute) => {
12 |                 ProcessedTask::DatalakeCompute(datalake_compute.as_cairo_format())
13 |             }
14 |             BaseProcessedTask::Module(module) => ProcessedTask::Module(module.as_cairo_format()),
15 |         }
16 |     }
17 | }
18 | 
19 | #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
20 | #[serde(tag = "type", content = "context")]
21 | pub enum ProcessedTask {
22 |     #[serde(rename = "datalake_compute")]
23 |     DatalakeCompute(ProcessedDatalakeCompute),
24 |     #[serde(rename = "module")]
25 |     Module(ProcessedModule),
26 | }
27 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/traits.rs:
--------------------------------------------------------------------------------
1 | pub trait AsCairoFormat {
2 |     type Output;
3 | 
4 |     fn as_cairo_format(&self) -> Self::Output;
5 | }
6 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/cairo_format/transaction.rs:
--------------------------------------------------------------------------------
1 | //! The transaction module contains the ProcessedTransaction struct and its conversion to ProcessedTransactionInFelts.
2 | 
3 | use super::{felt_vec_unit::FieldElementVectorUnit, traits::AsCairoFormat};
4 | use crate::primitives::processed_types::transaction::ProcessedTransaction as BaseProcessedTransaction;
5 | use serde::{Deserialize, Serialize};
6 | use serde_with::serde_as;
7 | use starknet::core::serde::unsigned_field_element::UfeHex;
8 | use starknet_types_core::felt::Felt;
9 | 
10 | impl AsCairoFormat for BaseProcessedTransaction {
11 |     type Output = ProcessedTransaction;
12 | 
13 |     fn as_cairo_format(&self) -> Self::Output {
14 |         let key = self.key.clone();
15 |         let proof_felts: Vec<FieldElementVectorUnit> = self
16 |             .proof
17 |             .iter()
18 |             .map(|proof| FieldElementVectorUnit::from_bytes(proof))
19 |             .collect();
20 | 
21 |         let proof_bytes_len = proof_felts.iter().map(|f| f.bytes_len).collect();
22 |         let proof_result: Vec<Vec<Felt>> = proof_felts.iter().map(|f| f.felts.clone()).collect();
23 |         ProcessedTransaction {
24 |             key,
25 |             block_number: self.block_number,
26 |             proof_bytes_len,
27 |             proof: proof_result,
28 |         }
29 |     }
30 | }
31 | 
32 | #[serde_as]
33 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
34 | pub struct ProcessedTransaction {
35 |     pub key: String,
36 |     pub block_number: u64,
37 |     /// proof_bytes_len is the byte (8-bit) length of each proof string
38 |     pub proof_bytes_len: Vec<u64>,
39 |     #[serde_as(as = "Vec<Vec<UfeHex>>")]
40 |     pub proof: Vec<Vec<Felt>>,
41 | }
42 | 
43 | #[cfg(test)]
44 | mod tests {
45 |     use super::*;
46 | 
47 |     #[test]
48 |     fn test_transaction_serde() {
49 |         let processed_string =
50 |             include_str!("../../../../../fixtures/primitives/processed/transaction.json");
51 |         let tx: BaseProcessedTransaction = serde_json::from_str(processed_string).unwrap();
52 |         let tx_in_felts = tx.as_cairo_format();
53 |         let string = serde_json::to_string_pretty(&tx_in_felts).unwrap();
54 | 
55 |         let json_file =
56 |             include_str!("../../../../../fixtures/primitives/processed_in_felts/transaction.json");
57 |         let expected: ProcessedTransaction = serde_json::from_str(json_file).unwrap();
58 |         let expected_string = serde_json::to_string_pretty(&expected).unwrap();
59 | 
60 |         assert_eq!(string, expected_string);
61 |     }
62 | }
63 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/datalake_compute.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::{Bytes, B256, U256};
2 | use serde::{Deserialize, Serialize};
3 | 
4 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
5 | pub struct ProcessedDatalakeCompute {
6 |     /// encoded computational task
7 |     pub encoded_task: Bytes,
8 |     /// computational task commitment
9 |     pub task_commitment: B256,
10 |     /// raw evaluation result of target compiled task
11 |     pub compiled_result: U256,
12 |     /// results merkle tree's entry value
13 |     pub result_commitment: B256,
14 |     /// merkle proof for tasks
15 |     pub task_proof: Vec<B256>,
16 |     /// merkle proof for results
17 |     pub result_proof: Vec<B256>,
18 |     /// encoded datalake
19 |     pub encoded_datalake: Bytes,
20 |     // ex. block sampled datalake / transaction datalake
21 |     pub datalake_type: u8,
22 |     // ex. "header", "account", "storage"
23 |     pub property_type: u8,
24 | }
25 | 
26 | impl ProcessedDatalakeCompute {
27 |     pub fn new(
28 |         encoded_task: Bytes,
29 |         task_commitment: B256,
30 |         compiled_result: U256,
31 |         result_commitment: B256,
32 |         task_proof: Vec<B256>,
33 |         result_proof: Vec<B256>,
34 |         encoded_datalake: Bytes,
35 |         datalake_type: u8,
36 |         property_type: u8,
37 |     ) -> Self {
38 |         Self {
39 |             encoded_task,
40 |             task_commitment,
41 |             compiled_result,
42 |             result_commitment,
43 |             task_proof,
44 |             result_proof,
45 |             encoded_datalake,
46 |             datalake_type,
47 |             property_type,
48 |         }
49 |     }
50 | }
51 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/header.rs:
--------------------------------------------------------------------------------
1 | use alloy::hex;
2 | use serde::{Deserialize, Serialize};
3 | use serde_with::serde_as;
4 | 
5 | use crate::{
6 |     primitives::block::header::RlpBlockHeader,
7 |     primitives::serde::{deserialize_hex, serialize_hex},
8 | };
9 | 
10 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
11 | pub struct ProcessedHeaderProof {
12 |     pub leaf_idx: u64,
13 |     pub mmr_path: Vec<String>,
14 | }
15 | 
16 | impl ProcessedHeaderProof {
17 |     pub fn new(leaf_idx: u64, mmr_path: Vec<String>) -> Self {
18 |         ProcessedHeaderProof { leaf_idx, mmr_path }
19 |     }
20 | }
21 | 
22 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
23 | #[serde_as]
24 | pub struct ProcessedHeader {
25 |     #[serde(serialize_with = "serialize_hex", deserialize_with = "deserialize_hex")]
26 |     pub rlp: Vec<u8>,
27 |     pub proof: ProcessedHeaderProof,
28 | }
29 | 
30 | impl ProcessedHeader {
31 |     pub fn new(rlp: RlpBlockHeader, leaf_idx: u64, mmr_path: Vec<String>) -> Self {
32 |         let rlp = hex::decode(rlp.0).expect("Cannot decode RLP block header to bytes");
33 |         let proof = ProcessedHeaderProof::new(leaf_idx, mmr_path);
34 |         Self { rlp, proof }
35 |     }
36 | }
37 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/mmr.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | 
3 | use crate::primitives::{block::header::MMRMetaFromNewIndexer, utils::hex_string_to_uint};
4 | 
5 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
6 | pub struct MMRMeta {
7 |     pub id: u64,
8 |     pub root: String,
9 |     pub size: u64,
10 |     // hex encoded
11 |     pub peaks: Vec<String>,
12 | }
13 | 
14 | impl MMRMeta {
15 |     pub fn new(id: u64, root: String, size: u64, peaks: Vec<String>) -> Self {
16 |         MMRMeta {
17 |             id,
18 |             root,
19 |             size,
20 |             peaks,
21 |         }
22 |     }
23 | }
24 | 
25 | impl MMRMeta {
26 |     pub fn from_indexer(val: MMRMetaFromNewIndexer) -> Self {
27 |         MMRMeta {
28 |             id: hex_string_to_uint(&val.mmr_id) as u64,
29 |             root: val.mmr_root,
30 |             size: val.mmr_size,
31 |             peaks: val.mmr_peaks,
32 |         }
33 |     }
34 | }
35 | 
36 | #[cfg(test)]
37 | mod tests {
38 | 
39 |     use super::*;
40 | 
41 |     #[test]
42 |     fn test_mmr_meta() {
43 |         let mmr_meta = MMRMeta::new(
44 |             26,
45 |             "0x18e672dd525cd5eacc5f6b15e5d89451bce65177881304d5200af38e350ebdc".to_string(),
46 |             12273288,
47 |             vec![
48 |                 "0x262c4c9b1cb2a036924aecf563dc9952e5f8b41004310adde86f22abb793eb1".to_string(),
49 |                 "0x72f553aac8690d09c95fe6220fdd5a073440631e4ca0a161a92b655d2ac9478".to_string(),
50 |                 "0x6c68dfa085af40218620038d05f477fba52c4b12b812b64902663abf78bca62".to_string(),
"0x6c68dfa085af40218620038d05f477fba52c4b12b812b64902663abf78bca62".to_string(), 51 | "0x52a50beb6cbeffbd5db875d77e4d3917fdee5f723165f139dc04fe20cd4d69a".to_string(), 52 | "0x5c4814bbd601bffb5ac9980977a79bf100d4c1ad4f2caa410f7a7c4249a2fd4".to_string(), 53 | "0x668035a3620690024dac08a8db46e3316619e4c2a634daaa3175ab16af72deb".to_string(), 54 | "0x67cff2a39ca6fb235decefaf5bb63f54c550b97b57e9873751eb9dae35cfcd4".to_string(), 55 | "0x2a7d9ca4745f200dd2c66d2dfd6374a21f7092452287696c395f62afc22c805".to_string(), 56 | "0x37511dd8cc41503f6c08879d18f15b9ae649d6b2cdd91bcaa3990aeb87ba8c6".to_string(), 57 | "0x55112088a2f7dfaf5d88ce949f3aad7c7d05d6e4eaff4053aebfbed3af885af".to_string(), 58 | "0x66c82fce8bfc291095c6c9255b1f7ccf725a1e91e8ae8cd8c43ceb111c21480".to_string(), 59 | "0x2e5274895f9cd556bb8dee5b2551e9cda9aa3caa23532f9824abcc62d5ad273".to_string(), 60 | ], 61 | ); 62 | 63 | let processed_string = include_str!("../../../../fixtures/primitives/mmr.json"); 64 | let mmr: MMRMeta = serde_json::from_str(processed_string).unwrap(); 65 | assert_eq!(mmr_meta, mmr); 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cairo_format; 2 | 3 | pub mod starknet; 4 | 5 | pub mod account; 6 | pub mod block_proofs; 7 | pub mod datalake_compute; 8 | pub mod header; 9 | pub mod mmr; 10 | pub mod module; 11 | pub mod mpt; 12 | pub mod processor_output; 13 | pub mod query; 14 | pub mod receipt; 15 | pub mod storage; 16 | pub mod task; 17 | pub mod transaction; 18 | pub mod uint256; 19 | -------------------------------------------------------------------------------- /hdp/src/primitives/processed_types/module.rs: -------------------------------------------------------------------------------- 1 | use alloy::primitives::{Bytes, B256, U256}; 2 | use cairo_lang_starknet_classes::casm_contract_class::CasmContractClass; 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::primitives::task::module::ModuleInput; 6 | 7 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 8 | pub struct ProcessedModule { 9 | /// encoded computational task 10 | pub encoded_task: Bytes, 11 | pub task_commitment: B256, 12 | pub result_commitment: B256, 13 | /// raw evaluation result of target compiled task 14 | pub compiled_result: U256, 15 | pub task_proof: Vec, 16 | pub result_proof: Vec, 17 | 18 | pub inputs: Vec, 19 | /// Detail class code of the module. 20 | /// This will be loaded to bootloader. 
21 |     pub module_class: CasmContractClass,
22 | }
23 | 
24 | impl ProcessedModule {
25 |     pub fn new(
26 |         encoded_task: Bytes,
27 |         task_commitment: B256,
28 |         result_commitment: B256,
29 |         compiled_result: U256,
30 |         task_proof: Vec<B256>,
31 |         result_proof: Vec<B256>,
32 |         inputs: Vec<ModuleInput>,
33 |         module_class: CasmContractClass,
34 |     ) -> Self {
35 |         ProcessedModule {
36 |             encoded_task,
37 |             task_commitment,
38 |             result_commitment,
39 |             compiled_result,
40 |             task_proof,
41 |             result_proof,
42 |             inputs,
43 |             module_class,
44 |         }
45 |     }
46 | }
47 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/mpt.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::Bytes;
2 | use serde::{Deserialize, Serialize};
3 | 
4 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
5 | pub struct ProcessedMPTProof {
6 |     pub block_number: u64,
7 |     pub proof: Vec<Bytes>,
8 | }
9 | 
10 | impl ProcessedMPTProof {
11 |     pub fn new(block_number: u64, proof: Vec<Bytes>) -> Self {
12 |         ProcessedMPTProof {
13 |             block_number,
14 |             proof,
15 |         }
16 |     }
17 | }
18 | 
19 | #[cfg(test)]
20 | mod tests {
21 | 
22 |     use super::*;
23 | 
24 |     #[test]
25 |     fn test_mpt_proof() {
26 |         let processed_string = include_str!("../../../../fixtures/primitives/processed/mpt.json");
27 |         let processed_mpt: ProcessedMPTProof = serde_json::from_str(processed_string).unwrap();
28 |         assert_eq!(processed_mpt.block_number, 5244634);
29 |         assert_eq!(processed_mpt.proof.len(), 8);
30 |     }
31 | }
32 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/processor_output.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::B256;
2 | use serde::Serialize;
3 | 
4 | use super::mmr::MMRMeta;
5 | 
6 | #[derive(Debug, Serialize)]
7 | pub struct ProcessorOutput {
8 |     /// raw results of the module
9 |     pub raw_results: Vec<B256>,
10 |     /// leaf of result merkle tree
11 |     pub results_commitments: Vec<B256>,
12 |     /// leaf of task merkle tree
13 |     pub tasks_commitments: Vec<B256>,
14 |     /// tasks inclusion proofs
15 |     pub task_inclusion_proofs: Vec<Vec<B256>>,
16 |     /// results inclusion proofs
17 |     pub results_inclusion_proofs: Vec<Vec<B256>>,
18 |     /// root of the results merkle tree
19 |     pub results_root: B256,
20 |     /// root of the tasks merkle tree
21 |     pub tasks_root: B256,
22 |     /// mmr metas related to processed tasks
23 |     pub mmr_metas: Vec<MMRMeta>,
24 | }
25 | 
26 | impl ProcessorOutput {
27 |     pub fn new(
28 |         raw_results: Vec<B256>,
29 |         results_commitments: Vec<B256>,
30 |         tasks_commitments: Vec<B256>,
31 |         task_inclusion_proofs: Vec<Vec<B256>>,
32 |         results_inclusion_proofs: Vec<Vec<B256>>,
33 |         results_root: B256,
34 |         tasks_root: B256,
35 |         mmr_metas: Vec<MMRMeta>,
36 |     ) -> Self {
37 |         Self {
38 |             raw_results,
39 |             results_commitments,
40 |             tasks_commitments,
41 |             task_inclusion_proofs,
42 |             results_inclusion_proofs,
43 |             results_root,
44 |             tasks_root,
45 |             mmr_metas,
46 |         }
47 |     }
48 | }
49 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/query.rs:
--------------------------------------------------------------------------------
1 | use ::serde::{Deserialize, Serialize};
2 | use alloy::primitives::B256;
3 | use std::path::PathBuf;
4 | 
5 | use super::{
6 |     block_proofs::ProcessedBlockProofs, mmr::MMRMeta, processor_output::ProcessorOutput,
7 |     task::ProcessedTask,
8 | };
9 | 
10 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
11 | pub struct ProcessorInput {
12 |     /// Path to the directory where the cairo-run output will be stored.
13 |     pub cairo_run_output_path: PathBuf,
14 |     // U256 type
15 |     pub tasks_root: B256,
16 |     // U256 type
17 |     pub results_root: B256,
18 |     pub proofs: Vec<ProcessedBlockProofs>,
19 |     pub tasks: Vec<ProcessedTask>,
20 | }
21 | 
22 | impl ProcessorInput {
23 |     pub fn new(
24 |         cairo_run_output_path: PathBuf,
25 |         results_root: B256,
26 |         tasks_root: B256,
27 |         proofs: Vec<ProcessedBlockProofs>,
28 |         tasks: Vec<ProcessedTask>,
29 |     ) -> Self {
30 |         Self {
31 |             cairo_run_output_path,
32 |             results_root,
33 |             tasks_root,
34 |             proofs,
35 |             tasks,
36 |         }
37 |     }
38 | 
39 |     /// Build a [`ProcessorOutput`] from this [`ProcessorInput`]'s tasks and proofs.
40 |     pub fn into_processor_output(&self) -> ProcessorOutput {
41 |         let tasks_commitments: Vec<B256> = self
42 |             .tasks
43 |             .iter()
44 |             .map(|task| task.get_task_commitment())
45 |             .collect();
46 |         let task_inclusion_proofs: Vec<Vec<B256>> = self
47 |             .tasks
48 |             .iter()
49 |             .map(|task| task.get_task_proof())
50 |             .collect();
51 |         let task_results: Vec<B256> = self.tasks.iter().map(|task| task.get_result()).collect();
52 |         let result_commitments: Vec<B256> = self
53 |             .tasks
54 |             .iter()
55 |             .map(|task| task.get_result_commitment())
56 |             .collect();
57 |         let results_inclusion_proofs: Vec<Vec<B256>> = self
58 |             .tasks
59 |             .iter()
60 |             .map(|task| task.get_result_proof())
61 |             .collect();
62 |         let mmr_metas: Vec<MMRMeta> = self
63 |             .proofs
64 |             .iter()
65 |             .flat_map(|x| x.clone().get_mmr_meta())
66 |             .collect();
67 | 
68 |         ProcessorOutput::new(
69 |             task_results,
70 |             result_commitments,
71 |             tasks_commitments,
72 |             task_inclusion_proofs,
73 |             results_inclusion_proofs,
74 |             self.results_root,
75 |             self.tasks_root,
76 |             mmr_metas,
77 |         )
78 |     }
79 | }
80 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/receipt.rs:
--------------------------------------------------------------------------------
1 | //! This module defines the `ProcessedReceipt` struct and its corresponding `ProcessedReceiptInFelts` struct.
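//!
//! Editor's note: `key` is derived from the transaction index by
//! `tx_index_to_tx_key` (an Ethereum block's receipts trie is keyed by the
//! RLP-encoded transaction index), and `proof` holds the raw MPT nodes for
//! that entry.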
2 | 
3 | use crate::primitives::utils::tx_index_to_tx_key;
4 | use alloy::primitives::Bytes;
5 | use serde::{Deserialize, Serialize};
6 | 
7 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
8 | pub struct ProcessedReceipt {
9 |     pub key: String,
10 |     pub block_number: u64,
11 |     pub proof: Vec<Bytes>,
12 | }
13 | 
14 | impl ProcessedReceipt {
15 |     pub fn new(index: u64, block_number: u64, proof: Vec<Bytes>) -> Self {
16 |         let key = tx_index_to_tx_key(index);
17 |         Self {
18 |             key,
19 |             block_number,
20 |             proof,
21 |         }
22 |     }
23 | }
24 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/starknet/header.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use starknet_crypto::Felt;
3 | 
4 | use crate::primitives::processed_types::header::ProcessedHeaderProof;
5 | 
6 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
7 | pub struct ProcessedHeader {
8 |     pub fields: Vec<Felt>,
9 |     pub proof: ProcessedHeaderProof,
10 | }
11 | 
12 | impl ProcessedHeader {
13 |     pub fn new(fields: Vec<Felt>, leaf_idx: u64, mmr_path: Vec<String>) -> Self {
14 |         let proof = ProcessedHeaderProof::new(leaf_idx, mmr_path);
15 |         Self { fields, proof }
16 |     }
17 | }
18 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/starknet/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod header;
2 | pub mod storage;
3 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/starknet/storage.rs:
--------------------------------------------------------------------------------
1 | use alloy::hex;
2 | use serde::{Deserialize, Serialize, Serializer};
3 | use starknet_crypto::Felt;
4 | 
5 | use crate::{primitives::ChainId, provider::starknet::types::GetProofOutput};
6 | 
7 | fn serialize_chain_id<S>(chain_id: &ChainId, serializer: S) -> Result<S::Ok, S::Error>
8 | where
9 |     S: Serializer,
10 | {
11 |     let bytes = chain_id.to_be_bytes();
12 |     let hex_string = format!("0x{}", hex::encode(bytes));
13 |     serializer.serialize_str(&hex_string)
14 | }
15 | 
16 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
17 | pub struct ProcessedStorage {
18 |     #[serde(serialize_with = "serialize_chain_id")]
19 |     pub chain_id: ChainId,
20 |     pub block_number: u64,
21 |     pub contract_address: Felt,
22 |     pub storage_addresses: Vec<Felt>,
23 |     pub proof: GetProofOutput,
24 | }
25 | 
26 | impl ProcessedStorage {
27 |     pub fn new(
28 |         chain_id: ChainId,
29 |         block_number: u64,
30 |         contract_address: Felt,
31 |         storage_addresses: Vec<Felt>,
32 |         proof: GetProofOutput,
33 |     ) -> Self {
34 |         Self {
35 |             chain_id,
36 |             block_number,
37 |             contract_address,
38 |             storage_addresses,
39 |             proof,
40 |         }
41 |     }
42 | }
43 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/storage.rs:
--------------------------------------------------------------------------------
1 | //! This module defines the `ProcessedStorage` struct and its corresponding `ProcessedStorageInFelts` struct.
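//!
//! Editor's note: in Ethereum's storage trie the leaf for a slot lives at
//! `keccak256(slot)`, which is exactly how the constructor below derives
//! `storage_key`; e.g. slot 0x00..00 hashes to
//! 0x290decd9548b62a8d60345a988386fc84ba6bc95484008f6362f93160ef3e563.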
2 | 
3 | use super::mpt::ProcessedMPTProof;
4 | use alloy::primitives::{keccak256, Address, StorageKey, B256};
5 | use serde::{Deserialize, Serialize};
6 | 
7 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
8 | pub struct ProcessedStorage {
9 |     pub address: Address,
10 |     pub slot: B256,
11 |     pub storage_key: StorageKey,
12 |     pub proofs: Vec<ProcessedMPTProof>,
13 | }
14 | 
15 | impl ProcessedStorage {
16 |     pub fn new(address: Address, slot: B256, proofs: Vec<ProcessedMPTProof>) -> Self {
17 |         // TODO: actually this is storage leaf. slot == storage key
18 |         let storage_trie_leaf = keccak256(slot);
19 |         ProcessedStorage {
20 |             address,
21 |             slot,
22 |             storage_key: storage_trie_leaf,
23 |             proofs,
24 |         }
25 |     }
26 | }
27 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/task.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::B256;
2 | use serde::{Deserialize, Serialize};
3 | 
4 | use super::{datalake_compute::ProcessedDatalakeCompute, module::ProcessedModule};
5 | 
6 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
7 | #[serde(tag = "type", content = "context")]
8 | pub enum ProcessedTask {
9 |     #[serde(rename = "datalake_compute")]
10 |     DatalakeCompute(ProcessedDatalakeCompute),
11 |     #[serde(rename = "module")]
12 |     Module(ProcessedModule),
13 | }
14 | 
15 | impl ProcessedTask {
16 |     pub fn get_task_commitment(&self) -> B256 {
17 |         match self {
18 |             ProcessedTask::DatalakeCompute(datalake_compute) => datalake_compute.task_commitment,
19 |             ProcessedTask::Module(module) => module.task_commitment,
20 |         }
21 |     }
22 | 
23 |     pub fn get_task_proof(&self) -> Vec<B256> {
24 |         match self {
25 |             ProcessedTask::DatalakeCompute(datalake_compute) => datalake_compute.task_proof.clone(),
26 |             ProcessedTask::Module(module) => module.task_proof.clone(),
27 |         }
28 |     }
29 | 
30 |     pub fn get_result(&self) -> B256 {
31 |         match self {
32 |             ProcessedTask::DatalakeCompute(datalake_compute) => {
33 |                 B256::from(datalake_compute.compiled_result)
34 |             }
35 |             ProcessedTask::Module(module) => B256::from(module.compiled_result),
36 |         }
37 |     }
38 | 
39 |     pub fn get_result_commitment(&self) -> B256 {
40 |         match self {
41 |             ProcessedTask::DatalakeCompute(datalake_compute) => {
42 |                 B256::from(datalake_compute.result_commitment)
43 |             }
44 |             ProcessedTask::Module(module) => B256::from(module.result_commitment),
45 |         }
46 |     }
47 | 
48 |     pub fn get_result_proof(&self) -> Vec<B256> {
49 |         match self {
50 |             ProcessedTask::DatalakeCompute(datalake_compute) => {
51 |                 datalake_compute.result_proof.clone()
52 |             }
53 |             ProcessedTask::Module(module) => module.result_proof.clone(),
54 |         }
55 |     }
56 | }
57 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/processed_types/transaction.rs:
--------------------------------------------------------------------------------
1 | //! The transaction module contains the ProcessedTransaction struct and its conversion to ProcessedTransactionInFelts.
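//!
//! Editor's note: mirrors `receipt.rs` above; `key` comes from the same
//! `tx_index_to_tx_key` helper (the transactions trie is likewise keyed by
//! transaction index) and `proof` carries the raw trie nodes as `Bytes`.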
2 | 
3 | use crate::primitives::utils::tx_index_to_tx_key;
4 | use alloy::primitives::Bytes;
5 | use serde::{Deserialize, Serialize};
6 | 
7 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)]
8 | pub struct ProcessedTransaction {
9 |     pub key: String,
10 |     pub block_number: u64,
11 |     pub proof: Vec<Bytes>,
12 | }
13 | 
14 | impl ProcessedTransaction {
15 |     pub fn new(index: u64, block_number: u64, proof: Vec<Bytes>) -> Self {
16 |         let key = tx_index_to_tx_key(index);
17 |         Self {
18 |             key,
19 |             block_number,
20 |             proof,
21 |         }
22 |     }
23 | }
24 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/request.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::task::{datalake::DatalakeCompute, module::Module};
2 | use serde::{Deserialize, Serialize};
3 | 
4 | use super::ChainId;
5 | 
6 | #[derive(Debug, Serialize, Deserialize)]
7 | #[serde(tag = "type")]
8 | pub enum Task {
9 |     DatalakeCompute(DatalakeCompute),
10 |     Module(Module),
11 | }
12 | 
13 | #[derive(Debug, Serialize, Deserialize)]
14 | #[serde(rename_all = "camelCase")]
15 | pub struct SubmitBatchQuery {
16 |     pub destination_chain_id: ChainId,
17 |     pub tasks: Vec<Task>,
18 | }
19 | 
20 | #[test]
21 | fn test_serialize_submit_batch_query_datalake() {
22 |     let json_data = r#"
23 |     {
24 |       "destinationChainId": "ETHEREUM_SEPOLIA",
25 |       "tasks": [
26 |         {
27 |           "type": "DatalakeCompute",
28 |           "datalake": {
29 |             "type": "BlockSampled",
30 |             "chainId": "ETHEREUM_SEPOLIA",
31 |             "blockRangeStart": 5515020,
32 |             "blockRangeEnd": 5515039,
33 |             "increment": 10,
34 |             "sampledProperty": "header.base_fee_per_gas"
35 |           },
36 |           "compute": {
37 |             "aggregateFnId": "avg"
38 |           }
39 |         },
40 |         {
41 |           "type": "DatalakeCompute",
42 |           "datalake": {
43 |             "type": "TransactionsInBlock",
44 |             "chainId": "ETHEREUM_SEPOLIA",
45 |             "targetBlock": 5409986,
46 |             "startIndex": 10,
47 |             "endIndex": 40,
48 |             "increment": 10,
49 |             "includedTypes": {
50 |               "legacy": true,
51 |               "eip2930": true,
52 |               "eip1559": true,
53 |               "eip4844": true
54 |             },
55 |             "sampledProperty": "tx_receipt.success"
56 |           },
57 |           "compute": {
58 |             "aggregateFnId": "count",
59 |             "aggregateFnCtx": {
60 |               "operator": "gt",
61 |               "valueToCompare": "1000000000000000000"
62 |             }
63 |           }
64 |         }
65 |       ]
66 |     }
67 |     "#;
68 | 
69 |     let parsed: SubmitBatchQuery = serde_json::from_str(json_data).unwrap();
70 |     println!("{:?}", parsed);
71 | }
72 | 
73 | #[test]
74 | fn test_serialize_submit_batch_query_module() {
75 |     let json_data = r#"
76 |     {
77 |       "destinationChainId": "ETHEREUM_SEPOLIA",
78 |       "tasks": [
79 |         {
80 |           "type": "Module",
81 |           "programHash": "0x64041a339b1edd10de83cf031cfa938645450f971d2527c90d4c2ce68d7d412",
82 |           "inputs": [
83 |             {
84 |               "visibility": "private",
85 |               "value": "0x5222a4"
86 |             },
87 |             {
88 |               "visibility": "public",
89 |               "value": "0x00000000000000000000000013cb6ae34a13a0977f4d7101ebc24b87bb23f0d5"
90 |             }
91 |           ]
92 |         }
93 |       ]
94 |     }
95 | 
96 |     "#;
97 | 
98 |     let parsed: SubmitBatchQuery = serde_json::from_str(json_data).unwrap();
99 |     println!("{:?}", parsed);
100 | }
101 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/serde.rs:
--------------------------------------------------------------------------------
1 | use alloy::hex;
2 | use serde::de::{self, Visitor};
3 | use serde::{Deserializer, Serializer};
4 | use std::fmt;
5 | 
6 | pub fn serialize_hex<S>(bytes: &Vec<u8>, serializer: S) -> Result<S::Ok, S::Error>
7 | where
8 |     S: Serializer,
9 | {
10 |     let hex_string = hex::encode(bytes);
11 |     serializer.serialize_str(&hex_string)
12 | }
13 | 
14 | pub fn deserialize_hex<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
15 | where
16 |     D: Deserializer<'de>,
17 | {
18 |     struct HexVisitor;
19 | 
20 |     impl<'de> Visitor<'de> for HexVisitor {
21 |         type Value = Vec<u8>;
22 | 
23 |         fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
24 |             formatter.write_str("a hex-encoded string")
25 |         }
26 | 
27 |         fn visit_str<E>(self, value: &str) -> Result<Vec<u8>, E>
28 |         where
29 |             E: de::Error,
30 |         {
31 |             hex::decode(value).map_err(de::Error::custom)
32 |         }
33 |     }
34 | 
35 |     deserializer.deserialize_str(HexVisitor)
36 | }
37 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/datalake_compute/datalake/block_sampled.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::solidity_types::traits::DatalakeCodecs;
2 | use crate::primitives::task::datalake::block_sampled::{
3 |     BlockSampledCollection, BlockSampledDatalake,
4 | };
5 | use crate::primitives::task::datalake::datalake_type::DatalakeType;
6 | use crate::primitives::task::datalake::DatalakeCollection;
7 | use crate::primitives::ChainId;
8 | 
9 | use alloy::primitives::keccak256;
10 | use alloy::{
11 |     dyn_abi::{DynSolType, DynSolValue},
12 |     primitives::B256,
13 | };
14 | use anyhow::{bail, Result};
15 | 
16 | impl DatalakeCodecs for BlockSampledDatalake {
17 |     /// Get the datalake code for block sampled datalake
18 |     fn get_datalake_type(&self) -> DatalakeType {
19 |         DatalakeType::BlockSampled
20 |     }
21 | 
22 |     /// Encode the block sampled datalake
23 |     fn encode(&self) -> Result<Vec<u8>> {
24 |         let datalake_code: DynSolValue = self.get_datalake_type().to_u8().into();
25 |         let chain_id: DynSolValue = self.chain_id.to_numeric_id().into();
26 |         let block_range_start: DynSolValue = self.block_range_start.into();
27 |         let block_range_end: DynSolValue = self.block_range_end.into();
28 |         let sampled_property: DynSolValue = self.sampled_property.serialize()?.into();
29 |         let increment: DynSolValue = self.increment.into();
30 | 
31 |         let tuple_value = DynSolValue::Tuple(vec![
32 |             datalake_code,
33 |             chain_id,
34 |             block_range_start,
35 |             block_range_end,
36 |             increment,
37 |             sampled_property,
38 |         ]);
39 | 
40 |         match tuple_value.abi_encode_sequence() {
41 |             Some(encoded_datalake) => Ok(encoded_datalake),
42 |             None => bail!("Encoding failed"),
43 |         }
44 |     }
45 | 
46 |     /// Get the commitment hash of the block sampled datalake
47 |     fn commit(&self) -> B256 {
48 |         keccak256(self.encode().expect("Encoding failed"))
49 |     }
50 | 
51 |     /// Decode the encoded block sampled datalake
52 |     fn decode(encoded: &[u8]) -> Result<Self> {
53 |         let abi_type: DynSolType = "(uint256,uint256,uint256,uint256,uint256,bytes)".parse()?;
54 |         let decoded = abi_type.abi_decode_sequence(encoded)?;
55 |         let value = decoded.as_tuple().unwrap();
56 |         let datalake_code = value[0].as_uint().unwrap().0.to_string().parse::<u8>()?;
57 | 
58 |         if DatalakeType::from_index(datalake_code)? != DatalakeType::BlockSampled {
59 |             bail!("Encoded datalake is not a block sample datalake");
60 |         }
61 | 
62 |         let chain_id =
63 |             ChainId::from_numeric_id(value[1].as_uint().unwrap().0.to_string().parse::<u128>()?)?;
64 |         let block_range_start = value[2].as_uint().unwrap().0.to_string().parse::<u64>()?;
65 |         let block_range_end = value[3].as_uint().unwrap().0.to_string().parse::<u64>()?;
66 |         let increment = value[4].as_uint().unwrap().0.to_string().parse::<u64>()?;
67 |         let sampled_property = BlockSampledCollection::deserialize(value[5].as_bytes().unwrap())?;
68 | 
69 |         Ok(Self {
70 |             chain_id,
71 |             block_range_start,
72 |             block_range_end,
73 |             increment,
74 |             sampled_property,
75 |         })
76 |     }
77 | }
78 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/datalake_compute/datalake/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod block_sampled;
2 | pub mod envelope;
3 | pub mod transactions_in_block;
4 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/datalake_compute/datalake/transactions_in_block.rs:
--------------------------------------------------------------------------------
1 | use crate::primitives::{
2 |     solidity_types::traits::DatalakeCodecs,
3 |     task::datalake::{
4 |         datalake_type::DatalakeType,
5 |         transactions::{IncludedTypes, TransactionsCollection, TransactionsInBlockDatalake},
6 |         DatalakeCollection,
7 |     },
8 |     ChainId,
9 | };
10 | use alloy::primitives::{keccak256, U256};
11 | use alloy::{
12 |     dyn_abi::{DynSolType, DynSolValue},
13 |     primitives::B256,
14 | };
15 | use anyhow::{bail, Result};
16 | 
17 | impl DatalakeCodecs for TransactionsInBlockDatalake {
18 |     /// Get the datalake code for transactions datalake
19 |     fn get_datalake_type(&self) -> DatalakeType {
20 |         DatalakeType::TransactionsInBlock
21 |     }
22 | 
23 |     /// Encode the [`TransactionsInBlockDatalake`] into a hex string
24 |     fn encode(&self) -> Result<Vec<u8>> {
25 |         let datalake_code: DynSolValue = self.get_datalake_type().to_u8().into();
26 |         let chain_id: DynSolValue = self.chain_id.to_numeric_id().into();
27 |         let target_block: DynSolValue = self.target_block.into();
28 |         let sampled_property: DynSolValue = self.sampled_property.serialize()?.into();
29 |         let start_index: DynSolValue = self.start_index.into();
30 |         let end_index: DynSolValue = self.end_index.into();
31 |         let increment: DynSolValue = self.increment.into();
32 |         let converted: U256 = self.included_types.into();
33 |         let included_types: DynSolValue = converted.into();
34 | 
35 |         let tuple_value = DynSolValue::Tuple(vec![
36 |             datalake_code,
37 |             chain_id,
38 |             target_block,
39 |             start_index,
40 |             end_index,
41 |             increment,
42 |             included_types,
43 |             sampled_property,
44 |         ]);
45 | 
46 |         match tuple_value.abi_encode_sequence() {
47 |             Some(encoded_datalake) => Ok(encoded_datalake),
48 |             None => bail!("Encoding failed"),
49 |         }
50 |     }
51 | 
52 |     /// Get the commitment hash of the [`TransactionsInBlockDatalake`]
53 |     fn commit(&self) -> B256 {
54 |         let encoded_datalake = self.encode().expect("Encoding failed");
55 |         keccak256(encoded_datalake)
56 |     }
57 | 
58 |     /// Decode the encoded transactions datalake hex string into a [`TransactionsInBlockDatalake`]
59 |     fn decode(encoded: &[u8]) -> Result<Self> {
60 |         let abi_type: DynSolType =
61 |             "(uint256,uint256, uint256, uint256, uint256, uint256, uint256, bytes)".parse()?;
62 |         let decoded = abi_type.abi_decode_sequence(encoded)?;
63 | 
64 |         let value = decoded.as_tuple().unwrap();
65 |         let datalake_code = value[0].as_uint().unwrap().0.to_string().parse::<u8>()?;
66 | 
67 |         if DatalakeType::from_index(datalake_code)? != DatalakeType::TransactionsInBlock {
68 |             bail!("Encoded datalake is not a transactions datalake");
69 |         }
70 | 
71 |         let chain_id =
72 |             ChainId::from_numeric_id(value[1].as_uint().unwrap().0.to_string().parse::<u128>()?)?;
73 |         let target_block = value[2].as_uint().unwrap().0.to_string().parse::<u64>()?;
74 |         let start_index = value[3].as_uint().unwrap().0.to_string().parse::<u64>()?;
75 |         let end_index = value[4].as_uint().unwrap().0.to_string().parse::<u64>()?;
76 |         let increment = value[5].as_uint().unwrap().0.to_string().parse::<u64>()?;
77 |         let included_types = IncludedTypes::from(value[6].as_uint().unwrap().0);
78 |         let sampled_property = TransactionsCollection::deserialize(value[7].as_bytes().unwrap())?;
79 | 
80 |         Ok(Self {
81 |             chain_id,
82 |             target_block,
83 |             start_index,
84 |             end_index,
85 |             increment,
86 |             included_types,
87 |             sampled_property,
88 |         })
89 |     }
90 | }
91 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod datalake_compute;
2 | pub mod module;
3 | pub mod traits;
4 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/module.rs:
--------------------------------------------------------------------------------
1 | use alloy::{
2 |     dyn_abi::DynSolValue,
3 |     hex,
4 |     primitives::{keccak256, B256},
5 | };
6 | use tracing::debug;
7 | 
8 | use crate::primitives::{task::module::Module, utils::felt_to_bytes32};
9 | 
10 | impl Module {
11 |     pub fn encode_task(&self) -> Vec<u8> {
12 |         let class_hash: DynSolValue =
13 |             DynSolValue::FixedBytes(felt_to_bytes32(self.program_hash), 32);
14 |         let module_inputs: DynSolValue = DynSolValue::FixedArray(
15 |             self.get_public_inputs()
16 |                 .iter()
17 |                 .map(|input| DynSolValue::FixedBytes(felt_to_bytes32(*input), 32))
18 |                 .collect(),
19 |         );
20 |         let input_length: DynSolValue = self.get_public_inputs().len().into();
21 |         // offset of class hash
22 |         let offset: DynSolValue = (64).into();
23 |         let module_tuple_value =
24 |             DynSolValue::Tuple(vec![class_hash, offset, input_length, module_inputs]);
25 |         module_tuple_value.abi_encode()
26 |     }
27 | 
28 |     pub fn commit(&self) -> B256 {
29 |         let encoded_task = self.encode_task();
30 |         debug!("encoded_task: {:?}", hex::encode(encoded_task.clone()));
31 |         keccak256(encoded_task)
32 |     }
33 | }
34 | 
35 | #[cfg(test)]
36 | mod tests {
37 |     use alloy::primitives::b256;
38 |     use starknet_crypto::Felt;
39 | 
40 |     use crate::primitives::task::module::{ModuleInput, Visibility};
41 | 
42 |     use super::*;
43 | 
44 |     #[test]
45 |     pub fn module_encode() {
46 |         let module = Module {
47 |             program_hash: Felt::from_hex(
48 |                 "0x00af1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca",
49 |             )
50 |             .unwrap(),
51 |             inputs: vec![
52 |                 ModuleInput::new(Visibility::Public, "0x4F21E5"),
53 |                 ModuleInput::new(Visibility::Public, "0x4F21E8"),
54 |                 ModuleInput::new(
55 |                     Visibility::Public,
56 |                     "0x13cb6ae34a13a0977f4d7101ebc24b87bb23f0d5",
57 |                 ),
58 |             ],
59 |             local_class_path: None,
60 |         };
61 | 
62 |         let expected_commit = module.commit();
63 |         assert_eq!(
64 |             expected_commit,
65 |             b256!("879869b6d237b92bfdd3f3f7b76baaa9ebb2a3ad5e8478d12cca258d3def05af")
66 |         );
67 |     }
68 | 
69 |     #[test]
70 |     pub fn module_encode_with_private_input() {
71 |         let module = Module {
72 |             program_hash: Felt::from_hex(
73 |                 "0x00af1333b8346c1ac941efe380f3122a71c1f7cbad19301543712e74f765bfca",
74 |             )
75 |             .unwrap(),
76 |             inputs: vec![
77 |                 ModuleInput::new(Visibility::Public, "0x4F21E5"),
78 |                 ModuleInput::new(Visibility::Public, "0x4F21E8"),
79 |                 ModuleInput::new(
80 |                     Visibility::Private,
81 |                     "0x13cb6ae34a13a0977f4d7101ebc24b87bb23f0d5",
82 |                 ),
83 |             ],
84 |             local_class_path: None,
85 |         };
86 | 
87 |         let expected_commit = module.commit();
88 |         assert_eq!(
89 |             expected_commit,
90 |             b256!("d81ebd27b719967e1df4edf64c9e3ce87635089e3462306af340a393625d8726")
91 |         );
92 |     }
93 | }
94 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/solidity_types/traits.rs:
--------------------------------------------------------------------------------
1 | use alloy::primitives::B256;
2 | use anyhow::Result;
3 | 
4 | use crate::primitives::task::datalake::datalake_type::DatalakeType;
5 | 
6 | /// Define the common trait for datalake
7 | ///
8 | /// Common for both BlockSampled and TransactionsInBlock
9 | pub trait DatalakeCodecs {
10 |     fn get_datalake_type(&self) -> DatalakeType;
11 |     fn encode(&self) -> Result<Vec<u8>>;
12 |     fn commit(&self) -> B256;
13 |     fn decode(encoded: &[u8]) -> Result<Self>
14 |     where
15 |         Self: Sized;
16 | }
17 | 
18 | pub trait Codecs {
19 |     fn encode(&self) -> Result<Vec<u8>>;
20 |     fn decode(encoded: &[u8]) -> Result<Self>
21 |     where
22 |         Self: Sized;
23 | }
24 | 
25 | /// Codecs for datalake task
26 | pub trait DatalakeComputeCodecs {
27 |     fn decode(encoded_datalake: &[u8], encoded_compute: &[u8]) -> Result<Self>
28 |     where
29 |         Self: std::marker::Sized;
30 |     fn encode(&self) -> Result<Vec<u8>>;
31 |     fn commit(&self) -> B256;
32 | }
33 | 
34 | /// Codecs for vector of datalake task
35 | pub trait BatchedDatalakeComputeCodecs {
36 |     fn decode(encoded_datalake: &[u8], encoded_compute: &[u8]) -> Result<Self>
37 |     where
38 |         Self: std::marker::Sized;
39 |     fn encode(&self) -> Result<(Vec<u8>, Vec<u8>)>;
40 | }
41 | 
--------------------------------------------------------------------------------
/hdp/src/primitives/task/datalake/block_sampled/datalake.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | 
3 | use crate::primitives::{task::datalake::envelope::default_increment, ChainId};
4 | 
5 | use super::collection::BlockSampledCollection;
6 | 
7 | /// [`BlockSampledDatalake`] is a struct that represents a block sampled datalake.
8 | /// It contains chain id, block range, the sampled property, and the increment.
9 | ///
10 | /// Inclusive block range: [block_range_start..block_range_end]
11 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
12 | #[serde(rename_all = "camelCase")]
13 | pub struct BlockSampledDatalake {
14 |     /// Chain id of the datalake
15 |     pub chain_id: ChainId,
16 |     /// The start of the block range
17 |     pub block_range_start: u64,
18 |     /// The end of the block range
19 |     pub block_range_end: u64,
20 |     /// The increment. Defaults to 1 if not present.
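    /// (Editor's example, assuming sampling steps by `increment` from the
    /// range start: [5515020..5515039] with increment 10 samples blocks
    /// 5515020 and 5515030.)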
21 | #[serde(default = "default_increment")] 22 | pub increment: u64, 23 | /// The sampled property 24 | pub sampled_property: BlockSampledCollection, 25 | } 26 | 27 | impl BlockSampledDatalake { 28 | pub fn new( 29 | chain_id: ChainId, 30 | block_range_start: u64, 31 | block_range_end: u64, 32 | increment: u64, 33 | sampled_property: BlockSampledCollection, 34 | ) -> Self { 35 | Self { 36 | chain_id, 37 | block_range_start, 38 | block_range_end, 39 | increment, 40 | sampled_property, 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/datalake/compute.rs: -------------------------------------------------------------------------------- 1 | use serde::de::{self, MapAccess, Visitor}; 2 | use serde::{Deserialize, Deserializer, Serialize}; 3 | use std::fmt; 4 | 5 | use crate::primitives::aggregate_fn::{AggregationFunction, FunctionContext}; 6 | 7 | /// [`Computation`] is a structure that contains the aggregate function id and context 8 | #[derive(Debug, PartialEq, Eq, Clone, Serialize)] 9 | #[serde(rename_all = "camelCase")] 10 | pub struct Computation { 11 | pub aggregate_fn_id: AggregationFunction, 12 | pub aggregate_fn_ctx: FunctionContext, 13 | } 14 | 15 | impl Computation { 16 | pub fn new( 17 | aggregate_fn_id: AggregationFunction, 18 | aggregate_fn_ctx: Option<FunctionContext>, 19 | ) -> Self { 20 | let aggregate_fn_ctx_parsed = aggregate_fn_ctx.unwrap_or_default(); 21 | Self { 22 | aggregate_fn_id, 23 | aggregate_fn_ctx: aggregate_fn_ctx_parsed, 24 | } 25 | } 26 | } 27 | 28 | impl<'de> Deserialize<'de> for Computation { 29 | fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> 30 | where 31 | D: Deserializer<'de>, 32 | { 33 | #[derive(Deserialize)] 34 | #[serde(field_identifier, rename_all = "camelCase")] 35 | enum Field { 36 | AggregateFnId, 37 | AggregateFnCtx, 38 | } 39 | 40 | struct ComputationVisitor; 41 | 42 | impl<'de> Visitor<'de> for ComputationVisitor { 43 | type Value = Computation; 44 | 45 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 46 | formatter.write_str("struct Computation") 47 | } 48 | 49 | fn visit_map<V>(self, mut map: V) -> Result<Computation, V::Error> 50 | where 51 | V: MapAccess<'de>, 52 | { 53 | let mut aggregate_fn_id = None; 54 | let mut aggregate_fn_ctx = None; 55 | while let Some(key) = map.next_key()?
{ 56 | match key { 57 | Field::AggregateFnId => { 58 | if aggregate_fn_id.is_some() { 59 | return Err(de::Error::duplicate_field("aggregateFnId")); 60 | } 61 | aggregate_fn_id = Some(map.next_value()?); 62 | } 63 | Field::AggregateFnCtx => { 64 | if aggregate_fn_ctx.is_some() { 65 | return Err(de::Error::duplicate_field("aggregateFnCtx")); 66 | } 67 | aggregate_fn_ctx = Some(map.next_value()?); 68 | } 69 | } 70 | } 71 | let aggregate_fn_id = 72 | aggregate_fn_id.ok_or_else(|| de::Error::missing_field("aggregateFnId"))?; 73 | let aggregate_fn_ctx = aggregate_fn_ctx.unwrap_or_default(); 74 | Ok(Computation { 75 | aggregate_fn_id, 76 | aggregate_fn_ctx, 77 | }) 78 | } 79 | } 80 | 81 | const FIELDS: &[&str] = &["aggregateFnId", "aggregateFnCtx"]; 82 | deserializer.deserialize_struct("Computation", FIELDS, ComputationVisitor) 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/datalake/datalake_type.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Result}; 2 | use serde::{Deserialize, Serialize}; 3 | use std::str::FromStr; 4 | 5 | /// Identifier for a BlockSampledDatalake 6 | pub const BLOCK_SAMPLED_DATALAKE_TYPE_ID: u8 = 0; 7 | 8 | /// Identifier for a TransactionsDatalake 9 | pub const TRANSACTIONS_IN_BLOCK_DATALAKE_TYPE_ID: u8 = 1; 10 | 11 | #[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq)] 12 | pub enum DatalakeType { 13 | BlockSampled = 0, 14 | TransactionsInBlock = 1, 15 | } 16 | 17 | impl FromStr for DatalakeType { 18 | type Err = anyhow::Error; 19 | 20 | fn from_str(s: &str) -> Result<Self> { 21 | match s { 22 | "BLOCK_SAMPLED" => Ok(DatalakeType::BlockSampled), 23 | "TRANSACTIONS_IN_BLOCK" => Ok(DatalakeType::TransactionsInBlock), 24 | _ => bail!("Unknown datalake type"), 25 | } 26 | } 27 | } 28 | 29 | impl From<DatalakeType> for u8 { 30 | fn from(value: DatalakeType) -> Self { 31 | match value { 32 | DatalakeType::BlockSampled => BLOCK_SAMPLED_DATALAKE_TYPE_ID, 33 | DatalakeType::TransactionsInBlock => TRANSACTIONS_IN_BLOCK_DATALAKE_TYPE_ID, 34 | } 35 | } 36 | } 37 | 38 | impl DatalakeType { 39 | pub fn variants() -> Vec<String> { 40 | vec!["BLOCK_SAMPLED", "TRANSACTIONS_IN_BLOCK"] 41 | .into_iter() 42 | .map(String::from) 43 | .collect() 44 | } 45 | 46 | pub fn to_u8(self) -> u8 { 47 | self.into() 48 | } 49 | 50 | pub fn from_index(value: u8) -> Result<Self> { 51 | match value { 52 | BLOCK_SAMPLED_DATALAKE_TYPE_ID => Ok(DatalakeType::BlockSampled), 53 | TRANSACTIONS_IN_BLOCK_DATALAKE_TYPE_ID => Ok(DatalakeType::TransactionsInBlock), 54 | _ => bail!("Invalid datalake type"), 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/datalake/envelope.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::{ 4 | block_sampled::BlockSampledDatalake, transactions::TransactionsInBlockDatalake, 5 | DatalakeCollection, 6 | }; 7 | 8 | pub type BatchedDatalakes = Vec<DatalakeEnvelope>; 9 | 10 | /// Envelope for datalake 11 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] 12 | #[serde(tag = "type")] 13 | pub enum DatalakeEnvelope { 14 | BlockSampled(BlockSampledDatalake), 15 | TransactionsInBlock(TransactionsInBlockDatalake), 16 | } 17 | 18 | impl DatalakeEnvelope { 19 | pub fn get_collection_type(&self) -> Box<dyn DatalakeCollection> { 20 | match self { 21 | DatalakeEnvelope::BlockSampled(datalake) => Box::new(datalake.sampled_property), 22 |
DatalakeEnvelope::TransactionsInBlock(datalake) => Box::new(datalake.sampled_property), 23 | } 24 | } 25 | 26 | pub fn get_chain_id(&self) -> crate::primitives::ChainId { 27 | match self { 28 | DatalakeEnvelope::BlockSampled(datalake) => datalake.chain_id, 29 | DatalakeEnvelope::TransactionsInBlock(datalake) => datalake.chain_id, 30 | } 31 | } 32 | } 33 | 34 | /// Default increment for datalake 35 | pub fn default_increment() -> u64 { 36 | 1 37 | } 38 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/datalake/mod.rs: -------------------------------------------------------------------------------- 1 | use alloy::primitives::U256; 2 | use anyhow::Result; 3 | use serde::{Deserialize, Serialize}; 4 | use std::{fmt::Display, str::FromStr}; 5 | 6 | use self::{compute::Computation, envelope::DatalakeEnvelope}; 7 | 8 | pub mod block_sampled; 9 | pub mod compute; 10 | pub mod datalake_type; 11 | pub mod envelope; 12 | pub mod transactions; 13 | 14 | #[derive(Debug, Clone, Serialize, Deserialize)] 15 | pub struct DatalakeCompute { 16 | pub datalake: DatalakeEnvelope, 17 | pub compute: Computation, 18 | } 19 | 20 | impl DatalakeCompute { 21 | pub fn new(datalake: DatalakeEnvelope, compute: Computation) -> Self { 22 | Self { datalake, compute } 23 | } 24 | } 25 | 26 | pub trait DatalakeCollection { 27 | fn to_index(&self) -> u8; 28 | fn serialize(&self) -> Result<Vec<u8>>; 29 | fn deserialize(encoded: &[u8]) -> Result<Self> 30 | where 31 | Self: Sized; 32 | } 33 | 34 | pub trait DatalakeField: FromStr + Display { 35 | fn from_index(index: u8) -> Result<Self> 36 | where 37 | Self: Sized; 38 | fn to_index(&self) -> u8; 39 | fn decode_field_from_rlp(&self, rlp: &[u8]) -> U256; 40 | } 41 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/datalake/transactions/collection.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt::Display, str::FromStr}; 2 | 3 | use anyhow::{bail, Result}; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | use crate::primitives::task::datalake::{DatalakeCollection, DatalakeField}; 7 | 8 | use super::{TransactionField, TransactionReceiptField}; 9 | 10 | pub enum TransactionsCollectionType { 11 | Transactions, 12 | TransactionReceipts, 13 | } 14 | 15 | impl TransactionsCollectionType { 16 | pub fn variants() -> Vec<String> { 17 | vec!["TX".to_string(), "TX_RECEIPT".to_string()] 18 | } 19 | 20 | pub fn to_u8(&self) -> u8 { 21 | match self { 22 | TransactionsCollectionType::Transactions => 4, 23 | TransactionsCollectionType::TransactionReceipts => 5, 24 | } 25 | } 26 | } 27 | 28 | impl FromStr for TransactionsCollectionType { 29 | type Err = anyhow::Error; 30 | 31 | fn from_str(s: &str) -> Result<Self> { 32 | match s.to_uppercase().as_str() { 33 | "TX" => Ok(TransactionsCollectionType::Transactions), 34 | "TX_RECEIPT" => Ok(TransactionsCollectionType::TransactionReceipts), 35 | _ => bail!("Unknown transactions collection type"), 36 | } 37 | } 38 | } 39 | 40 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] 41 | #[serde(try_from = "String")] 42 | pub enum TransactionsCollection { 43 | Transactions(TransactionField), 44 | TranasactionReceipts(TransactionReceiptField), 45 | } 46 | 47 | impl DatalakeCollection for TransactionsCollection { 48 | fn to_index(&self) -> u8 { 49 | match self { 50 | TransactionsCollection::Transactions(ref field) => field.to_index(), 51 | TransactionsCollection::TranasactionReceipts(ref field) => field.to_index(),
52 | } 53 | } 54 | 55 | fn serialize(&self) -> Result<Vec<u8>> { 56 | match self { 57 | TransactionsCollection::Transactions(ref field) => Ok([1, field.to_index()].to_vec()), 58 | TransactionsCollection::TranasactionReceipts(ref field) => { 59 | Ok([2, field.to_index()].to_vec()) 60 | } 61 | } 62 | } 63 | 64 | fn deserialize(bytes: &[u8]) -> Result<Self> { 65 | if bytes.len() != 2 { 66 | return Err(anyhow::Error::msg("Invalid transactions collection")); 67 | } 68 | 69 | match bytes[0] { 70 | 1 => Ok(TransactionsCollection::Transactions( 71 | TransactionField::from_index(bytes[1])?, 72 | )), 73 | 2 => Ok(TransactionsCollection::TranasactionReceipts( 74 | TransactionReceiptField::from_index(bytes[1])?, 75 | )), 76 | _ => Err(anyhow::Error::msg("Unknown transactions collection")), 77 | } 78 | } 79 | } 80 | 81 | impl FromStr for TransactionsCollection { 82 | type Err = anyhow::Error; 83 | 84 | fn from_str(s: &str) -> Result<Self> { 85 | // Split into two parts by '.' 86 | let parts: Vec<&str> = s.split('.').collect(); 87 | if parts.len() != 2 { 88 | bail!("Invalid transactions collection format"); 89 | } 90 | 91 | match parts[0].to_uppercase().as_str() { 92 | "TX" => Ok(TransactionsCollection::Transactions( 93 | parts[1].to_uppercase().as_str().parse()?, 94 | )), 95 | "TX_RECEIPT" => Ok(TransactionsCollection::TranasactionReceipts( 96 | parts[1].to_uppercase().as_str().parse()?, 97 | )), 98 | _ => bail!("Unknown transactions collection"), 99 | } 100 | } 101 | } 102 | 103 | impl TryFrom<String> for TransactionsCollection { 104 | type Error = anyhow::Error; 105 | 106 | fn try_from(value: String) -> Result<Self> { 107 | TransactionsCollection::from_str(&value) 108 | } 109 | } 110 | 111 | impl Display for TransactionsCollection { 112 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 113 | match self { 114 | TransactionsCollection::Transactions(ref field) => write!(f, "TX.{}", field), 115 | TransactionsCollection::TranasactionReceipts(ref field) => { 116 | write!(f, "TX_RECEIPT.{}", field) 117 | } 118 | } 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /hdp/src/primitives/task/mod.rs: -------------------------------------------------------------------------------- 1 | //! Task is a unit of work that can be executed by the processor/pre-processor.
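//!
//! A minimal usage sketch, assuming this crate is mounted as `hdp` (illustrative
//! only; the task values are placeholders, not part of the original docs):
//!
//! ```rust,ignore
//! use hdp::primitives::task::TaskEnvelope;
//!
//! // Partition a mixed batch into the datalake and module tasks that the
//! // pre-processor handles separately.
//! let tasks: Vec<TaskEnvelope> = vec![/* datalake and module tasks */];
//! let (datalake_tasks, module_tasks) = TaskEnvelope::divide_tasks(tasks);
//! ```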
2 | use crate::primitives::solidity_types::traits::DatalakeComputeCodecs; 3 | use alloy::primitives::B256; 4 | use cairo_lang_starknet_classes::casm_contract_class::CasmContractClass; 5 | use datalake::DatalakeCompute; 6 | use module::Module; 7 | 8 | pub mod datalake; 9 | pub mod module; 10 | 11 | /// [`TaskEnvelope`] is a structure that contains the task itself. 12 | /// This structure is used to provide the task to the pre-processor. 13 | #[derive(Clone, Debug)] 14 | pub enum TaskEnvelope { 15 | DatalakeCompute(DatalakeCompute), 16 | Module(ExtendedModule), 17 | } 18 | 19 | #[derive(Clone, Debug)] 20 | pub struct ExtendedModule { 21 | pub task: Module, 22 | pub module_class: CasmContractClass, 23 | } 24 | 25 | impl TaskEnvelope { 26 | pub fn commit(&self) -> B256 { 27 | match self { 28 | TaskEnvelope::DatalakeCompute(task) => task.commit(), 29 | TaskEnvelope::Module(module) => module.task.commit(), 30 | } 31 | } 32 | 33 | pub fn divide_tasks(tasks: Vec<TaskEnvelope>) -> (Vec<DatalakeCompute>, Vec<ExtendedModule>) { 34 | // Partition the tasks into datalake and module tasks 35 | let (datalake_envelopes, module_envelopes): (Vec<_>, Vec<_>) = tasks 36 | .into_iter() 37 | .partition(|task| matches!(task, TaskEnvelope::DatalakeCompute(_))); 38 | 39 | let datalake_tasks = datalake_envelopes 40 | .into_iter() 41 | .filter_map(|task| { 42 | if let TaskEnvelope::DatalakeCompute(datalake_task) = task { 43 | Some(datalake_task) 44 | } else { 45 | None 46 | } 47 | }) 48 | .collect(); 49 | 50 | let module_tasks = module_envelopes 51 | .into_iter() 52 | .filter_map(|task| { 53 | if let TaskEnvelope::Module(module_task) = task { 54 | Some(module_task) 55 | } else { 56 | None 57 | } 58 | }) 59 | .collect(); 60 | 61 | (datalake_tasks, module_tasks) 62 | } 63 | 64 | pub fn variants() -> Vec<String> { 65 | vec!["DATALAKE_COMPUTE", "MODULE"] 66 | .into_iter() 67 | .map(String::from) 68 | .collect() 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /hdp/src/processor.rs: -------------------------------------------------------------------------------- 1 | //! The processor is responsible for running the module. 2 | //! This run is the sound execution of the module. 3 | //! This is the most abstract layer of the processor.
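//!
//! A hedged usage sketch, assuming the `Processor` type defined below; the
//! program path matches the default used by this repository's build scripts
//! (`build/hdp.json`), while the input and pie-file values are placeholders:
//!
//! ```rust,ignore
//! use std::path::PathBuf;
//!
//! let processor = Processor::new(PathBuf::from("build/hdp.json"));
//! // `input` is the ProcessorInput produced by the pre-processor.
//! processor.process(input, Some(&PathBuf::from("cairo.pie")), false).await?;
//! ```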
4 | 5 | use crate::constant::DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE; 6 | use crate::{cairo_runner::cairo_run, primitives::processed_types::cairo_format::ProcessorInput}; 7 | use anyhow::Result; 8 | use std::env; 9 | use std::path::PathBuf; 10 | use tracing::{debug, info}; 11 | 12 | /// HdpProcessorConfig for the CLI 13 | #[derive(Debug)] 14 | pub struct HdpProcessorConfig { 15 | pub input_file: PathBuf, 16 | pub sound_run_program_path: PathBuf, 17 | pub cairo_pie_file: Option<PathBuf>, 18 | pub is_proof_mode: bool, 19 | } 20 | 21 | impl HdpProcessorConfig { 22 | pub fn init( 23 | cli_sound_run_cairo_file: Option<PathBuf>, 24 | cli_input_file: PathBuf, 25 | cli_cairo_pie_file: Option<PathBuf>, 26 | cli_is_proof_mode: bool, 27 | ) -> Self { 28 | let sound_run_cairo_path: PathBuf = cli_sound_run_cairo_file.unwrap_or_else(|| { 29 | env::var("SOUND_RUN_CAIRO_PATH") 30 | .unwrap_or_else(|_| DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE.to_string()) 31 | .parse() 32 | .expect("SOUND_RUN_CAIRO_PATH must be a path to a cairo file") 33 | }); 34 | 35 | let config = HdpProcessorConfig { 36 | input_file: cli_input_file, 37 | sound_run_program_path: sound_run_cairo_path, 38 | cairo_pie_file: cli_cairo_pie_file, 39 | is_proof_mode: cli_is_proof_mode, 40 | }; 41 | 42 | debug!("Running with configuration: {:#?}", config); 43 | config 44 | } 45 | } 46 | 47 | pub struct Processor { 48 | program_path: PathBuf, 49 | } 50 | 51 | impl Processor { 52 | pub fn new(program_path: PathBuf) -> Self { 53 | Self { program_path } 54 | } 55 | 56 | /// Execute the process that involves the sound cairo run. 57 | pub async fn process( 58 | &self, 59 | processor_input: ProcessorInput, 60 | pie_file_path: Option<&PathBuf>, 61 | is_proof_mode: bool, 62 | ) -> Result<()> { 63 | let cairo_run_input = serde_json::to_string_pretty(&processor_input) 64 | .expect("Failed to serialize module class"); 65 | let _ = cairo_run( 66 | &self.program_path, 67 | cairo_run_input, 68 | pie_file_path, 69 | is_proof_mode, 70 | )?; 71 | info!("2️⃣ Processor completed successfully"); 72 | Ok(()) 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /hdp/src/provider/config.rs: -------------------------------------------------------------------------------- 1 | use reqwest::Url; 2 | 3 | use crate::primitives::ChainId; 4 | 5 | /// EVM provider configuration 6 | #[derive(Clone, Debug)] 7 | pub struct ProviderConfig { 8 | /// provider url 9 | pub provider_url: Url, 10 | /// accumulates chain id 11 | pub chain_id: ChainId, 12 | /// deployed on chain id 13 | pub deployed_on_chain_id: ChainId, 14 | /// Max number of requests to send in parallel 15 | /// 16 | /// By default, it is set to 100. 17 | /// For an archive node, it is recommended to set it to 1000. 18 | /// This will affect the fetch speed of account and storage proofs. 19 | pub max_requests: u64, 20 | } 21 | 22 | /// This is the optimal max number of requests to send in parallel when using a non-paid Alchemy RPC URL 23 | #[cfg(feature = "test_utils")] 24 | pub const TEST_MAX_REQUESTS: u64 = 100; 25 | #[cfg(feature = "test_utils")] 26 | use lazy_static::lazy_static; 27 | 28 | #[cfg(feature = "test_utils")] 29 | lazy_static!
{ 30 | static ref TEST_RPC_URL: String = std::env::var("PROVIDER_URL_ETHEREUM_SEPOLIA") 31 | .expect("Environment variable PROVIDER_URL_ETHEREUM_SEPOLIA not set"); 32 | } 33 | 34 | #[cfg(feature = "test_utils")] 35 | impl Default for ProviderConfig { 36 | fn default() -> Self { 37 | Self { 38 | provider_url: TEST_RPC_URL.parse().unwrap(), 39 | chain_id: ChainId::EthereumSepolia, 40 | deployed_on_chain_id: ChainId::EthereumSepolia, 41 | max_requests: TEST_MAX_REQUESTS, 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /hdp/src/provider/error.rs: -------------------------------------------------------------------------------- 1 | use alloy::primitives::BlockNumber; 2 | use thiserror::Error; 3 | 4 | use crate::provider::indexer::IndexerError; 5 | 6 | /// Error type for provider 7 | #[derive(Error, Debug)] 8 | pub enum ProviderError { 9 | /// Error when the query is invalid 10 | #[error("Transaction index out of bound: requested index: {0}, length: {1}")] 11 | OutOfBoundRequestError(u64, u64), 12 | 13 | /// Error when the MMR meta is mismatched among the range of requested blocks 14 | #[error("MMR meta mismatch among range of requested blocks")] 15 | MismatchedMMRMeta, 16 | 17 | /// Error when the MMR is not found 18 | #[error("MMR not found")] 19 | MmrNotFound, 20 | 21 | /// Error from the [`IndexerError`] 22 | #[error("Failed from indexer")] 23 | IndexerError(#[from] IndexerError), 24 | 25 | /// Error from [`RpcProviderError`] 26 | #[error("Failed to get proofs: {0}")] 27 | EvmRpcProviderError(#[from] RpcProviderError), 28 | 29 | /// Error from [`eth_trie_proofs`] 30 | #[error("EthTrieError: {0}")] 31 | EthTrieError(#[from] eth_trie_proofs::EthTrieError), 32 | 33 | #[error("Fetch key error: {0}")] 34 | FetchKeyError(String), 35 | } 36 | 37 | /// Error from rpc 38 | #[derive(Error, Debug)] 39 | pub enum RpcProviderError { 40 | #[error("Failed to send proofs with mpsc")] 41 | MpscError( 42 | #[from] 43 | tokio::sync::mpsc::error::SendError<( 44 | BlockNumber, 45 | alloy::rpc::types::EIP1186AccountProofResponse, 46 | )>, 47 | ), 48 | 49 | #[error("Failed to fetch proofs: {0}")] 50 | ReqwestError(#[from] reqwest::Error), 51 | 52 | #[error("Failed to parse response: {0}")] 53 | SerdeJsonError(#[from] serde_json::Error), 54 | } 55 | -------------------------------------------------------------------------------- /hdp/src/provider/evm/datalake/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod block_sampled; 2 | pub mod transactions; 3 | -------------------------------------------------------------------------------- /hdp/src/provider/evm/datalake/transactions.rs: -------------------------------------------------------------------------------- 1 | use crate::{ 2 | primitives::{ 3 | processed_types::{ 4 | block_proofs::convert_to_mmr_with_headers, header::ProcessedHeader, mmr::MMRMeta, 5 | receipt::ProcessedReceipt, transaction::ProcessedTransaction, 6 | }, 7 | task::datalake::{ 8 | transactions::{TransactionsCollection, TransactionsInBlockDatalake}, 9 | DatalakeField, 10 | }, 11 | }, 12 | provider::{error::ProviderError, evm::provider::EvmProvider, types::FetchedDatalake}, 13 | }; 14 | use alloy::primitives::U256; 15 | use anyhow::Result; 16 | 17 | use std::collections::{HashMap, HashSet}; 18 | 19 | impl EvmProvider { 20 | pub async fn fetch_transactions( 21 | &self, 22 | datalake: &TransactionsInBlockDatalake, 23 | ) -> Result<FetchedDatalake> { 24 | let mut aggregation_set: Vec<U256> = Vec::new(); 25 | 26 | let
headers_proofs = self 27 | .get_range_of_header_proofs( 28 | datalake.target_block, 29 | datalake.target_block, 30 | datalake.increment, 31 | ) 32 | .await?; 33 | 34 | let mut mmr_with_headers: HashMap<MMRMeta, HashSet<ProcessedHeader>> = HashMap::new(); 35 | let mut transactions: HashSet<ProcessedTransaction> = HashSet::new(); 36 | let mut transaction_receipts: HashSet<ProcessedReceipt> = HashSet::new(); 37 | let (fetched_block, mmr) = headers_proofs.get(&datalake.target_block).unwrap(); 38 | 39 | let processed_header = ProcessedHeader::new( 40 | fetched_block.block_header.get_evm_block_header(), 41 | fetched_block.element_index, 42 | fetched_block.siblings_hashes.clone(), 43 | ); 44 | mmr_with_headers.insert(mmr.clone(), [processed_header].into_iter().collect()); 45 | 46 | match &datalake.sampled_property { 47 | TransactionsCollection::Transactions(property) => { 48 | for tx in self 49 | .get_tx_with_proof_from_block( 50 | datalake.target_block, 51 | datalake.start_index, 52 | datalake.end_index, 53 | datalake.increment, 54 | ) 55 | .await? 56 | { 57 | transactions.insert(ProcessedTransaction::new( 58 | tx.tx_index, 59 | tx.block_number, 60 | tx.transaction_proof, 61 | )); 62 | 63 | // depending on datalake.included_types, filter whether the value is included in the aggregation set 64 | if datalake.included_types.is_included(tx.tx_type) { 65 | let value = property.decode_field_from_rlp(&tx.encoded_transaction); 66 | aggregation_set.push(value); 67 | } 68 | } 69 | } 70 | TransactionsCollection::TranasactionReceipts(property) => { 71 | for tx_receipt in self 72 | .get_tx_receipt_with_proof_from_block( 73 | datalake.target_block, 74 | datalake.start_index, 75 | datalake.end_index, 76 | datalake.increment, 77 | ) 78 | .await? 79 | { 80 | transaction_receipts.insert(ProcessedReceipt::new( 81 | tx_receipt.tx_index, 82 | tx_receipt.block_number, 83 | tx_receipt.receipt_proof, 84 | )); 85 | 86 | // depending on datalake.included_types, filter whether the value is included in the aggregation set 87 | if datalake.included_types.is_included(tx_receipt.tx_type) { 88 | let value = property.decode_field_from_rlp(&tx_receipt.encoded_receipt); 89 | aggregation_set.push(value); 90 | } 91 | } 92 | } 93 | } 94 | 95 | Ok(FetchedDatalake { 96 | values: aggregation_set, 97 | mmr_with_headers: HashSet::from_iter(convert_to_mmr_with_headers(mmr_with_headers)), 98 | accounts: HashSet::new(), 99 | storages: HashSet::new(), 100 | transactions, 101 | transaction_receipts, 102 | }) 103 | } 104 | } 105 | -------------------------------------------------------------------------------- /hdp/src/provider/evm/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod datalake; 2 | pub mod from_keys; 3 | pub mod provider; 4 | pub mod rpc; 5 | -------------------------------------------------------------------------------- /hdp/src/provider/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod config; 2 | pub mod error; 3 | pub mod evm; 4 | pub mod indexer; 5 | pub mod key; 6 | pub mod starknet; 7 | pub mod traits; 8 | pub mod types; 9 | -------------------------------------------------------------------------------- /hdp/src/provider/starknet/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod from_keys; 2 | pub mod provider; 3 | pub mod rpc; 4 | pub mod types; 5 | -------------------------------------------------------------------------------- /hdp/src/provider/starknet/types.rs: -------------------------------------------------------------------------------- 1 | use
serde::{Deserialize, Serialize}; 2 | use serde_with::skip_serializing_none; 3 | use starknet_types_core::{felt::Felt, hash::StarkHash}; 4 | 5 | /// Codebase is from the pathfinder repository. 6 | 7 | /// Holds the membership/non-membership of a contract and its associated 8 | /// contract data if the contract exists. 9 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 10 | #[skip_serializing_none] 11 | pub struct GetProofOutput { 12 | /// The global state commitment for Starknet 0.11.0 blocks onwards, if 13 | /// absent the hash of the first node in the 14 | /// [contract_proof](GetProofOutput#contract_proof) is the global state 15 | /// commitment. 16 | pub state_commitment: Option<Felt>, 17 | /// Required to verify that the hash of the class commitment and the root of 18 | /// the [contract_proof](GetProofOutput::contract_proof) matches the 19 | /// [state_commitment](Self#state_commitment). Present only for Starknet 20 | /// blocks 0.11.0 onwards. 21 | pub class_commitment: Option<Felt>, 22 | 23 | /// Membership / Non-membership proof for the queried contract 24 | pub contract_proof: Vec<TrieNode>, 25 | 26 | /// Additional contract data if it exists. 27 | pub contract_data: Option<ContractData>, 28 | } 29 | 30 | /// A node in a Starknet patricia-merkle trie. 31 | /// 32 | /// See pathfinder's merkle-tree crate for more information. 33 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 34 | pub enum TrieNode { 35 | #[serde(rename = "binary")] 36 | Binary { left: Felt, right: Felt }, 37 | #[serde(rename = "edge")] 38 | Edge { child: Felt, path: Path }, 39 | } 40 | 41 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 42 | pub struct Path { 43 | len: u64, 44 | value: String, 45 | } 46 | 47 | impl TrieNode { 48 | pub fn hash<H: StarkHash>(&self) -> Felt { 49 | match self { 50 | TrieNode::Binary { left, right } => H::hash(left, right), 51 | TrieNode::Edge { child, path } => { 52 | let bytes: [u8; 32] = path.value.as_bytes().try_into().unwrap(); 53 | let mut length = [0; 32]; 54 | // Safe as len() is guaranteed to be <= 251 55 | length[31] = bytes.len() as u8; 56 | 57 | let length = Felt::from_bytes_be(&length); 58 | let path = Felt::from_bytes_be(&bytes); 59 | H::hash(child, &path) + length 60 | } 61 | } 62 | } 63 | } 64 | 65 | /// Holds the data and proofs for a specific contract. 66 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq, Hash)] 67 | pub struct ContractData { 68 | /// Required to verify the contract state hash to contract root calculation. 69 | class_hash: Felt, 70 | /// Required to verify the contract state hash to contract root calculation. 71 | nonce: Felt, 72 | 73 | /// Root of the Contract state tree 74 | root: Felt, 75 | 76 | /// This is currently just a constant = 0, however it might change in the 77 | /// future.
78 | contract_state_hash_version: Felt, 79 | 80 | /// The proofs associated with the queried storage values 81 | pub storage_proofs: Vec<Vec<TrieNode>>, 82 | } 83 | -------------------------------------------------------------------------------- /hdp/src/provider/traits.rs: -------------------------------------------------------------------------------- 1 | use crate::primitives::processed_types::block_proofs::ProcessedBlockProofs; 2 | use crate::primitives::ChainId; 3 | use std::future::Future; 4 | use std::pin::Pin; 5 | 6 | use super::config::ProviderConfig; 7 | use super::error::ProviderError; 8 | use super::evm::provider::EvmProvider; 9 | use super::key::CategorizedFetchKeys; 10 | use super::starknet::provider::StarknetProvider; 11 | use super::types::FetchedDatalake; 12 | 13 | pub type FetchProofsResult = Result<FetchedDatalake, ProviderError>; 14 | pub type FetchProofsFromKeysResult = Result<ProcessedBlockProofs, ProviderError>; 15 | 16 | pub type AsyncResult<'a, T> = Pin<Box<dyn Future<Output = T> + Send + 'a>>; 17 | 18 | /// Trait for a generic proof provider. 19 | /// 20 | /// - `fetch_proofs` is used to fetch proofs from a datalake. 21 | /// - `fetch_proofs_from_keys` is used to fetch proofs from the provider based on the keys. Used in module. 22 | pub trait ProofProvider: Send + Sync { 23 | fn fetch_proofs<'a>( 24 | &'a self, 25 | datalake: &'a crate::primitives::task::datalake::DatalakeCompute, 26 | ) -> AsyncResult<'a, FetchProofsResult>; 27 | 28 | fn fetch_proofs_from_keys( 29 | &self, 30 | keys: CategorizedFetchKeys, 31 | ) -> AsyncResult<'_, FetchProofsFromKeysResult>; 32 | } 33 | 34 | /// Create a new provider from config 35 | /// 36 | /// Returns a generic provider that implements the [`ProofProvider`] trait 37 | pub fn new_provider_from_config(config: &ProviderConfig) -> Box<dyn ProofProvider> { 38 | match config.chain_id { 39 | ChainId::EthereumMainnet | ChainId::EthereumSepolia => Box::new(EvmProvider::new(config)), 40 | ChainId::StarknetSepolia | ChainId::StarknetMainnet => { 41 | Box::new(StarknetProvider::new(config)) 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /hdp/src/provider/types.rs: -------------------------------------------------------------------------------- 1 | //! Types for the provider crate. 2 | //! For the `FetchedTransactionProof` and `FetchedTransactionReceiptProof` types. 3 | //! 4 | //! We need these types to bind encoded transactions and receipts to their block number and proofs.
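//!
//! For illustration, a `FetchedTransactionProof` could be built with the
//! constructor defined below (all values here are placeholders, not data from
//! the original documentation):
//!
//! ```rust,ignore
//! let fetched = FetchedTransactionProof::new(
//!     6_127_485,        // block_number
//!     2,                // tx_index
//!     encoded_tx,       // RLP-encoded transaction bytes
//!     proof_nodes,      // MPT proof for the transaction trie
//!     TxType::Eip1559,  // transaction type used for included_types filtering
//! );
//! ```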
5 | 6 | use std::collections::HashSet; 7 | 8 | use crate::primitives::processed_types::block_proofs::MMRWithHeader; 9 | use crate::primitives::processed_types::{ 10 | account::ProcessedAccount, receipt::ProcessedReceipt, storage::ProcessedStorage, 11 | transaction::ProcessedTransaction, 12 | }; 13 | 14 | use alloy::primitives::U256; 15 | use alloy::{ 16 | consensus::TxType, 17 | primitives::{BlockNumber, Bytes, TxIndex}, 18 | }; 19 | 20 | #[derive(Debug, Clone)] 21 | pub struct FetchedTransactionProof { 22 | pub block_number: BlockNumber, 23 | pub tx_index: TxIndex, 24 | pub encoded_transaction: Vec<u8>, 25 | pub transaction_proof: Vec<Bytes>, 26 | pub tx_type: TxType, 27 | } 28 | 29 | impl FetchedTransactionProof { 30 | pub fn new( 31 | block_number: BlockNumber, 32 | tx_index: TxIndex, 33 | encoded_transaction: Vec<u8>, 34 | transaction_proof: Vec<Bytes>, 35 | tx_type: TxType, 36 | ) -> Self { 37 | Self { 38 | block_number, 39 | tx_index, 40 | encoded_transaction, 41 | transaction_proof, 42 | tx_type, 43 | } 44 | } 45 | } 46 | 47 | #[derive(Debug, Clone)] 48 | pub struct FetchedTransactionReceiptProof { 49 | pub block_number: BlockNumber, 50 | pub tx_index: TxIndex, 51 | pub encoded_receipt: Vec<u8>, 52 | pub receipt_proof: Vec<Bytes>, 53 | pub tx_type: TxType, 54 | } 55 | 56 | impl FetchedTransactionReceiptProof { 57 | pub fn new( 58 | block_number: BlockNumber, 59 | tx_index: TxIndex, 60 | encoded_receipt: Vec<u8>, 61 | receipt_proof: Vec<Bytes>, 62 | tx_type: TxType, 63 | ) -> Self { 64 | Self { 65 | block_number, 66 | tx_index, 67 | encoded_receipt, 68 | receipt_proof, 69 | tx_type, 70 | } 71 | } 72 | } 73 | 74 | pub struct FetchedDatalake { 75 | /// Targeted datalake's compiled results 76 | pub values: Vec<U256>, 77 | /// mmr_with_headers related to the datalake 78 | pub mmr_with_headers: HashSet<MMRWithHeader>, 79 | /// Accounts related to the datalake 80 | pub accounts: HashSet<ProcessedAccount>, 81 | /// Storages related to the datalake 82 | pub storages: HashSet<ProcessedStorage>, 83 | /// Transactions related to the datalake 84 | pub transactions: HashSet<ProcessedTransaction>, 85 | /// Transaction receipts related to the datalake 86 | pub transaction_receipts: HashSet<ProcessedReceipt>, 87 | } 88 | -------------------------------------------------------------------------------- /justfile: -------------------------------------------------------------------------------- 1 | # Set environment variable 2 | export CARGO_MAKE_EXTEND_WORKSPACE_MAKEFILE := "true" 3 | 4 | # Run rustfmt to check the code formatting without making changes 5 | format: 6 | cargo fmt -- --check 7 | 8 | # Clean up the project by removing the target directory 9 | clean: 10 | cargo clean 11 | 12 | # Run clippy to catch common mistakes and improve your Rust code 13 | clippy: 14 | cargo clippy --all-targets --all-features -- -Dwarnings 15 | 16 | # Generate documentation for the project 17 | docs: 18 | cargo doc --no-deps 19 | 20 | # Execute all unit tests in the workspace 21 | test: 22 | cargo llvm-cov nextest --features test_utils 23 | 24 | # Run the entire CI pipeline including format, clippy, docs, and test checks 25 | run-ci-flow: format clippy docs test 26 | @echo "CI flow completed" 27 | -------------------------------------------------------------------------------- /request.json: -------------------------------------------------------------------------------- 1 | { 2 | "destinationChainId": "ETHEREUM_SEPOLIA", 3 | "tasks": [ 4 | { 5 | "type": "Module", 6 | "localClassPath": "./fixtures/primitives/local_class.json", 7 | "inputs": [ 8 | { 9 | "visibility": "public", 10 | "value": "0x5222A4" 11 | }, 12 | { 13 | "visibility": "public", 14 |
"value": "0x13cb6ae34a13a0977f4d7101ebc24b87bb23f0d5" 15 | } 16 | ] 17 | } 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.81.0" 3 | components = ["rustfmt", "clippy"] 4 | -------------------------------------------------------------------------------- /script/compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | process_cairo_file() { 4 | source ./venv/bin/activate && cd hdp-cairo 5 | cairo_file=./src/hdp.cairo 6 | cairo-compile --version 7 | echo "Compiling $cairo_file using cairo-compile ..." 8 | cairo-compile --cairo_path="packages/eth_essentials" "$cairo_file" --output "../compiled_cairo/hdp.json" 9 | echo "Computing $cairo_file program hash using cairo-hash-program ..." 10 | cairo-hash-program --program "../compiled_cairo/hdp.json" 11 | cd .. 12 | local status=$? 13 | if [ $status -eq 0 ]; then 14 | echo "$(date '+%Y-%m-%d %H:%M:%S') - Successfully compiled $1" 15 | else 16 | echo "$(date '+%Y-%m-%d %H:%M:%S') - Failed to compile $1" 17 | return $status 18 | fi 19 | } 20 | 21 | 22 | 23 | # Call the function to ensure the virtual environment is activated 24 | process_cairo_file 25 | -------------------------------------------------------------------------------- /script/config_to_env.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script loads the config params from the passed JSON file, writing them to the environment variables. 4 | # These can then be used in the docker compose files, making then available to the different services 5 | 6 | # Path to your JSON file 7 | JSON_FILE="$1" 8 | 9 | # Check if the file exists 10 | if [ ! -f "$JSON_FILE" ]; then 11 | echo "Config file '$JSON_FILE' not found!" 12 | exit 1 13 | fi 14 | 15 | # Read JSON and export variables 16 | export_vars() { 17 | jq -r 'to_entries | .[] | "export " + .key + "=" + (.value | @sh)' "$JSON_FILE" 18 | } 19 | 20 | # Export variables 21 | eval "$(export_vars)" 22 | 23 | # Create a string with all the environment variables 24 | ENV_VARS=$(jq -r 'to_entries | map("\(.key)=\(.value | @sh)") | join("\n")' "$JSON_FILE") 25 | 26 | # Export the variables and run Docker Compose 27 | export $(echo "$ENV_VARS" | xargs) -------------------------------------------------------------------------------- /script/fetch_program.sh: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HerodotusDev/hdp/492dc32aebea3e002cf731e319bb585cf6768f75/script/fetch_program.sh -------------------------------------------------------------------------------- /script/prepare_image_build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Source the environment variables from the JSON config file 4 | source ./script/config_to_env.sh config/config.json 5 | 6 | # Define the default paths for the programs 7 | DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE="build/contract_dry_run.json" 8 | DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE="build/hdp.json" 9 | 10 | clone_registry() { 11 | echo "Cloning program registry..." 12 | temp_dir=$(mktemp -d) 13 | git clone https://github.com/petscheit/cairo-program-registry-new.git "$temp_dir/cairo-program-registry" 14 | 15 | if [ $? 
-eq 0 ]; then 16 | rm -rf cairo-programs 17 | mv "$temp_dir/cairo-program-registry" cairo-programs 18 | rm -rf "$temp_dir" 19 | echo "Programs directory created/updated successfully." 20 | else 21 | echo "Failed to clone registry." 22 | rm -rf "$temp_dir" 23 | exit 1 24 | fi 25 | } 26 | 27 | check_program_dir() { 28 | local hash=$1 29 | if [ ! -d "cairo-programs/$hash" ]; then 30 | echo "Required program directory for hash $hash not found." 31 | return 1 32 | fi 33 | return 0 34 | } 35 | 36 | # Check if cairo-programs directory exists and contains required directories 37 | if [ ! -d "cairo-programs" ] || \ 38 | ! check_program_dir "$HDP_PROGRAM_HASH" || \ 39 | ! check_program_dir "$DRY_RUN_PROGRAM_HASH"; then 40 | echo "Required program directories not found. Cloning/updating registry..." 41 | clone_registry 42 | fi 43 | 44 | # Final check for both program directories 45 | if ! check_program_dir "$HDP_PROGRAM_HASH" || \ 46 | ! check_program_dir "$DRY_RUN_PROGRAM_HASH"; then 47 | echo "Error: Required program directories still not available after clone/refetch." 48 | exit 1 49 | else 50 | echo "Required program directories found." 51 | fi 52 | 53 | # Create the build directory if it doesn't exist 54 | mkdir -p build 55 | 56 | # Move the HDP program to the default path 57 | if [ -f "cairo-programs/$HDP_PROGRAM_HASH/program.json" ]; then 58 | cp "cairo-programs/$HDP_PROGRAM_HASH/program.json" "$DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE" 59 | echo "HDP program moved to $DEFAULT_SOUND_CAIRO_RUN_CAIRO_FILE." 60 | else 61 | echo "Error: HDP program file not found in cairo-programs/$HDP_PROGRAM_HASH." 62 | exit 1 63 | fi 64 | 65 | # Move the Dry Run program to the default path 66 | if [ -f "cairo-programs/$DRY_RUN_PROGRAM_HASH/program.json" ]; then 67 | cp "cairo-programs/$DRY_RUN_PROGRAM_HASH/program.json" "$DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE" 68 | echo "Dry Run program moved to $DEFAULT_DRY_CAIRO_RUN_CAIRO_FILE." 69 | else 70 | echo "Error: Dry Run program file not found in cairo-programs/$DRY_RUN_PROGRAM_HASH." 71 | exit 1 72 | fi 73 | --------------------------------------------------------------------------------