├── .cargo └── config.toml ├── .github ├── dependabot.yml └── workflows │ ├── audit-check.yaml │ ├── benchmark.yml │ ├── build-arm64-wheels.yml │ ├── build-crate.yml │ ├── build-m1-wheel.yml │ ├── build-npm.yml │ ├── build-test.yml │ ├── check-commit-signing.yml │ ├── dependency-review.yml │ ├── prettier.yml │ └── stale-issue.yml ├── .gitignore ├── .prettierrc ├── Cargo.lock ├── Cargo.toml ├── LICENSE ├── README.md ├── RELEASE.md ├── SECURITY.md ├── benches ├── 0.generator ├── 1.generator ├── 2.generator ├── 3.generator ├── 4.generator ├── block_af9c3d98.bin ├── deserialize.rs ├── run-program.rs ├── serialize.rs └── sha256_hash.rs ├── benchmark ├── block-2000.hex ├── compressed-2000.envhex ├── compressed-2000.hex ├── concat.clvm ├── concat.hex ├── count-even.clvm ├── count-even.hex ├── factorial.clvm ├── factorial.hex ├── hash-string.clvm ├── hash-string.hex ├── hash-tree.clvm ├── hash-tree.hex ├── large-block.clvm ├── large-block.hex ├── loop_add.clvm ├── loop_add.hex ├── loop_ior.clvm ├── loop_ior.hex ├── loop_not.clvm ├── loop_not.hex ├── loop_sub.clvm ├── loop_sub.hex ├── loop_xor.clvm ├── loop_xor.hex ├── matrix-multiply.clvm ├── matrix-multiply.hex ├── point-pow.clvm ├── point-pow.hex ├── pubkey-tree.clvm ├── pubkey-tree.hex ├── shift-left.clvm ├── shift-left.hex ├── substr-tree.clvm ├── substr-tree.hex ├── substr.clvm ├── substr.hex ├── sum-tree.clvm └── sum-tree.hex ├── benchmarks.txt ├── docs ├── compressed-serialization.md └── new-operator-checklist.md ├── fuzz ├── .gitignore ├── Cargo.toml └── fuzz_targets │ ├── allocator.rs │ ├── canonical_serialization.rs │ ├── canonical_serialization_br.rs │ ├── deserialize.rs │ ├── deserialize_br.rs │ ├── deserialize_br_rand_tree.rs │ ├── fuzzing_utils.rs │ ├── incremental_serializer.rs │ ├── keccak.rs │ ├── make_tree.rs │ ├── node_eq.rs │ ├── object_cache.rs │ ├── operators.rs │ ├── parse_triples.rs │ ├── pick_node.rs │ ├── run_program.rs │ ├── serialized_len.rs │ ├── serialized_length.rs │ ├── serialized_length_trusted.rs │ ├── serializer.rs │ ├── serializer_cmp.rs │ ├── tree_cache.rs │ └── tree_hash.rs ├── op-tests ├── test-bls-ops.txt ├── test-bls-zk.txt ├── test-blspy-g1.txt ├── test-blspy-g2.txt ├── test-blspy-hash.txt ├── test-blspy-pairing.txt ├── test-blspy-verify.txt ├── test-core-ops.txt ├── test-keccak256-generated.txt ├── test-keccak256.txt ├── test-modpow.txt ├── test-more-ops.txt ├── test-secp-verify.txt ├── test-secp256k1.txt ├── test-secp256r1.txt └── test-sha256.txt ├── package-lock.json ├── package.json ├── src ├── allocator.rs ├── bls_ops.rs ├── chia_dialect.rs ├── core_ops.rs ├── cost.rs ├── dialect.rs ├── err_utils.rs ├── f_table.rs ├── keccak256_ops.rs ├── lib.rs ├── more_ops.rs ├── number.rs ├── op_utils.rs ├── reduction.rs ├── run_program.rs ├── runtime_dialect.rs ├── secp_ops.rs ├── serde │ ├── bitset.rs │ ├── bytes32.rs │ ├── de.rs │ ├── de_br.rs │ ├── de_tree.rs │ ├── errors.rs │ ├── identity_hash.rs │ ├── incremental.rs │ ├── mod.rs │ ├── object_cache.rs │ ├── parse_atom.rs │ ├── path_builder.rs │ ├── read_cache_lookup.rs │ ├── ser.rs │ ├── ser_br.rs │ ├── serialized_length.rs │ ├── test.rs │ ├── tools.rs │ ├── tree_cache.rs │ ├── utils.rs │ └── write_atom.rs ├── test_ops.rs ├── tests.rs └── traverse_path.rs ├── tests ├── generate-programs.py ├── programs │ ├── args-add.envhex │ ├── args-add.hex │ ├── args-all.envhex │ ├── args-all.hex │ ├── args-and.envhex │ ├── args-and.hex │ ├── args-any.envhex │ ├── args-any.hex │ ├── args-cat.envhex │ ├── args-cat.hex │ ├── args-mul.envhex │ ├── args-mul.hex │ ├── 
args-or.envhex │ ├── args-or.hex │ ├── args-point_add.envhex │ ├── args-point_add.hex │ ├── args-sha.envhex │ ├── args-sha.hex │ ├── args-sub.envhex │ ├── args-sub.hex │ ├── args-unknown-1.envhex │ ├── args-unknown-1.hex │ ├── args-unknown-2.envhex │ ├── args-unknown-2.hex │ ├── args-unknown-3.envhex │ ├── args-unknown-3.hex │ ├── args-unknown-4.envhex │ ├── args-unknown-4.hex │ ├── args-unknown-5.envhex │ ├── args-unknown-5.hex │ ├── args-unknown-6.envhex │ ├── args-unknown-6.hex │ ├── args-unknown-7.envhex │ ├── args-unknown-7.hex │ ├── args-unknown-8.envhex │ ├── args-unknown-8.hex │ ├── args-unknown-9.envhex │ ├── args-unknown-9.hex │ ├── args-xor.envhex │ ├── args-xor.hex │ ├── recursive-add.envhex │ ├── recursive-add.hex │ ├── recursive-ash.envhex │ ├── recursive-ash.hex │ ├── recursive-cat.envhex │ ├── recursive-cat.hex │ ├── recursive-cons.envhex │ ├── recursive-cons.hex │ ├── recursive-div.envhex │ ├── recursive-div.hex │ ├── recursive-lsh.envhex │ ├── recursive-lsh.hex │ ├── recursive-mul.envhex │ ├── recursive-mul.hex │ ├── recursive-not.envhex │ ├── recursive-not.hex │ ├── recursive-pubkey.envhex │ ├── recursive-pubkey.hex │ ├── recursive-sub.envhex │ ├── recursive-sub.hex │ ├── softfork-1.envhex │ ├── softfork-1.hex │ ├── softfork-2.envhex │ └── softfork-2.hex ├── run-programs.py └── run.py ├── tools ├── Cargo.toml ├── data │ ├── proof.json │ ├── public.json │ └── verification_key.json ├── generate-bls-tests.py ├── generate-keccak-tests.py ├── generate-secp256k1-tests.py ├── generate-secp256r1-tests.py ├── generate-sha256-tests.py └── src │ └── bin │ ├── benchmark-clvm-cost.rs │ ├── generate-fuzz-corpus.rs │ ├── generate-modpow-tests.rs │ └── verify-zksnark.rs ├── wasm ├── Cargo.toml ├── LICENSE ├── README.md ├── src │ ├── flags.rs │ ├── lazy_node.rs │ ├── lib.rs │ ├── run_program.rs │ ├── serialize.rs │ └── tests.rs └── tests │ └── index.js └── wheel ├── Cargo.toml ├── pyproject.toml ├── python ├── clvm_rs │ ├── __init__.py │ ├── at.py │ ├── casts.py │ ├── chia_dialect.py │ ├── clvm_rs.pyi │ ├── clvm_storage.py │ ├── clvm_tree.py │ ├── curry_and_treehash.py │ ├── de.py │ ├── eval_error.py │ ├── program.py │ ├── py.typed │ ├── replace.py │ ├── ser.py │ └── tree_hash.py └── tests │ ├── __init__.py │ ├── test_apis.py │ ├── test_curry_and_treehash.py │ ├── test_program.py │ └── test_serialize.py └── src ├── adapt_response.rs ├── api.rs ├── lazy_node.rs └── lib.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.x86_64-apple-darwin] 2 | rustflags = [ 3 | "-C", "link-arg=-undefined", 4 | "-C", "link-arg=dynamic_lookup", 5 | ] 6 | 7 | [target.aarch64-apple-darwin] 8 | rustflags = [ 9 | "-C", "link-arg=-undefined", 10 | "-C", "link-arg=dynamic_lookup", 11 | ] 12 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # This file is managed by the repo-content-updater project. 
Manual changes here will result in a PR to bring back 2 | # inline with the upstream template, unless you remove the dependabot managed file property from the repo 3 | 4 | version: 2 5 | updates: 6 | - package-ecosystem: "gomod" 7 | directory: / 8 | schedule: 9 | interval: "weekly" 10 | day: "tuesday" 11 | open-pull-requests-limit: 10 12 | rebase-strategy: auto 13 | labels: 14 | - dependencies 15 | - go 16 | - "Changed" 17 | reviewers: ["cmmarslender", "Starttoaster"] 18 | groups: 19 | global: 20 | patterns: 21 | - "*" 22 | 23 | - package-ecosystem: "pip" 24 | directory: / 25 | schedule: 26 | interval: "weekly" 27 | day: "tuesday" 28 | open-pull-requests-limit: 10 29 | rebase-strategy: auto 30 | labels: 31 | - dependencies 32 | - python 33 | - "Changed" 34 | reviewers: ["emlowe", "altendky"] 35 | 36 | - package-ecosystem: "github-actions" 37 | directories: ["/", ".github/actions/*"] 38 | schedule: 39 | interval: "weekly" 40 | day: "tuesday" 41 | open-pull-requests-limit: 10 42 | rebase-strategy: auto 43 | labels: 44 | - dependencies 45 | - github_actions 46 | - "Changed" 47 | reviewers: ["cmmarslender", "Starttoaster", "pmaslana"] 48 | 49 | - package-ecosystem: "npm" 50 | directory: / 51 | schedule: 52 | interval: "weekly" 53 | day: "tuesday" 54 | open-pull-requests-limit: 10 55 | rebase-strategy: auto 56 | labels: 57 | - dependencies 58 | - javascript 59 | - "Changed" 60 | reviewers: ["cmmarslender", "ChiaMineJP"] 61 | 62 | - package-ecosystem: cargo 63 | directory: / 64 | schedule: 65 | interval: "weekly" 66 | day: "tuesday" 67 | open-pull-requests-limit: 10 68 | rebase-strategy: auto 69 | labels: 70 | - dependencies 71 | - rust 72 | - "Changed" 73 | 74 | - package-ecosystem: swift 75 | directory: / 76 | schedule: 77 | interval: "weekly" 78 | day: "tuesday" 79 | open-pull-requests-limit: 10 80 | rebase-strategy: auto 81 | -------------------------------------------------------------------------------- /.github/workflows/audit-check.yaml: -------------------------------------------------------------------------------- 1 | name: Security audit 2 | on: 3 | push: 4 | paths: 5 | - "**/Cargo.toml" 6 | - "**/Cargo.lock" 7 | jobs: 8 | security_audit: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - name: install cargo audit 13 | run: cargo install cargo-audit 14 | - name: cargo audit 15 | run: cargo audit --ignore RUSTSEC-2025-0020 16 | -------------------------------------------------------------------------------- /.github/workflows/benchmark.yml: -------------------------------------------------------------------------------- 1 | name: Run benchmarks 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | benchmark: 16 | name: Benchmark on ${{ matrix.os }} 17 | runs-on: ${{ matrix.os }} 18 | strategy: 19 | fail-fast: false 20 | matrix: 21 | os: [macos-13, ubuntu-latest, windows-latest] 22 | 23 | steps: 24 | - uses: actions/checkout@v4 25 | with: 26 | fetch-depth: 1 27 | 28 | - uses: chia-network/actions/setup-python@main 29 | name: Install Python 3.9 30 | with: 31 | python-version: 3.9 32 | 33 | - name: Update pip 34 | run: | 35 | python -m pip install --upgrade pip 36 | 37 | - name: Set up rust 38 | uses: dtolnay/rust-toolchain@stable 39 | 40 | - name: Run benchmarks 41 | run: cargo bench 42 | 43 | max-cost-checks: 44 | name: Cost checks 45 | runs-on: ubuntu-latest 46 | 47 | steps: 48 | - uses: actions/checkout@v4 49 | with: 50 | fetch-depth: 1 51 | 52 | - uses: 
chia-network/actions/setup-python@main 53 | name: Install Python 3.9 54 | with: 55 | python-version: 3.9 56 | 57 | - name: Update pip 58 | run: | 59 | python -m pip install --upgrade pip 60 | 61 | - name: Set up rust 62 | uses: dtolnay/rust-toolchain@stable 63 | 64 | - name: Install dependencies 65 | run: | 66 | python -m pip install maturin 67 | rustup target add x86_64-unknown-linux-musl 68 | 69 | - name: Build 70 | env: 71 | CC: gcc 72 | run: | 73 | python -m venv venv 74 | ln -s venv/bin/activate 75 | . ./activate 76 | python -m pip install colorama 77 | maturin develop -m wheel/Cargo.toml --release --features=openssl 78 | 79 | - name: Run cost checks 80 | run: | 81 | . ./activate 82 | cd tests 83 | ./generate-programs.py 84 | ./run-programs.py 85 | -------------------------------------------------------------------------------- /.github/workflows/build-arm64-wheels.yml: -------------------------------------------------------------------------------- 1 | name: Build ARM64 wheels on ubuntu-latest 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | permissions: 15 | contents: read 16 | id-token: write 17 | 18 | jobs: 19 | build_wheels: 20 | name: ARM64 Python Wheels on ARM64 Ubuntu 21 | runs-on: ${{ matrix.os }} 22 | strategy: 23 | matrix: 24 | os: [[ARM64, Linux]] 25 | 26 | steps: 27 | - uses: Chia-Network/actions/clean-workspace@main 28 | 29 | - name: Checkout repository 30 | uses: actions/checkout@v4 31 | with: 32 | fetch-depth: 1 33 | 34 | - name: Build Python wheels 35 | run: | 36 | docker run --rm \ 37 | -v ${{ github.workspace }}:/ws --workdir=/ws \ 38 | quay.io/pypa/manylinux_2_28_aarch64 \ 39 | bash -exc '\ 40 | echo $PATH && \ 41 | curl -L https://sh.rustup.rs > rustup-init.sh && \ 42 | sh rustup-init.sh -y && \ 43 | yum -y install perl-IPC-Cmd && \ 44 | yum -y install openssl-devel && \ 45 | source $HOME/.cargo/env && \ 46 | rustup target add aarch64-unknown-linux-musl && \ 47 | rm -rf venv && \ 48 | export PATH=/opt/python/cp310-cp310/bin/:$PATH && \ 49 | export PATH=/opt/python/cp39-cp39/bin/:$PATH && \ 50 | export PATH=/opt/python/cp38-cp38/bin/:$PATH && \ 51 | /opt/python/cp38-cp38/bin/python -m venv venv && \ 52 | if [ ! -f "activate" ]; then ln -s venv/bin/activate; fi && \ 53 | . ./activate && \ 54 | pip install maturin && \ 55 | CC=gcc maturin build -m wheel/Cargo.toml --release --strip --manylinux 2_28 --features=openssl \ 56 | ' 57 | 58 | - name: Upload artifacts 59 | uses: actions/upload-artifact@v4 60 | with: 61 | name: wheels 62 | path: target/wheels/ 63 | 64 | - name: Install Twine 65 | run: | 66 | if [ ! -f "venv" ]; then sudo rm -rf venv; fi 67 | sudo apt-get install python3-venv python3-pip -y 68 | python3 -m venv venv 69 | if [ ! -f "activate" ]; then ln -s venv/bin/activate; fi 70 | . 
./activate 71 | pip install setuptools_rust 72 | 73 | - name: publish (PyPi) 74 | if: startsWith(github.event.ref, 'refs/tags') 75 | uses: pypa/gh-action-pypi-publish@release/v1 76 | with: 77 | packages-dir: target/wheels/ 78 | skip-existing: true 79 | 80 | - name: Clean up AMR64 81 | if: startsWith(matrix.os, 'ARM64') 82 | run: | 83 | rm -rf venv 84 | rm -rf dist 85 | -------------------------------------------------------------------------------- /.github/workflows/build-crate.yml: -------------------------------------------------------------------------------- 1 | name: Build rust crate 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | build_crate: 16 | name: Crate 17 | runs-on: ubuntu-latest 18 | strategy: 19 | fail-fast: false 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: Set up rusts 27 | uses: dtolnay/rust-toolchain@stable 28 | 29 | - name: fmt (stable) 30 | run: cargo +stable fmt -- --files-with-diff --check 31 | - name: clippy (stable) 32 | run: cargo +stable clippy 33 | - name: tests 34 | run: cargo test && cargo test --release 35 | - name: build 36 | run: cargo build --release 37 | - name: dry-run of `cargo publish` 38 | run: cargo publish --dry-run 39 | 40 | - name: publish to crates.io if tagged 41 | if: startsWith(github.event.ref, 'refs/tags') 42 | env: 43 | CARGO_REGISTRY_TOKEN: ${{ secrets.cargo_registry_token }} 44 | run: cargo publish 45 | -------------------------------------------------------------------------------- /.github/workflows/build-m1-wheel.yml: -------------------------------------------------------------------------------- 1 | name: Build M1 Wheels 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | concurrency: 15 | group: ${{ github.ref }}-${{ github.workflow }}-${{ github.event_name }}--${{ (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/heads/release/') || startsWith(github.ref, 'refs/heads/long_lived/')) && github.sha || '' }} 16 | cancel-in-progress: true 17 | 18 | permissions: 19 | contents: read 20 | id-token: write 21 | 22 | jobs: 23 | build_wheels: 24 | name: Build wheel on Mac M1 25 | runs-on: [macos-13-arm64] 26 | strategy: 27 | fail-fast: false 28 | 29 | steps: 30 | - uses: Chia-Network/actions/clean-workspace@main 31 | 32 | - name: Checkout code 33 | uses: actions/checkout@v4 34 | with: 35 | fetch-depth: 0 36 | 37 | - name: Install python 38 | uses: Chia-Network/actions/setup-python@main 39 | with: 40 | python-version: "3.10" 41 | 42 | - name: Set up rust 43 | run: | 44 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > rust.sh 45 | sh rust.sh -y 46 | 47 | - name: Build m1 wheels 48 | env: 49 | MACOSX_DEPLOYMENT_TARGET: "13.0" 50 | run: | 51 | python3 -m venv venv 52 | . ./venv/bin/activate 53 | export PATH=~/.cargo/bin:$PATH 54 | pip install maturin 55 | maturin build -m wheel/Cargo.toml -i python --release --strip --features=openssl 56 | 57 | - name: Install clvm_rs wheel 58 | run: | 59 | . ./venv/bin/activate 60 | ls ./target/wheels/ 61 | pip install ./target/wheels/clvm_rs*.whl 62 | 63 | - name: Install other wheels 64 | run: | 65 | . ./venv/bin/activate 66 | python -m pip install pytest 67 | python -m pip install blspy 68 | 69 | - name: Run tests from wheel 70 | run: | 71 | . 
./venv/bin/activate 72 | cd wheel/python 73 | pytest --import-mode append tests 74 | # we use `append` because otherwise the `clvm_rs` source is added 75 | # to `sys.path` and it uses that instead of the wheel (and so 76 | # ignoring `clvm_rs.so`, which is pretty important) 77 | 78 | - name: Upload artifacts 79 | uses: actions/upload-artifact@v4 80 | with: 81 | name: wheels 82 | path: ./target/wheels/ 83 | 84 | upload: 85 | name: Upload to PyPI 86 | runs-on: ubuntu-latest 87 | needs: build_wheels 88 | steps: 89 | - name: Checkout code 90 | uses: actions/checkout@v4 91 | with: 92 | fetch-depth: 0 93 | 94 | - name: Install python 95 | uses: Chia-Network/actions/setup-python@main 96 | with: 97 | python-version: "3.10" 98 | 99 | - name: Download artifacts 100 | uses: actions/download-artifact@v4 101 | with: 102 | name: wheels 103 | path: ./target/wheels/ 104 | 105 | - name: publish (PyPi) 106 | if: startsWith(github.event.ref, 'refs/tags') 107 | uses: pypa/gh-action-pypi-publish@release/v1 108 | with: 109 | packages-dir: target/wheels/ 110 | skip-existing: true 111 | -------------------------------------------------------------------------------- /.github/workflows/build-npm.yml: -------------------------------------------------------------------------------- 1 | name: Build npm package 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | build_crate: 16 | name: NPM 17 | runs-on: ubuntu-latest 18 | strategy: 19 | fail-fast: false 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | with: 24 | fetch-depth: 0 25 | 26 | - name: install wasm-pack 27 | run: cargo install wasm-pack 28 | 29 | - name: wasm-pack build and pack 30 | run: wasm-pack build --release --target=nodejs wasm && wasm-pack pack wasm 31 | 32 | - name: Setup Node 18.x 33 | uses: actions/setup-node@v4 34 | with: 35 | node-version: "18.x" 36 | 37 | - name: Test wasm 38 | run: node wasm/tests/index.js 39 | 40 | - name: Upload npm pkg artifacts 41 | uses: actions/upload-artifact@v4 42 | with: 43 | name: npm-pkg 44 | path: ./wasm/pkg/clvm_wasm-*.tgz 45 | 46 | - name: publish to npmjs.com if tagged 47 | if: startsWith(github.event.ref, 'refs/tags') 48 | uses: JS-DevTools/npm-publish@v3 49 | with: 50 | token: ${{ secrets.node_auth_token }} 51 | package: wasm/pkg/package.json 52 | -------------------------------------------------------------------------------- /.github/workflows/check-commit-signing.yml: -------------------------------------------------------------------------------- 1 | name: 🚨 Check commit signing 2 | 3 | on: 4 | push: 5 | branches: 6 | - long_lived/** 7 | - main 8 | - release/** 9 | pull_request: 10 | branches: 11 | - "**" 12 | 13 | concurrency: 14 | group: ${{ github.event_name == 'pull_request' && format('{0}-{1}', github.workflow_ref, github.event.pull_request.number) || github.run_id }} 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | check-commit-signing: 19 | name: Check commit signing 20 | runs-on: [ubuntu-latest] 21 | timeout-minutes: 5 22 | 23 | steps: 24 | - name: Checkout Code 25 | uses: actions/checkout@v4 26 | with: 27 | fetch-depth: 0 28 | 29 | - uses: chia-network/actions/check-commit-signing@main 30 | -------------------------------------------------------------------------------- /.github/workflows/dependency-review.yml: -------------------------------------------------------------------------------- 1 | # Dependency Review Action 2 | # 3 | # This Action will scan dependency manifest files that change as part 
of a Pull Request, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. 4 | # 5 | # Source repository: https://github.com/actions/dependency-review-action 6 | # Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement 7 | name: "Dependency Review" 8 | on: [pull_request] 9 | 10 | permissions: 11 | contents: read 12 | 13 | jobs: 14 | dependency-review: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: "Checkout Repository" 18 | uses: actions/checkout@v4 19 | - name: "Dependency Review" 20 | uses: actions/dependency-review-action@v4 21 | with: 22 | allow-ghsas: GHSA-xphf-cx8h-7q9g 23 | -------------------------------------------------------------------------------- /.github/workflows/prettier.yml: -------------------------------------------------------------------------------- 1 | name: Prettier 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - dev 8 | tags: 9 | - "**" 10 | pull_request: 11 | branches: 12 | - "**" 13 | 14 | jobs: 15 | prettier: 16 | name: Prettier 17 | runs-on: ubuntu-latest 18 | strategy: 19 | fail-fast: false 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | 24 | - name: Setup Node.js 25 | uses: actions/setup-node@v4 26 | 27 | - name: Install dependencies 28 | run: npm install 29 | 30 | - name: Run Prettier 31 | run: npm run prettier 32 | -------------------------------------------------------------------------------- /.github/workflows/stale-issue.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: "Close stale issues" 3 | on: 4 | schedule: 5 | - cron: "0 11 * * *" 6 | 7 | jobs: 8 | stale: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: chia-network/stale@main 12 | with: 13 | operations-per-run: 10000 14 | ascending: true 15 | days-before-issue-stale: 14 16 | days-before-issue-close: 7 17 | days-before-pr-stale: 60 18 | days-before-pr-close: -1 19 | exempt-all-pr-milestones: true 20 | exempt-all-issue-milestones: true 21 | exempt-all-assignees: true 22 | stale-issue-label: stale-issue 23 | stale-pr-label: stale-pr 24 | remove-stale-when-updated: true 25 | stale-issue-message: > 26 | 'This issue has been flagged as stale as there has been no 27 | activity on it in 14 days. If this issue is still affecting you 28 | and in need of review, please update it to keep it open.' 29 | close-issue-message: > 30 | 'This issue was automatically closed because it has been flagged 31 | as stale and subsequently passed 7 days with no further activity.' 32 | stale-pr-message: > 33 | 'This PR has been flagged as stale due to no activity for over 60 34 | days. It will not be automatically closed, but it has been given 35 | a stale-pr label and should be manually reviewed.' 
36 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # These are backup files generated by rustfmt 7 | **/*.rs.bk 8 | 9 | # MSVC Windows builds of rustc generate these, which store debugging information 10 | *.pdb 11 | 12 | # RustRover 13 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 14 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 15 | # and can be added to the global gitignore or merged into this file. For a more nuclear 16 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 17 | .idea/ 18 | 19 | # python ignore 20 | /venv 21 | /.venv 22 | 23 | # Node.js 24 | /target 25 | /node_modules 26 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "useTabs": false, 3 | "tabWidth": 2 4 | } 5 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["fuzz", "tools", "wasm", "wheel"] 3 | 4 | [package] 5 | name = "clvmr" 6 | version = "0.14.0" 7 | authors = ["Richard Kiss "] 8 | edition = "2021" 9 | license = "Apache-2.0" 10 | description = "Implementation of `clvm` for Chia Network's cryptocurrency" 11 | homepage = "https://github.com/Chia-Network/clvm_rs/" 12 | repository = "https://github.com/Chia-Network/clvm_rs/" 13 | readme = "README.md" 14 | 15 | [lib] 16 | name = "clvmr" 17 | crate-type = ["rlib"] 18 | bench = false 19 | 20 | [features] 21 | # when enabling the "counters" features, the CLVM interpreter is instrumented to 22 | # collect counters about the programs it executes 23 | counters = [] 24 | 25 | # when enabled, pre-eval and post-eval callbacks are enabled. This is useful for 26 | # debugging and tracing of programs. 27 | pre-eval = [] 28 | 29 | # On UNIX-based platforms, you may get a speed boost on `sha256` operations by building 30 | # with OpenSSL when enabled 31 | openssl = ["chia-sha2/openssl"] 32 | 33 | [profile.release] 34 | lto = "thin" 35 | 36 | [workspace.dependencies] 37 | clvmr = { path = "." 
} 38 | lazy_static = "1.5.0" 39 | num-bigint = "0.4.6" 40 | num-traits = "0.2.19" 41 | num-integer = "0.1.46" 42 | chia-bls = "0.22.0" 43 | chia-sha2 = "0.22.0" 44 | hex-literal = "0.4.1" 45 | # for secp sigs 46 | k256 = "0.13.4" 47 | p256 = "0.13.2" 48 | rstest = "0.21.0" 49 | criterion = "0.5.1" 50 | hex = "0.4.3" 51 | pyo3 = "0.22.6" 52 | wasm-bindgen = "0.2.100" 53 | wasm-bindgen-test = "0.3.50" 54 | js-sys = "0.3.77" 55 | getrandom = "0.2.15" 56 | libfuzzer-sys = "0.4.9" 57 | rand = "0.8.5" 58 | sha1 = "0.10.6" 59 | linreg = "0.2.0" 60 | serde = "1.0.219" 61 | serde_json = "1.0.140" 62 | clap = "4.5.37" 63 | rand_chacha = "0.3.1" 64 | bitvec = "1.0.1" 65 | arbitrary = { version = "1.4.1", features = ["derive"] } 66 | bumpalo = "3.17.0" 67 | 68 | [dependencies] 69 | lazy_static = { workspace = true } 70 | num-bigint = { workspace = true } 71 | num-traits = { workspace = true } 72 | num-integer = { workspace = true } 73 | chia-bls = { workspace = true } 74 | chia-sha2 = { workspace = true } 75 | hex-literal = { workspace = true } 76 | bitvec = { workspace = true } 77 | # for secp sigs 78 | k256 = { version = "0.13.4", features = ["ecdsa"] } 79 | p256 = { version = "0.13.2", features = ["ecdsa"] } 80 | # for keccak256 81 | sha3 = "0.10.8" 82 | rand = { workspace = true } 83 | hex = { workspace = true } 84 | sha1 = { workspace = true } 85 | bumpalo = { workspace = true } 86 | 87 | [dev-dependencies] 88 | rstest = { workspace = true } 89 | criterion = { workspace = true } 90 | hex = { workspace = true } 91 | 92 | [[bench]] 93 | name = "run-program" 94 | harness = false 95 | 96 | [[bench]] 97 | name = "deserialize" 98 | harness = false 99 | 100 | [[bench]] 101 | name = "sha256_hash" 102 | harness = false 103 | 104 | [[bench]] 105 | name = "serialize" 106 | harness = false 107 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Rust implementation of clvm. 2 | 3 | ![GitHub](https://img.shields.io/github/license/Chia-Network/clvm_rs?logo=Github) 4 | [![Coverage Status](https://coveralls.io/repos/github/Chia-Network/clvm_rs/badge.svg?branch=main)](https://coveralls.io/github/Chia-Network/clvm_rs?branch=main) 5 | ![Build Crate](https://github.com/Chia-Network/clvm_rs/actions/workflows/build-crate.yml/badge.svg) 6 | ![Build Wheels](https://github.com/Chia-Network/clvm_rs/actions/workflows/build-test.yml/badge.svg) 7 | 8 | ![PyPI](https://img.shields.io/pypi/v/clvm_rs?logo=pypi) 9 | [![Crates.io](https://img.shields.io/crates/v/clvmr.svg)](https://crates.io/crates/clvmr) 10 | 11 | The cargo workspace includes an rlib crate, for use with rust or other applications, and a python wheel. 12 | 13 | The python wheel is in `wheel`. The npm package is in `wasm`. 14 | 15 | ## Tests 16 | 17 | In order to run the unit tests, run: 18 | 19 | ``` 20 | cargo test 21 | ``` 22 | 23 | ## Fuzzing 24 | 25 | The fuzzing infrastructure for `clvm_rs` uses [cargo-fuzz](https://github.com/rust-fuzz/cargo-fuzz). 26 | 27 | Documentation for setting up fuzzing in rust can be found [here](https://rust-fuzz.github.io/book/cargo-fuzz.html). 28 | 29 | To generate an initial corpus (for the `run_program` fuzzer), run: 30 | 31 | ``` 32 | cd tools 33 | cargo run generate-fuzz-corpus 34 | ``` 35 | 36 | To get started, run: 37 | 38 | ``` 39 | cargo fuzz run fuzz_run_program --jobs=32 -- -rss_limit_mb=4096 40 | ``` 41 | 42 | But with whatever number of jobs works best for you. 
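To see which fuzz targets are available in your checkout (they are declared in `fuzz/Cargo.toml` and correspond to the sources under `fuzz/fuzz_targets/`), you can list them with:

```
cargo fuzz list
```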
43 | 44 | If you find issues in `clvm_rs` please use our [bug bounty program](https://hackerone.com/chia_network). 45 | 46 | ## Build Wheel 47 | 48 | The `clvm_rs` wheel has python bindings for the rust implementation of clvm. 49 | 50 | Use `maturin` to build the python interface. First, install into current virtualenv with 51 | 52 | ``` 53 | $ pip install maturin 54 | ``` 55 | 56 | While in the `wheel` directory, build `clvm_rs` into the current virtualenv with 57 | 58 | ``` 59 | $ maturin develop --release 60 | ``` 61 | 62 | On UNIX-based platforms, you may get a speed boost on `sha256` operations by building 63 | with OpenSSL. 64 | 65 | ``` 66 | $ maturin develop --release --features=openssl 67 | ``` 68 | 69 | To build the wheel, do 70 | 71 | ``` 72 | $ maturin build --release 73 | ``` 74 | 75 | or 76 | 77 | ``` 78 | $ maturin build --release --features=openssl 79 | ``` 80 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | To make a new release: 2 | 3 | - update version in the `Cargo.toml` files 4 | 5 | ```bash 6 | $ git checkout -b new_release 7 | $ sed -i .bak 's/^version.*/version = "0.1.20"/' Cargo.toml */Cargo.toml 8 | # or edit them manually with `vi Cargo.toml */Cargo.toml` 9 | 10 | # build to update `Cargo.lock` 11 | $ cargo build 12 | 13 | $ git add Cargo.toml Cargo.lock */Cargo.toml 14 | 15 | $ git commit -m 'Update version.' 16 | 17 | $ git push 18 | ``` 19 | 20 | Now create a PR with the `new_release` branch. Merge it. 21 | 22 | ``` 23 | $ git checkout main 24 | $ git pull 25 | $ git tag 0.1.20 26 | $ git push --tags 27 | ``` 28 | 29 | The `0.1.20` tag on GitHub will cause the artifacts to be uploaded to crates.io, pypi.org and npmjs.com. 30 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | 5 | Please report security concerns to https://hackerone.com/chia_network. 6 | 7 | If your security issue is established to be valid, we will reach out immediately to establish 8 | communication channels and compensate the issue reporter for responsibly reporting security bugs via 9 | our bug bounty program. 
10 | -------------------------------------------------------------------------------- /benches/0.generator: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/0.generator -------------------------------------------------------------------------------- /benches/1.generator: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/1.generator -------------------------------------------------------------------------------- /benches/2.generator: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/2.generator -------------------------------------------------------------------------------- /benches/3.generator: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/3.generator -------------------------------------------------------------------------------- /benches/4.generator: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/4.generator -------------------------------------------------------------------------------- /benches/block_af9c3d98.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/benches/block_af9c3d98.bin -------------------------------------------------------------------------------- /benches/deserialize.rs: -------------------------------------------------------------------------------- 1 | use clvmr::allocator::Allocator; 2 | use clvmr::serde::{ 3 | node_from_bytes, node_from_bytes_backrefs, node_from_bytes_backrefs_old, 4 | node_to_bytes_backrefs, serialized_length_from_bytes, serialized_length_from_bytes_trusted, 5 | tree_hash_from_stream, 6 | }; 7 | use criterion::{criterion_group, criterion_main, Criterion}; 8 | use std::include_bytes; 9 | use std::time::Instant; 10 | 11 | fn deserialize_benchmark(c: &mut Criterion) { 12 | let block = include_bytes!("block_af9c3d98.bin"); 13 | let compressed_block = { 14 | let mut a = Allocator::new(); 15 | let input = node_from_bytes(&mut a, block).expect("failed to parse input file"); 16 | node_to_bytes_backrefs(&a, input).expect("failed to compress generator") 17 | }; 18 | 19 | let mut group = c.benchmark_group("deserialize"); 20 | 21 | for (bl, name_suffix) in &[ 22 | (block as &[u8], ""), 23 | (compressed_block.as_ref(), "-compressed"), 24 | ] { 25 | group.bench_function(format!("serialized_length_from_bytes{name_suffix}"), |b| { 26 | b.iter(|| { 27 | let start = Instant::now(); 28 | serialized_length_from_bytes(bl).expect("serialized_length_from_bytes"); 29 | start.elapsed() 30 | }) 31 | }); 32 | 33 | group.bench_function( 34 | format!("serialized_length_from_bytes_trusted{name_suffix}"), 35 | |b| { 36 | b.iter(|| { 37 | let start = Instant::now(); 38 | serialized_length_from_bytes_trusted(bl) 39 | .expect("serialized_length_from_bytes_truested"); 40 | start.elapsed() 41 | }) 42 | }, 43 | ); 44 | 45 | // we don't support compressed CLVM in tree_hash_from_stream 
yet 46 | if name_suffix.is_empty() { 47 | group.bench_function(format!("tree_hash_from_stream{name_suffix}"), |b| { 48 | b.iter(|| { 49 | let mut cur = std::io::Cursor::new(*bl); 50 | let start = Instant::now(); 51 | tree_hash_from_stream(&mut cur).expect("tree_hash_from_stream"); 52 | start.elapsed() 53 | }) 54 | }); 55 | } 56 | 57 | let mut a = Allocator::new(); 58 | let iter_checkpoint = a.checkpoint(); 59 | 60 | group.bench_function(format!("node_from_bytes_backrefs{name_suffix}"), |b| { 61 | b.iter(|| { 62 | a.restore_checkpoint(&iter_checkpoint); 63 | let start = Instant::now(); 64 | node_from_bytes_backrefs(&mut a, bl).expect("node_from_bytes_backrefs"); 65 | start.elapsed() 66 | }) 67 | }); 68 | 69 | group.bench_function(format!("node_from_bytes_backrefs_old{name_suffix}"), |b| { 70 | b.iter(|| { 71 | a.restore_checkpoint(&iter_checkpoint); 72 | let start = Instant::now(); 73 | node_from_bytes_backrefs_old(&mut a, bl).expect("node_from_bytes_backrefs_old"); 74 | start.elapsed() 75 | }) 76 | }); 77 | } 78 | 79 | let mut a = Allocator::new(); 80 | let iter_checkpoint = a.checkpoint(); 81 | group.bench_function("node_from_bytes", |b| { 82 | b.iter(|| { 83 | a.restore_checkpoint(&iter_checkpoint); 84 | let start = Instant::now(); 85 | node_from_bytes(&mut a, block).expect("node_from_bytes"); 86 | start.elapsed() 87 | }) 88 | }); 89 | 90 | group.finish(); 91 | } 92 | 93 | criterion_group!(deserialize, deserialize_benchmark); 94 | criterion_main!(deserialize); 95 | -------------------------------------------------------------------------------- /benches/serialize.rs: -------------------------------------------------------------------------------- 1 | use clvmr::allocator::Allocator; 2 | use clvmr::serde::{ 3 | node_from_bytes, node_from_bytes_backrefs, node_to_bytes_backrefs, node_to_bytes_limit, 4 | Serializer, 5 | }; 6 | use criterion::black_box; 7 | use criterion::{criterion_group, criterion_main, Criterion}; 8 | use std::include_bytes; 9 | use std::time::Instant; 10 | 11 | fn serialize_benchmark(c: &mut Criterion) { 12 | // the blocks are serialized with back-refs. In order to accurately measure 13 | // the cost of the compression itself, we first need to inflate them and 14 | // then serialize again. 
15 | let block0: &[u8] = include_bytes!("0.generator"); 16 | let block1: &[u8] = include_bytes!("1.generator"); 17 | let block2: &[u8] = include_bytes!("2.generator"); 18 | let block3: &[u8] = include_bytes!("3.generator"); 19 | let block4: &[u8] = include_bytes!("4.generator"); 20 | 21 | let mut group = c.benchmark_group("serialize"); 22 | 23 | for (block, name) in [ 24 | (&block0, "0"), 25 | (&block1, "1"), 26 | (&block2, "2"), 27 | (&block3, "3"), 28 | (&block4, "4"), 29 | ] { 30 | let mut a = Allocator::new(); 31 | let node = node_from_bytes_backrefs(&mut a, block).expect("node_from_bytes_backrefs"); 32 | 33 | // if the inflated form takes too much space, just run the benchmark on the compact form 34 | let node = if let Ok(inflated) = node_to_bytes_limit(&a, node, 2000000) { 35 | a = Allocator::new(); 36 | node_from_bytes(&mut a, inflated.as_slice()).expect("node_from_bytes") 37 | } else { 38 | node 39 | }; 40 | 41 | group.bench_function(format!("node_to_bytes_backrefs {name}"), |b| { 42 | b.iter(|| { 43 | let start = Instant::now(); 44 | black_box(node_to_bytes_backrefs(&a, node).expect("node_to_bytes_backrefs")); 45 | start.elapsed() 46 | }) 47 | }); 48 | 49 | group.bench_function(format!("Serializer {name}"), |b| { 50 | b.iter(|| { 51 | let start = Instant::now(); 52 | let mut ser = Serializer::new(None); 53 | let _ = ser.add(&a, node); 54 | black_box(ser.into_inner()); 55 | start.elapsed() 56 | }) 57 | }); 58 | } 59 | 60 | group.finish(); 61 | } 62 | 63 | criterion_group!(serialize, serialize_benchmark); 64 | criterion_main!(serialize); 65 | -------------------------------------------------------------------------------- /benches/sha256_hash.rs: -------------------------------------------------------------------------------- 1 | use criterion::{criterion_group, criterion_main, Criterion}; 2 | 3 | use chia_sha2::Sha256; 4 | 5 | const BYTE_LENGTHS: [u8; 6] = [8, 16, 32, 64, 96, 128]; 6 | const MAX_VAL: u8 = 250; 7 | 8 | fn gen_bytes(value: u8, amount: u8) -> Vec { 9 | let mut bytes = Vec::new(); 10 | for _ in 0..amount { 11 | bytes.push(value); 12 | } 13 | bytes 14 | } 15 | 16 | fn hash_bytes(bytes: &[u8]) -> [u8; 32] { 17 | let mut sha256 = Sha256::new(); 18 | sha256.update(bytes); 19 | sha256.finalize() 20 | } 21 | 22 | fn sha256_hash_benchmark(c: &mut Criterion) { 23 | // setup benchmark 24 | let mut group = c.benchmark_group("sha256_hash"); 25 | 26 | group.bench_function("hash_benchmark", |b| { 27 | b.iter(|| { 28 | // this figures out how many iterations to run. 
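// (criterion decides how many times to run this closure when collecting
//  samples; each run hashes MAX_VAL * BYTE_LENGTHS.len() buffers of repeated bytes)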
29 | for val in 0..MAX_VAL { 30 | for len in BYTE_LENGTHS { 31 | let bytes = gen_bytes(val, len); 32 | hash_bytes(&bytes); 33 | } 34 | } 35 | }) 36 | }); 37 | // create 38 | group.finish(); 39 | } 40 | 41 | criterion_group!(sha256_hash, sha256_hash_benchmark); 42 | criterion_main!(sha256_hash); 43 | -------------------------------------------------------------------------------- /benchmark/compressed-2000.envhex: -------------------------------------------------------------------------------- 1 | ffff02ffff01ff05ffff02ff3effff04ff02ffff04ff05ff8080808080ffff04ffff01ffffff81ff7fff81df81bfffffff02ffff03ffff09ff0bffff01818080ffff01ff04ff80ffff04ff05ff808080ffff01ff02ffff03ffff0aff0bff1880ffff01ff02ff1affff04ff02ffff04ffff02ffff03ffff0aff0bff1c80ffff01ff02ffff03ffff0aff0bff1480ffff01ff08ffff018c62616420656e636f64696e6780ffff01ff04ffff0effff18ffff011fff0b80ffff0cff05ff80ffff01018080ffff04ffff0cff05ffff010180ff80808080ff0180ffff01ff04ffff18ffff013fff0b80ffff04ff05ff80808080ff0180ff80808080ffff01ff04ff0bffff04ff05ff80808080ff018080ff0180ff04ffff0cff15ff80ff0980ffff04ffff0cff15ff0980ff808080ffff04ffff04ff05ff1380ffff04ff2bff808080ffff02ff16ffff04ff02ffff04ff09ffff04ffff02ff3effff04ff02ffff04ff15ff80808080ff8080808080ff02ffff03ffff09ffff0cff05ff80ffff010180ff1080ffff01ff02ff2effff04ff02ffff04ffff02ff3effff04ff02ffff04ffff0cff05ffff010180ff80808080ff80808080ffff01ff02ff12ffff04ff02ffff04ffff0cff05ffff010180ffff04ffff0cff05ff80ffff010180ff808080808080ff0180ff018080ffc189ff01ffffffa00000000000000000000000000000000000000000000000000000000000000000ff830186a080ffffff02ffff01ff02ffff01ff02ffff03ff0bffff01ff02ffff03ffff09ff05ffff1dff0bffff1effff0bff0bffff02ff06ffff04ff02ffff04ff17ff8080808080808080ffff01ff02ff17ff2f80ffff01ff088080ff0180ffff01ff04ffff04ff04ffff04ff05ffff04ffff02ff06ffff04ff02ffff04ff17ff80808080ff80808080ffff02ff17ff2f808080ff0180ffff04ffff01ff32ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff06ffff04ff02ffff04ff09ff80808080ffff02ff06ffff04ff02ffff04ff0dff8080808080ffff01ff0bffff0101ff058080ff0180ff018080ffff04ffff01b081963921826355dcb6c355ccf9c2637c18adf7d38ee44d803ea9ca41587e48c913d8d46896eb830aeadfc13144a8eac3ff018080ffff80ffff01ffff33ffa06b7a83babea1eec790c947db4464ab657dbe9b887fe9acc247062847b8c2a8a9ff830186a08080ff808080808080 2 | -------------------------------------------------------------------------------- /benchmark/concat.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (str N M) 2 | ; (defun iter (V N) 3 | ; (if (= N 0) V (iter (concat V V) (- N 1))) 4 | ; ) 5 | ; (defun shift_c (str i) 6 | ; (if (> i 0) 7 | ; (c str (shift_c (lsh str 8) (- i 1))) 8 | ; () 9 | ; ) 10 | ; ) 11 | ; (shift_c (iter str N) M) 12 | ;) 13 | 14 | (a (q 2 6 (c 2 (c (a 4 (c 2 (c 5 (c 11 (q))))) (c 23 (q))))) (c (q (a (i (= 11 (q)) (q . 5) (q 2 4 (c 2 (c (concat 5 5) (c (- 11 (q . 1)) (q)))))) 1) 2 (i (> 11 (q)) (q 4 5 (a 6 (c 2 (c (lsh 5 (q . 8)) (c (- 11 (q . 
1)) (q)))))) (q 1)) 1) 1)) 15 | -------------------------------------------------------------------------------- /benchmark/concat.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff06ffff04ff02ffff04ffff02ff04ffff04ff02ffff04ff05ffff04ff0bffff018080808080ffff04ff17ffff018080808080ffff04ffff01ffff02ffff03ffff09ff0bffff018080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff0eff05ff0580ffff04ffff11ff0bffff010180ffff01808080808080ff0180ff02ffff03ffff15ff0bffff018080ffff01ff04ff05ffff02ff06ffff04ff02ffff04ffff17ff05ffff010880ffff04ffff11ff0bffff010180ffff01808080808080ffff01ff018080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/count-even.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (num) 2 | ; (defun count-even (numbers) 3 | ; (if numbers 4 | ; (+ (not (r (divmod (f numbers) 2))) 5 | ; (count-even (r numbers))) 6 | ; 0) 7 | ; ) 8 | ; (count-even num) 9 | ;) 10 | 11 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i 5 (q 16 (not (r (divmod 9 (q . 2)))) (a 2 (c 2 (c 13 (q))))) (q 1)) 1) 1)) 12 | -------------------------------------------------------------------------------- /benchmark/count-even.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ff05ffff01ff10ffff20ffff06ffff14ff09ffff0102808080ffff02ff02ffff04ff02ffff04ff0dffff018080808080ffff01ff018080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/factorial.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (arg_one) 2 | ; (defun factorial (input) 3 | ; (if (= input 1) 1 (* (factorial (- input 1)) input)) 4 | ; ) 5 | ; (factorial arg_one) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (= 5 (q . 1)) (q 1 . 1) (q 18 (a 2 (c 2 (c (- 5 (q . 1)) (q)))) 5)) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/factorial.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff09ff05ffff010180ffff01ff0101ffff01ff12ffff02ff02ffff04ff02ffff04ffff11ff05ffff010180ffff0180808080ff058080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/hash-string.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (strs) 2 | ; (a sha256 strs) 3 | ;) 4 | 5 | (a (q 11) 2) 6 | -------------------------------------------------------------------------------- /benchmark/hash-string.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff0b80ff0280 -------------------------------------------------------------------------------- /benchmark/hash-tree.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (tree) 2 | ; (defun hash (tree) 3 | ; (if (l tree) (sha256 (hash (f tree)) (hash (r tree))) tree) 4 | ; ) 5 | ; (hash tree) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (l 5) (q 11 (a 2 (c 2 (c 9 (q)))) (a 2 (c 2 (c 13 (q))))) (q . 
5)) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/hash-tree.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff07ff0580ffff01ff0bffff02ff02ffff04ff02ffff04ff09ffff0180808080ffff02ff02ffff04ff02ffff04ff0dffff018080808080ffff010580ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/large-block.clvm: -------------------------------------------------------------------------------- 1 | (a (q 2 4 (c 2 (c (a 3 (q)) (q ())))) (c (q (a (i 5 (q 2 4 (c 2 (c 13 (c (c (a 10 (c 2 (c 9 (q)))) 11) (q))))) (q . 11)) 1) (a (i (a (i (= 29 (q)) (q 2 (i (= 117 (q)) (q 2 (i (= (strlen 9) (q . 32)) (q 1 . 1) (q 1)) 1) (q 1)) 1) (q 1)) 1) (q 4 9 (c (a 14 (c 2 (c 37 (q)))) (c (a 37 85) (q)))) (q 8)) 1) 2 (i (l 5) (q 11 (q . 2) (a 14 (c 2 (c 9 (q)))) (a 14 (c 2 (c 13 (q))))) (q 11 (q . 1) 5)) 1) 1)) 2 | -------------------------------------------------------------------------------- /benchmark/large-block.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff03ffff018080ffff01ff8080808080ffff04ffff01ffff02ffff03ff05ffff01ff02ff04ffff04ff02ffff04ff0dffff04ffff04ffff02ff0affff04ff02ffff04ff09ffff0180808080ff0b80ffff018080808080ffff010b80ff0180ffff02ffff03ffff02ffff03ffff09ff1dffff018080ffff01ff02ffff03ffff09ff75ffff018080ffff01ff02ffff03ffff09ffff0dff0980ffff012080ffff01ff0101ffff01ff018080ff0180ffff01ff018080ff0180ffff01ff018080ff0180ffff01ff04ff09ffff04ffff02ff0effff04ff02ffff04ff25ffff0180808080ffff04ffff02ff25ff5580ffff0180808080ffff01ff088080ff0180ff02ffff03ffff07ff0580ffff01ff0bffff0102ffff02ff0effff04ff02ffff04ff09ffff0180808080ffff02ff0effff04ff02ffff04ff0dffff018080808080ffff01ff0bffff0101ff058080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/loop_add.clvm: -------------------------------------------------------------------------------- 1 | ; (mod (A) 2 | ; (defun iter (N) 3 | ; (if N (+ (iter (- N 1)) N) 10000) 4 | ; ) 5 | ; (iter A) 6 | ; ) 7 | (a (q 2 2 (c 2 (c 5 ()))) (c (q 2 (i 5 (q 16 (a 2 (c 2 (c (- 5 (q . 1)) ()))) 5) (q 1 . 10000)) 1) 1)) 8 | -------------------------------------------------------------------------------- /benchmark/loop_add.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02ffff03ff05ffff01ff10ffff02ff02ffff04ff02ffff04ffff11ff05ffff010180ff80808080ff0580ffff01ff0182271080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/loop_ior.clvm: -------------------------------------------------------------------------------- 1 | ; (mod (A) 2 | ; (defun iter (N) 3 | ; (if N (logior (iter (- N 1)) N) 0) 4 | ; ) 5 | ; (iter A) 6 | ; ) 7 | (a (q 2 2 (c 2 (c 5 ()))) (c (q 2 (i 5 (q 25 (a 2 (c 2 (c (- 5 (q . 
1)) ()))) 5) ()) 1) 1)) 8 | -------------------------------------------------------------------------------- /benchmark/loop_ior.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02ffff03ff05ffff01ff19ffff02ff02ffff04ff02ffff04ffff11ff05ffff010180ff80808080ff0580ff8080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/loop_not.clvm: -------------------------------------------------------------------------------- 1 | ; (mod (A) 2 | ; (defun iter (N) 3 | ; (if N (lognot (iter (- N 1))) 0xcccccccccccccccccc) 4 | ; ) 5 | ; (iter A) 6 | ; ) 7 | (a (q 2 2 (c 2 (c 5 ()))) (c (q 2 (i 5 (q 27 (a 2 (c 2 (c (- 5 (q . 1)) ())))) (q 1 . 0xcccccccccccccccccc)) 1) 1)) 8 | -------------------------------------------------------------------------------- /benchmark/loop_not.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02ffff03ff05ffff01ff1bffff02ff02ffff04ff02ffff04ffff11ff05ffff010180ff8080808080ffff01ff0189cccccccccccccccccc80ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/loop_sub.clvm: -------------------------------------------------------------------------------- 1 | ; (mod (A) 2 | ; (defun iter (N) 3 | ; (if N (- (iter (- N 1)) N) 10000) 4 | ; ) 5 | ; (iter A) 6 | ; ) 7 | (a (q 2 2 (c 2 (c 5 ()))) (c (q 2 (i 5 (q 17 (a 2 (c 2 (c (- 5 (q . 1)) ()))) 5) (q 1 . 10000)) 1) 1)) 8 | -------------------------------------------------------------------------------- /benchmark/loop_sub.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02ffff03ff05ffff01ff11ffff02ff02ffff04ff02ffff04ffff11ff05ffff010180ff80808080ff0580ffff01ff0182271080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/loop_xor.clvm: -------------------------------------------------------------------------------- 1 | ; (mod (A) 2 | ; (defun iter (N) 3 | ; (if N (logxor (iter (- N 1)) N) 0xccccccc) 4 | ; ) 5 | ; (iter A) 6 | ; ) 7 | (a (q 2 2 (c 2 (c 5 ()))) (c (q 2 (i 5 (q 26 (a 2 (c 2 (c (- 5 (q . 1)) ()))) 5) (q 1 . 
0x0ccccccc)) 1) 1)) 8 | -------------------------------------------------------------------------------- /benchmark/loop_xor.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ff80808080ffff04ffff01ff02ffff03ff05ffff01ff1affff02ff02ffff04ff02ffff04ffff11ff05ffff010180ff80808080ff0580ffff01ff01840ccccccc80ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/matrix-multiply.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (m1 m2) 2 | ; (defun get_col (m idx) 3 | ; (if (= idx 0) (f m) (get_col (r m) (- idx 1))) 4 | ; ) 5 | ; (defun dot_product (r1 r2) 6 | ; (if r1 (+ (* (f r1) (f r2)) (dot_product (r r1) (r r2))) ()) 7 | ; ) 8 | ; (defun column (m idx) 9 | ; (if m (c (get_col (f m) idx) (column (r m) idx)) ()) 10 | ; ) 11 | ; (defun list_len (row) 12 | ; (if row (+ 1 (list_len (r row))) 0) 13 | ; ) 14 | ; (defun width (m) 15 | ; (list_len (f m)) 16 | ; ) 17 | ; (defun transpose_impl (m idx rest) 18 | ; (if (= -1 idx) 19 | ; rest 20 | ; (transpose_impl m (- idx 1) (c (column m idx) rest)) 21 | ; ) 22 | ; ) 23 | ; (defun transpose (m) 24 | ; (transpose_impl m (- (width m) 1) ()) 25 | ; ) 26 | ; (defun mul_row (r1 m1) 27 | ; (if m1 (c (dot_product r1 (f m1)) (mul_row r1 (r m1))) ()) 28 | ; ) 29 | ; (defun matrix_mul_transposed (m1 m2) 30 | ; (if m1 (c (mul_row (f m1) m2) (matrix_mul_transposed (r m1) m2)) ()) 31 | ; ) 32 | ; (defun matrix_mul (m1 m2) 33 | ; (matrix_mul_transposed m1 (transpose m2)) 34 | ; ) 35 | ; 36 | ; (matrix_mul m1 m2) 37 | ;) 38 | 39 | (a (q 2 60 (c 2 (c 5 (c 11 (q))))) (c (q (((a (i 5 (q 4 (a 20 (c 2 (c 9 (c 11 (q))))) (a 16 (c 2 (c 13 (c 11 (q)))))) (q 1)) 1) 2 (i 5 (q 16 (* 9 19) (a 24 (c 2 (c 13 (c 27 (q)))))) (q 1)) 1) (a (i (= 11 (q)) (q . 9) (q 2 20 (c 2 (c 13 (c (- 11 (q . 1)) (q)))))) 1) (a (i 5 (q 16 (q . 1) (a 44 (c 2 (c 13 (q))))) (q 1)) 1) 2 18 (c 2 (c 5 (c (a 22 (c 2 (c 11 (q)))) (q))))) ((a (i 5 (q 4 (a 26 (c 2 (c 9 (c 11 (q))))) (a 18 (c 2 (c 13 (c 11 (q)))))) (q 1)) 1) 2 (i 11 (q 4 (a 24 (c 2 (c 5 (c 19 (q))))) (a 26 (c 2 (c 5 (c 27 (q)))))) (q 1)) 1) (a 46 (c 2 (c 5 (c (- (a 62 (c 2 (c 5 (q)))) (q . 1)) (q ()))))) (a (i (= (q . -1) 11) (q . 23) (q 2 46 (c 2 (c 5 (c (- 11 (q . 
1)) (c (c (a 16 (c 2 (c 5 (c 11 (q))))) 23) (q))))))) 1) 2 44 (c 2 (c 9 (q)))) 1)) 40 | -------------------------------------------------------------------------------- /benchmark/matrix-multiply.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff3cffff04ff02ffff04ff05ffff04ff0bffff018080808080ffff04ffff01ffffffff02ffff03ff05ffff01ff04ffff02ff14ffff04ff02ffff04ff09ffff04ff0bffff018080808080ffff02ff10ffff04ff02ffff04ff0dffff04ff0bffff01808080808080ffff01ff018080ff0180ff02ffff03ff05ffff01ff10ffff12ff09ff1380ffff02ff18ffff04ff02ffff04ff0dffff04ff1bffff01808080808080ffff01ff018080ff0180ffff02ffff03ffff09ff0bffff018080ffff0109ffff01ff02ff14ffff04ff02ffff04ff0dffff04ffff11ff0bffff010180ffff01808080808080ff0180ffff02ffff03ff05ffff01ff10ffff0101ffff02ff2cffff04ff02ffff04ff0dffff018080808080ffff01ff018080ff0180ff02ff12ffff04ff02ffff04ff05ffff04ffff02ff16ffff04ff02ffff04ff0bffff0180808080ffff018080808080ffffff02ffff03ff05ffff01ff04ffff02ff1affff04ff02ffff04ff09ffff04ff0bffff018080808080ffff02ff12ffff04ff02ffff04ff0dffff04ff0bffff01808080808080ffff01ff018080ff0180ff02ffff03ff0bffff01ff04ffff02ff18ffff04ff02ffff04ff05ffff04ff13ffff018080808080ffff02ff1affff04ff02ffff04ff05ffff04ff1bffff01808080808080ffff01ff018080ff0180ffff02ff2effff04ff02ffff04ff05ffff04ffff11ffff02ff3effff04ff02ffff04ff05ffff0180808080ffff010180ffff01ff808080808080ffff02ffff03ffff09ffff0181ffff0b80ffff0117ffff01ff02ff2effff04ff02ffff04ff05ffff04ffff11ff0bffff010180ffff04ffff04ffff02ff10ffff04ff02ffff04ff05ffff04ff0bffff018080808080ff1780ffff0180808080808080ff0180ff02ff2cffff04ff02ffff04ff09ffff0180808080ff018080 -------------------------------------------------------------------------------- /benchmark/point-pow.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (point pow) 2 | ; (defun point_power (input pow) 3 | ; (if (= pow 1) input (point_add input (point_power input (- pow 1)))) 4 | ; ) 5 | ; (point_power point pow) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (c 11 (q))))) (c (q 2 (i (= 11 (q . 1)) (q . 5) (q 29 5 (a 2 (c 2 (c 5 (c (- 11 (q . 
1)) (q))))))) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/point-pow.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff04ff0bffff018080808080ffff04ffff01ff02ffff03ffff09ff0bffff010180ffff0105ffff01ff1dff05ffff02ff02ffff04ff02ffff04ff05ffff04ffff11ff0bffff010180ffff0180808080808080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/pubkey-tree.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (tree) 2 | ; (defun pubkey-tree (tree) 3 | ; (if (l tree) (pubkey_for_exp (concat (pubkey-tree (f tree)) (pubkey-tree (r tree)))) (pubkey_for_exp tree)) 4 | ; ) 5 | ; (pubkey-tree tree) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (l 5) (q 30 (concat (a 2 (c 2 (c 9 (q)))) (a 2 (c 2 (c 13 (q)))))) (q 30 5)) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/pubkey-tree.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff07ff0580ffff01ff1effff0effff02ff02ffff04ff02ffff04ff09ffff0180808080ffff02ff02ffff04ff02ffff04ff0dffff01808080808080ffff01ff1eff058080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/shift-left.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (value iter) 2 | ; (defun shift (value iter) 3 | ; (if (> iter 1) (lsh (shift value (- iter 1)) 65535) value) 4 | ; ) 5 | ; (shift value iter) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (c 11 (q))))) (c (q 2 (i (> 11 (q . 1)) (q 23 (a 2 (c 2 (c 5 (c (- 11 (q . 1)) (q))))) (q . 0x00ffff)) (q . 5)) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/shift-left.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff04ff0bffff018080808080ffff04ffff01ff02ffff03ffff15ff0bffff010180ffff01ff17ffff02ff02ffff04ff02ffff04ff05ffff04ffff11ff0bffff010180ffff018080808080ffff018300ffff80ffff010580ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/substr-tree.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (string) 2 | ; (defun split (input) 3 | ; (if (> (strlen input) 1) 4 | ; (c 5 | ; (split (substr input 0 (/ (strlen input) 2))) 6 | ; (split (substr input (/ (strlen input) 2) (strlen input)))) 7 | ; input) 8 | ; ) 9 | ; (split string) 10 | ;) 11 | 12 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (> (strlen 5) (q . 1)) (q 4 (a 2 (c 2 (c (substr 5 (q) (/ (strlen 5) (q . 2))) (q)))) (a 2 (c 2 (c (substr 5 (/ (strlen 5) (q . 2)) (strlen 5)) (q))))) (q . 
5)) 1) 1)) 13 | -------------------------------------------------------------------------------- /benchmark/substr-tree.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff15ffff0dff0580ffff010180ffff01ff04ffff02ff02ffff04ff02ffff04ffff0cff05ffff0180ffff13ffff0dff0580ffff01028080ffff0180808080ffff02ff02ffff04ff02ffff04ffff0cff05ffff13ffff0dff0580ffff010280ffff0dff058080ffff018080808080ffff010580ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/substr.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (string) 2 | ; (defun explode (input) 3 | ; (if (> 10 (strlen input)) 4 | ; (c input ()) 5 | ; (c (substr input 0 9) (explode (substr input 9 (strlen input)))) 6 | ; ) 7 | ; ) 8 | ; (explode string) 9 | ;) 10 | 11 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (> (q . 10) (strlen 5)) (q 4 5 (q)) (q 4 (substr 5 (q) (q . 9)) (a 2 (c 2 (c (substr 5 (q . 9) (strlen 5)) (q)))))) 1) 1)) 12 | -------------------------------------------------------------------------------- /benchmark/substr.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff15ffff010affff0dff058080ffff01ff04ff05ffff018080ffff01ff04ffff0cff05ffff0180ffff010980ffff02ff02ffff04ff02ffff04ffff0cff05ffff0109ffff0dff058080ffff01808080808080ff0180ff018080 -------------------------------------------------------------------------------- /benchmark/sum-tree.clvm: -------------------------------------------------------------------------------- 1 | ;(mod (tree) 2 | ; (defun sum (tree) 3 | ; (if (l tree) (+ (sum (f tree)) (sum (r tree))) tree) 4 | ; ) 5 | ; (sum tree) 6 | ;) 7 | 8 | (a (q 2 2 (c 2 (c 5 (q)))) (c (q 2 (i (l 5) (q 16 (a 2 (c 2 (c 9 (q)))) (a 2 (c 2 (c 13 (q))))) (q . 
5)) 1) 1)) 9 | -------------------------------------------------------------------------------- /benchmark/sum-tree.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff07ff0580ffff01ff10ffff02ff02ffff04ff02ffff04ff09ffff0180808080ffff02ff02ffff04ff02ffff04ff0dffff018080808080ffff010580ff0180ff018080 -------------------------------------------------------------------------------- /benchmarks.txt: -------------------------------------------------------------------------------- 1 | cost scale: 6.425771579565472 2 | base cost scale: 2.3786823529411767 3 | arg cost scale: 10.418449612403101 4 | opcode: point_add (29) 5 | time: base: 41998.35ns per-arg: 126783.69ns 6 | cost: base: 99901 per-arg: 1320889 7 | opcode: g1_subtract (49) 8 | time: base: 33643.21ns per-arg: 134817.90ns 9 | cost: base: 80026 per-arg: 1404594 10 | opcode: g1_multiply (50) 11 | time: base: 584923.32ns per-byte: 1.63ns 12 | cost: base: 1391347 per-byte: 10 13 | opcode: g1_negate (51) 14 | time: base: 132316.16ns 15 | cost: base: 850233 16 | opcode: g2_add (52) 17 | time: base: 42873.58ns per-arg: 366003.29ns 18 | cost: base: 101983 per-arg: 3813187 19 | opcode: g2_subtract (53) 20 | time: base: 36975.90ns per-arg: 363488.86ns 21 | cost: base: 87954 per-arg: 3786990 22 | opcode: g2_multiply (54) 23 | time: base: 1735736.81ns per-byte: 0.74ns 24 | cost: base: 4128767 per-byte: 5 25 | opcode: g2_negate (55) 26 | time: base: 369885.46ns 27 | cost: base: 2376799 28 | opcode: g1_map (56) 29 | time: base: 164252.24ns per-byte: 0.53ns 30 | cost: base: 390704 per-byte: 3 31 | opcode: g2_map (57) 32 | time: base: 681595.49ns per-byte: 0.54ns 33 | cost: base: 1621299 per-byte: 3 34 | opcode: bls_pairing_identity (58) 35 | time: base: 1145375.18ns per-arg: 911304.13ns 36 | cost: base: 7359919 per-arg: 5855832 37 | opcode: bls_verify (59) 38 | time: base: 1938870.02ns per-arg: 1221101.59ns 39 | cost: base: 12458736 per-arg: 7846520 40 | 41 | cost scale: 6.425771579565472 42 | base cost scale: 2.3786823529411767 43 | arg cost scale: 10.418449612403101 44 | opcode: point_add (29) 45 | time: base: 37398.40ns per-arg: 133388.74ns 46 | cost: base: 88959 per-arg: 1389704 47 | opcode: g1_subtract (49) 48 | time: base: 36666.02ns per-arg: 132895.06ns 49 | cost: base: 87217 per-arg: 1384560 50 | opcode: g1_multiply (50) 51 | time: base: 593355.66ns per-byte: 1.62ns 52 | cost: base: 1411405 per-byte: 10 53 | opcode: g1_negate (51) 54 | time: base: 130703.17ns 55 | cost: base: 839869 56 | opcode: g2_add (52) 57 | time: base: 31357.44ns per-arg: 378523.05ns 58 | cost: base: 74589 per-arg: 3943623 59 | opcode: g2_subtract (53) 60 | time: base: 41581.11ns per-arg: 374396.98ns 61 | cost: base: 98908 per-arg: 3900636 62 | opcode: g2_multiply (54) 63 | time: base: 1765758.41ns per-byte: 0.76ns 64 | cost: base: 4200178 per-byte: 5 65 | opcode: g2_negate (55) 66 | time: base: 372016.59ns 67 | cost: base: 2390494 68 | opcode: g1_map (56) 69 | time: base: 167532.75ns per-byte: 0.54ns 70 | cost: base: 398507 per-byte: 3 71 | opcode: g2_map (57) 72 | time: base: 701718.64ns per-byte: 0.55ns 73 | cost: base: 1669166 per-byte: 4 74 | opcode: bls_pairing_identity (58) 75 | time: base: 1160938.98ns per-arg: 932062.80ns 76 | cost: base: 7459929 per-arg: 5989223 77 | opcode: bls_verify (59) 78 | time: base: 2036563.53ns per-arg: 1237270.72ns 79 | cost: base: 13086492 per-arg: 7950419 80 | 81 | cost scale: 6.425771579565472 82 | base cost scale: 
2.3786823529411767 83 | arg cost scale: 10.418449612403101 84 | opcode: point_add (29) 85 | time: base: 37881.47ns per-arg: 133342.41ns 86 | cost: base: 90108 per-arg: 1389221 87 | opcode: g1_subtract (49) 88 | time: base: 38600.39ns per-arg: 133076.76ns 89 | cost: base: 91818 per-arg: 1386454 90 | opcode: g1_multiply (50) 91 | time: base: 593521.07ns per-byte: 1.68ns 92 | cost: base: 1411798 per-byte: 11 93 | opcode: g1_negate (51) 94 | time: base: 133263.80ns 95 | cost: base: 856323 96 | opcode: g2_add (52) 97 | time: base: 38400.39ns per-arg: 376056.20ns 98 | cost: base: 91342 per-arg: 3917923 99 | opcode: g2_subtract (53) 100 | time: base: 33696.89ns per-arg: 374538.00ns 101 | cost: base: 80154 per-arg: 3902105 102 | opcode: g2_multiply (54) 103 | time: base: 1807908.67ns per-byte: 0.78ns 104 | cost: base: 4300440 per-byte: 5 105 | opcode: g2_negate (55) 106 | time: base: 379750.25ns 107 | cost: base: 2440188 108 | opcode: g1_map (56) 109 | time: base: 167897.64ns per-byte: 0.54ns 110 | cost: base: 399375 per-byte: 3 111 | opcode: g2_map (57) 112 | time: base: 697674.73ns per-byte: 0.55ns 113 | cost: base: 1659547 per-byte: 4 114 | opcode: bls_pairing_identity (58) 115 | time: base: 1254807.99ns per-arg: 941277.54ns 116 | cost: base: 8063110 per-arg: 6048434 117 | opcode: bls_verify (59) 118 | time: base: 2023084.92ns per-arg: 1262964.77ns 119 | cost: base: 12999882 per-arg: 8115523 120 | 121 | -------------------------------------------------------------------------------- /docs/new-operator-checklist.md: -------------------------------------------------------------------------------- 1 | # Adding new operators 2 | 3 | --- 4 | 5 | There are two approaches to soft-forking in support of new operators: 6 | 7 | 1. Adding a new extension to the `softfork` operator (e.g. the BLS operators) 8 | 2. Assigning meaning to a previously unknown operator. Pick an opcode whose 9 | cost matches the cost you want your operator to have. The cost of unknown 10 | operators is defined by a formula, specified 11 | [here](https://github.com/Chia-Network/clvm_rs/blob/main/src/more_ops.rs#L156-L182). 12 | 13 | Approach (2) only works for operators that unconditionally return nil and 14 | raise in case of an error, i.e. it can be used for "assert-style" operators that validate 15 | something (see the sketch further down this checklist). 16 | 17 | Follow this checklist when adding operators: 18 | 19 | - Add test cases in a new file under `op-tests/`. Make sure to include all 20 | possible ways the operator(s) can fail. 21 | - If relevant, write a script that generates test vectors, printing them into a 22 | file under `op-tests/` (see `tools/generate-bls-tests.py`). This is to ensure 23 | the new operator's behavior matches at least one other implementation. 24 | - Include the new operators in the fuzzer `fuzz/fuzz_targets/operators.rs`. 25 | - Include the new operators and their signatures in `tools/src/bin/generate-fuzz-corpus.rs`. 26 | Make sure to run this and fuzz for some time before landing the PR. 27 | - Extend `benchmark-clvm-cost.rs` to include benchmarks for the new operator, 28 | to establish its cost. 29 | - The opcode decoding and dispatching happen in `src/chia_dialect.rs`. 30 | - Add support for the new operators in `src/test_ops.rs` `parse_atom()`, to 31 | compile the name of the operator to its corresponding opcode. 32 | - If the operator(s) are part of an extension to `softfork`, add another value 33 | to the `OperatorSet` enum. 
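As a rough point of reference for the steps in this checklist, this is the shape such an operator takes. The sketch below is illustrative only and is not part of the repository: the name `op_assert_foo`, its 32-byte length check and its cost constant are made up, while `get_args`, `atom`, `err` and the `Response` signature are the helpers actually used by the operators in `src/core_ops.rs` and `src/keccak256_ops.rs`.

```rust
use crate::allocator::{Allocator, NodePtr};
use crate::cost::Cost;
use crate::err_utils::err;
use crate::op_utils::{atom, get_args};
use crate::reduction::{Reduction, Response};

// hypothetical cost; the real value would be established with
// benchmark-clvm-cost.rs as described above
const ASSERT_FOO_COST: Cost = 1000;

// an "assert-style" operator (approach 2): it unconditionally returns nil on
// success and raises on failure
pub fn op_assert_foo(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response {
    // exactly one argument, which must be an atom
    let [arg] = get_args::<1>(a, input, "assert_foo")?;
    let blob = atom(a, arg, "assert_foo")?;
    if blob.as_ref().len() == 32 {
        // success: nil is the only value ever returned
        Ok(Reduction(ASSERT_FOO_COST, a.nil()))
    } else {
        // failure: raise, pointing at the offending argument
        err(arg, "assert_foo failed")
    }
}
```

The opcode it is bound to, the flag that gates it and its final cost all come from the remaining checklist items.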
34 | - Add a new flag (in `src/chia_dialect.rs`) that controls whether the 35 | operators are activated or not. This is required in order for the chain to exist 36 | in a state _before_ your soft-fork has activated, and to behave consistently with 37 | versions of the node that don't know about your new operators. 38 | Make sure the value of the flag does not collide with any of the flags in 39 | [chia_rs](https://github.com/Chia-Network/chia_rs/blob/main/crates/chia-consensus/src/gen/flags.rs). 40 | This is a quirk: both of these repos share the same flag space. 41 | - Once a soft-fork has activated, if everything on chain before the softfork is 42 | compatible with the new rules (which is likely and ought to be the ambition 43 | with all soft-forks), all logic surrounding activating or deactivating the 44 | soft-fork should be removed. 45 | - Expose the new flag(s) to python in chia_rs. 46 | -------------------------------------------------------------------------------- /fuzz/.gitignore: -------------------------------------------------------------------------------- 1 | corpus 2 | artifacts 3 | -------------------------------------------------------------------------------- /fuzz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "clvm_rs-fuzz" 3 | version = "0.14.0" 4 | authors = ["Arvid Norberg "] 5 | publish = false 6 | edition = "2021" 7 | 8 | [package.metadata] 9 | cargo-fuzz = true 10 | 11 | [dependencies] 12 | libfuzzer-sys = { workspace = true } 13 | clvmr = { workspace = true, features = ["counters"] } 14 | chia-sha2 = { workspace = true } 15 | hex = { workspace = true } 16 | arbitrary = { workspace = true } 17 | 18 | [[bin]] 19 | name = "fuzz_run_program" 20 | path = "fuzz_targets/run_program.rs" 21 | test = false 22 | doc = false 23 | 24 | [[bin]] 25 | name = "fuzz_serialized_length" 26 | path = "fuzz_targets/serialized_length.rs" 27 | test = false 28 | doc = false 29 | 30 | [[bin]] 31 | name = "fuzz_serialized_length_trusted" 32 | path = "fuzz_targets/serialized_length_trusted.rs" 33 | test = false 34 | doc = false 35 | 36 | [[bin]] 37 | name = "fuzz_deserialize" 38 | path = "fuzz_targets/deserialize.rs" 39 | test = false 40 | doc = false 41 | 42 | [[bin]] 43 | name = "fuzz_deserialize_br" 44 | path = "fuzz_targets/deserialize_br.rs" 45 | test = false 46 | doc = false 47 | 48 | [[bin]] 49 | name = "fuzz_tree_hash" 50 | path = "fuzz_targets/tree_hash.rs" 51 | test = false 52 | doc = false 53 | 54 | [[bin]] 55 | name = "fuzz_deserialize_br_rand_tree" 56 | path = "fuzz_targets/deserialize_br_rand_tree.rs" 57 | test = false 58 | doc = false 59 | 60 | [[bin]] 61 | name = "fuzz_parse_triples" 62 | path = "fuzz_targets/parse_triples.rs" 63 | test = false 64 | doc = false 65 | 66 | [[bin]] 67 | name = "operators" 68 | path = "fuzz_targets/operators.rs" 69 | test = false 70 | doc = false 71 | 72 | [[bin]] 73 | name = "allocator" 74 | path = "fuzz_targets/allocator.rs" 75 | test = false 76 | doc = false 77 | 78 | [[bin]] 79 | name = "keccak" 80 | path = "fuzz_targets/keccak.rs" 81 | test = false 82 | doc = false 83 | 84 | [[bin]] 85 | name = "object-cache" 86 | path = "fuzz_targets/object_cache.rs" 87 | test = false 88 | doc = false 89 | 90 | [[bin]] 91 | name = "serializer" 92 | path = "fuzz_targets/serializer.rs" 93 | test = false 94 | doc = false 95 | 96 | [[bin]] 97 | name = "incremental-serializer" 98 | path = "fuzz_targets/incremental_serializer.rs" 99 | test = false 100 | doc = false 101 | 102 | [[bin]] 
103 | name = "serializer-cmp" 104 | path = "fuzz_targets/serializer_cmp.rs" 105 | test = false 106 | doc = false 107 | 108 | [[bin]] 109 | name = "tree-cache" 110 | path = "fuzz_targets/tree_cache.rs" 111 | test = false 112 | doc = false 113 | 114 | [[bin]] 115 | name = "canonical-serialization" 116 | path = "fuzz_targets/canonical_serialization.rs" 117 | test = false 118 | doc = false 119 | 120 | [[bin]] 121 | name = "canonical-serialization-br" 122 | path = "fuzz_targets/canonical_serialization_br.rs" 123 | test = false 124 | doc = false 125 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/allocator.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use libfuzzer_sys::fuzz_target; 3 | 4 | use clvmr::allocator::fits_in_small_atom; 5 | use clvmr::{Allocator, NodePtr}; 6 | 7 | fn run_tests(a: &mut Allocator, atom1: NodePtr, data: &[u8]) { 8 | assert_eq!(a.atom(atom1).as_ref(), data); 9 | assert_eq!(a.atom_len(atom1), data.len()); 10 | 11 | let canonical = data != [0] 12 | && (data.len() < 2 || data[0] != 0 || (data[1] & 0x80) != 0) 13 | && (data.len() < 2 || data[0] != 0xff || (data[1] & 0x80) == 0); 14 | 15 | // small_number 16 | if let Some(val) = a.small_number(atom1) { 17 | let atom2 = a.new_small_number(val).expect("new_small_number()"); 18 | assert_eq!(a.atom(atom1), a.atom(atom2)); 19 | assert_eq!(a.atom(atom2).as_ref(), data); 20 | assert!(a.atom_eq(atom1, atom2)); 21 | assert_eq!(a.number(atom1), val.into()); 22 | assert_eq!(a.number(atom2), val.into()); 23 | assert_eq!(a.atom_len(atom2), data.len()); 24 | assert!(canonical); 25 | assert_eq!(fits_in_small_atom(data), Some(val)); 26 | } else { 27 | assert_eq!(fits_in_small_atom(data), None); 28 | let val = a.number(atom1); 29 | assert!(!canonical || val < 0.into() || val > ((1 << 26) - 1).into()); 30 | } 31 | 32 | // number 33 | let val = a.number(atom1); 34 | 35 | let atom3 = a.new_number(val.clone()).expect("new_number()"); 36 | 37 | assert_eq!(a.number(atom3), val); 38 | // if the atom is not in canonical integer form we don't expect it to stay 39 | // the same once we "launder" it through a BigInt. 
40 | if !canonical { 41 | assert!(a.atom(atom3).as_ref() != data); 42 | assert!(a.atom_len(atom3) < data.len()); 43 | assert!(!a.atom_eq(atom1, atom3)); 44 | } else { 45 | assert_eq!(a.atom(atom3).as_ref(), data); 46 | assert_eq!(a.atom_len(atom3), data.len()); 47 | assert!(a.atom_eq(atom1, atom3)); 48 | } 49 | } 50 | 51 | fuzz_target!(|data: &[u8]| { 52 | let mut a = Allocator::new(); 53 | let atom1 = a.new_atom(data).expect("new_atom()"); 54 | run_tests(&mut a, atom1, data); 55 | 56 | let atom1 = a 57 | .new_concat(data.len(), &[a.nil(), atom1, a.nil()]) 58 | .expect("new_concat()"); 59 | run_tests(&mut a, atom1, data); 60 | }); 61 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/canonical_serialization.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | mod make_tree; 3 | 4 | use clvmr::serde::is_canonical_serialization; 5 | use clvmr::serde::node_to_bytes; 6 | use clvmr::Allocator; 7 | use libfuzzer_sys::fuzz_target; 8 | use make_tree::make_tree_limits; 9 | 10 | fuzz_target!(|data: &[u8]| { 11 | let mut unstructured = arbitrary::Unstructured::new(data); 12 | let mut a = Allocator::new(); 13 | let (tree, _) = make_tree_limits(&mut a, &mut unstructured, 1000, false); 14 | 15 | let buffer = node_to_bytes(&a, tree).expect("internal error, failed to serialize"); 16 | 17 | // out serializer should always produce canonical serialization 18 | assert!(is_canonical_serialization(&buffer)); 19 | }); 20 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/canonical_serialization_br.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | mod make_tree; 3 | 4 | use clvmr::serde::is_canonical_serialization; 5 | use clvmr::serde::node_to_bytes_backrefs; 6 | use clvmr::Allocator; 7 | use libfuzzer_sys::fuzz_target; 8 | use make_tree::make_tree_limits; 9 | 10 | fuzz_target!(|data: &[u8]| { 11 | let mut unstructured = arbitrary::Unstructured::new(data); 12 | let mut a = Allocator::new(); 13 | let (tree, _) = make_tree_limits(&mut a, &mut unstructured, 1000, true); 14 | 15 | let buffer = node_to_bytes_backrefs(&a, tree).expect("internal error, failed to serialize"); 16 | 17 | // out serializer should always produce canonical serialization 18 | assert!(is_canonical_serialization(&buffer)); 19 | }); 20 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/deserialize.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::allocator::Allocator; 3 | use clvmr::serde::node_from_bytes; 4 | use libfuzzer_sys::fuzz_target; 5 | 6 | fuzz_target!(|data: &[u8]| { 7 | let mut allocator = Allocator::new(); 8 | let _program = match node_from_bytes(&mut allocator, data) { 9 | Err(_) => { 10 | return; 11 | } 12 | Ok(r) => r, 13 | }; 14 | }); 15 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/deserialize_br.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | 3 | mod node_eq; 4 | 5 | use clvmr::allocator::Allocator; 6 | use clvmr::serde::node_from_bytes_backrefs; 7 | use clvmr::serde::node_from_bytes_backrefs_old; 8 | use libfuzzer_sys::fuzz_target; 9 | 10 | fuzz_target!(|data: &[u8]| { 11 | let mut allocator = Allocator::new(); 12 | let res1 = node_from_bytes_backrefs(&mut allocator, data); 13 | let node_count = 
allocator.pair_count(); 14 | let res2 = node_from_bytes_backrefs_old(&mut allocator, data); 15 | // check that the new implementation creates the same number of pair nodes as the old one 16 | assert_eq!(node_count * 2, allocator.pair_count()); 17 | match (res1, res2) { 18 | (Err(_e1), Err(_e2)) => { 19 | // both failed, that's fine 20 | return; 21 | } 22 | (Ok(n1), Ok(n2)) => { 23 | assert!(node_eq::node_eq(&allocator, n1, n2)); 24 | } 25 | _ => { 26 | panic!("mismatching results"); 27 | } 28 | } 29 | }); 30 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/deserialize_br_rand_tree.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | 3 | mod make_tree; 4 | 5 | use clvmr::allocator::Allocator; 6 | use clvmr::serde::node_from_bytes_backrefs; 7 | use clvmr::serde::node_to_bytes_backrefs; 8 | use libfuzzer_sys::fuzz_target; 9 | 10 | fuzz_target!(|data: &[u8]| { 11 | let mut allocator = Allocator::new(); 12 | let mut unstructured = arbitrary::Unstructured::new(data); 13 | 14 | let (program, _) = make_tree::make_tree(&mut allocator, &mut unstructured); 15 | 16 | let b1 = node_to_bytes_backrefs(&allocator, program).unwrap(); 17 | 18 | let mut allocator = Allocator::new(); 19 | let program = node_from_bytes_backrefs(&mut allocator, &b1).unwrap(); 20 | 21 | let b2 = node_to_bytes_backrefs(&allocator, program).unwrap(); 22 | if b1 != b2 { 23 | panic!("b1 and b2 do not match"); 24 | } 25 | }); 26 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/fuzzing_utils.rs: -------------------------------------------------------------------------------- 1 | use chia_sha2::Sha256; 2 | use clvmr::allocator::{Allocator, NodePtr, SExp}; 3 | use std::collections::hash_map::Entry; 4 | use std::collections::HashMap; 5 | 6 | #[allow(dead_code)] 7 | fn hash_atom(buf: &[u8]) -> [u8; 32] { 8 | let mut ctx = Sha256::new(); 9 | ctx.update([1_u8]); 10 | ctx.update(buf); 11 | ctx.finalize() 12 | } 13 | 14 | #[allow(dead_code)] 15 | fn hash_pair(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] { 16 | let mut ctx = Sha256::new(); 17 | ctx.update([2_u8]); 18 | ctx.update(left); 19 | ctx.update(right); 20 | ctx.finalize() 21 | } 22 | 23 | #[allow(dead_code)] 24 | enum TreeOp { 25 | SExp(NodePtr), 26 | Cons(NodePtr), 27 | } 28 | 29 | #[allow(dead_code)] 30 | pub fn tree_hash(a: &Allocator, node: NodePtr) -> [u8; 32] { 31 | let mut hashes = Vec::<[u8; 32]>::new(); 32 | let mut ops = vec![TreeOp::SExp(node)]; 33 | let mut cache = HashMap::::new(); 34 | 35 | while let Some(op) = ops.pop() { 36 | match op { 37 | TreeOp::SExp(node) => match cache.entry(node) { 38 | Entry::Occupied(e) => hashes.push(*e.get()), 39 | Entry::Vacant(e) => match a.sexp(node) { 40 | SExp::Atom => { 41 | let hash = hash_atom(a.atom(node).as_ref()); 42 | e.insert(hash); 43 | hashes.push(hash); 44 | } 45 | SExp::Pair(left, right) => { 46 | ops.push(TreeOp::Cons(node)); 47 | ops.push(TreeOp::SExp(left)); 48 | ops.push(TreeOp::SExp(right)); 49 | } 50 | }, 51 | }, 52 | TreeOp::Cons(node) => { 53 | let first = hashes.pop().unwrap(); 54 | let rest = hashes.pop().unwrap(); 55 | match cache.entry(node) { 56 | Entry::Occupied(e) => hashes.push(*e.get()), 57 | Entry::Vacant(e) => { 58 | let hash = hash_pair(&first, &rest); 59 | e.insert(hash); 60 | hashes.push(hash); 61 | } 62 | } 63 | } 64 | } 65 | } 66 | 67 | assert!(hashes.len() == 1); 68 | hashes[0] 69 | } 70 | 71 | #[allow(dead_code)] 72 | pub fn visit_tree(a: 
&Allocator, node: NodePtr, mut visit: impl FnMut(&Allocator, NodePtr)) { 73 | let mut nodes = vec![node]; 74 | let mut visited_index = 0; 75 | 76 | while nodes.len() > visited_index { 77 | match a.sexp(nodes[visited_index]) { 78 | SExp::Atom => {} 79 | SExp::Pair(left, right) => { 80 | nodes.push(left); 81 | nodes.push(right); 82 | } 83 | } 84 | visited_index += 1; 85 | } 86 | 87 | // visit nodes bottom-up (right to left). 88 | for node in nodes.into_iter().rev() { 89 | visit(a, node); 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/keccak.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::keccak256_ops::op_keccak256; 3 | use clvmr::{reduction::Reduction, Allocator, NodePtr}; 4 | use libfuzzer_sys::fuzz_target; 5 | 6 | fuzz_target!(|data: &[u8]| { 7 | let mut a = Allocator::new(); 8 | let blob = a.new_atom(data).expect("failed to create atom"); 9 | let args = a 10 | .new_pair(blob, NodePtr::NIL) 11 | .expect("failed to create pair"); 12 | let Reduction(cost, node) = op_keccak256(&mut a, args, 11000000000).expect("keccak256 failed"); 13 | assert!(cost >= 210); 14 | assert!(node.is_atom()); 15 | assert_eq!(a.atom_len(node), 32); 16 | }); 17 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/node_eq.rs: -------------------------------------------------------------------------------- 1 | use clvmr::{Allocator, NodePtr, SExp}; 2 | use std::collections::HashSet; 3 | 4 | /// compare two CLVM trees. Returns true if they are identical, false otherwise 5 | pub fn node_eq(allocator: &Allocator, lhs: NodePtr, rhs: NodePtr) -> bool { 6 | let mut stack = vec![(lhs, rhs)]; 7 | let mut visited = HashSet::::new(); 8 | 9 | while let Some((l, r)) = stack.pop() { 10 | match (allocator.sexp(l), allocator.sexp(r)) { 11 | (SExp::Pair(ll, lr), SExp::Pair(rl, rr)) => { 12 | if !visited.insert(l) { 13 | continue; 14 | } 15 | stack.push((lr, rr)); 16 | stack.push((ll, rl)); 17 | } 18 | (SExp::Atom, SExp::Atom) => { 19 | if !allocator.atom_eq(l, r) { 20 | return false; 21 | } 22 | } 23 | _ => { 24 | return false; 25 | } 26 | } 27 | } 28 | true 29 | } 30 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/object_cache.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | mod fuzzing_utils; 3 | mod make_tree; 4 | mod pick_node; 5 | mod serialized_len; 6 | 7 | use clvmr::serde::{serialized_length, treehash, ObjectCache}; 8 | use clvmr::Allocator; 9 | use fuzzing_utils::tree_hash; 10 | use libfuzzer_sys::fuzz_target; 11 | use serialized_len::compute_serialized_len; 12 | 13 | fuzz_target!(|data: &[u8]| { 14 | let mut unstructured = arbitrary::Unstructured::new(data); 15 | let mut allocator = Allocator::new(); 16 | let (tree, node_count) = 17 | make_tree::make_tree_limits(&mut allocator, &mut unstructured, 10_000, true); 18 | 19 | let mut hash_cache = ObjectCache::new(treehash); 20 | let mut length_cache = ObjectCache::new(serialized_length); 21 | 22 | let node_idx = unstructured.int_in_range(0..=node_count).unwrap_or(5) as i32; 23 | let node = pick_node::pick_node(&allocator, tree, node_idx); 24 | 25 | let expect_hash = tree_hash(&allocator, node); 26 | let expect_len = compute_serialized_len(&allocator, node); 27 | let computed_hash = hash_cache 28 | .get_or_calculate(&allocator, &node, None) 29 | .unwrap(); 30 | let computed_len = 
length_cache 31 | .get_or_calculate(&allocator, &node, None) 32 | .unwrap(); 33 | assert_eq!(computed_hash, &expect_hash); 34 | assert_eq!(computed_len, &expect_len); 35 | }); 36 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/operators.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use libfuzzer_sys::fuzz_target; 3 | 4 | mod make_tree; 5 | 6 | use clvmr::allocator::{Allocator, NodePtr}; 7 | use clvmr::bls_ops::{ 8 | op_bls_g1_multiply, op_bls_g1_negate, op_bls_g1_subtract, op_bls_g2_add, op_bls_g2_multiply, 9 | op_bls_g2_negate, op_bls_g2_subtract, op_bls_map_to_g1, op_bls_map_to_g2, 10 | op_bls_pairing_identity, op_bls_verify, 11 | }; 12 | use clvmr::core_ops::{op_cons, op_eq, op_first, op_if, op_listp, op_raise, op_rest}; 13 | use clvmr::cost::Cost; 14 | use clvmr::keccak256_ops::op_keccak256; 15 | use clvmr::more_ops::{ 16 | op_add, op_all, op_any, op_ash, op_coinid, op_concat, op_div, op_divmod, op_gr, op_gr_bytes, 17 | op_logand, op_logior, op_lognot, op_logxor, op_lsh, op_mod, op_modpow, op_multiply, op_not, 18 | op_point_add, op_pubkey_for_exp, op_sha256, op_strlen, op_substr, op_subtract, 19 | }; 20 | use clvmr::reduction::{EvalErr, Response}; 21 | use clvmr::secp_ops::{op_secp256k1_verify, op_secp256r1_verify}; 22 | 23 | type Opf = fn(&mut Allocator, NodePtr, Cost) -> Response; 24 | 25 | const FUNS: [Opf; 46] = [ 26 | op_if as Opf, 27 | op_cons as Opf, 28 | op_first as Opf, 29 | op_rest as Opf, 30 | op_listp as Opf, 31 | op_raise as Opf, 32 | op_eq as Opf, 33 | op_sha256 as Opf, 34 | op_add as Opf, 35 | op_subtract as Opf, 36 | op_multiply as Opf, 37 | op_div as Opf, 38 | op_divmod as Opf, 39 | op_substr as Opf, 40 | op_strlen as Opf, 41 | op_point_add as Opf, 42 | op_pubkey_for_exp as Opf, 43 | op_concat as Opf, 44 | op_gr as Opf, 45 | op_gr_bytes as Opf, 46 | op_logand as Opf, 47 | op_logior as Opf, 48 | op_logxor as Opf, 49 | op_lognot as Opf, 50 | op_ash as Opf, 51 | op_lsh as Opf, 52 | op_not as Opf, 53 | op_any as Opf, 54 | op_all as Opf, 55 | // the BLS extension 56 | op_coinid as Opf, 57 | op_bls_g1_subtract as Opf, 58 | op_bls_g1_multiply as Opf, 59 | op_bls_g1_negate as Opf, 60 | op_bls_g2_add as Opf, 61 | op_bls_g2_subtract as Opf, 62 | op_bls_g2_multiply as Opf, 63 | op_bls_g2_negate as Opf, 64 | op_bls_map_to_g1 as Opf, 65 | op_bls_map_to_g2 as Opf, 66 | op_bls_pairing_identity as Opf, 67 | op_bls_verify as Opf, 68 | op_mod as Opf, 69 | op_modpow as Opf, 70 | // Secp operators 71 | op_secp256k1_verify as Opf, 72 | op_secp256r1_verify as Opf, 73 | // keccak operator 74 | op_keccak256 as Opf, 75 | ]; 76 | 77 | fuzz_target!(|data: &[u8]| { 78 | let mut unstructured = arbitrary::Unstructured::new(data); 79 | let mut allocator = Allocator::new(); 80 | let (args, _) = make_tree::make_tree(&mut allocator, &mut unstructured); 81 | 82 | let allocator_checkpoint = allocator.checkpoint(); 83 | 84 | for op in FUNS { 85 | for max_cost in [11000000, 1100000, 110000, 10, 1, 0] { 86 | allocator.restore_checkpoint(&allocator_checkpoint); 87 | match op(&mut allocator, args, max_cost) { 88 | Err(EvalErr(n, msg)) => { 89 | assert!(!msg.contains("internal error")); 90 | // make sure n is a valid node in the allocator 91 | allocator.sexp(n); 92 | } 93 | Ok(n) => { 94 | // make sure n is a valid node in the allocator 95 | allocator.sexp(n.1); 96 | // TODO: it would be nice to be able to assert something 97 | // like this, but not all operators check this very strictly 98 | // 
(the main check is done by the interpreter). The main 99 | // challenge is the malloc_cost(), which happens at the end; 100 | // if the cost of allocating the return value is what makes 101 | // it cross the max_cost limit, the operator still succeeds 102 | // assert!(n.0 <= max_cost + 5000); 103 | } 104 | } 105 | } 106 | } 107 | }); 108 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/parse_triples.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::serde::parse_triples; 3 | use libfuzzer_sys::fuzz_target; 4 | use std::io::Cursor; 5 | 6 | fuzz_target!(|data: &[u8]| { 7 | let mut cursor = Cursor::new(data); 8 | let _triples = parse_triples(&mut cursor, true); 9 | }); 10 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/pick_node.rs: -------------------------------------------------------------------------------- 1 | use clvmr::{Allocator, NodePtr, SExp}; 2 | use std::collections::HashSet; 3 | 4 | pub fn pick_node(a: &Allocator, root: NodePtr, mut node_idx: i32) -> NodePtr { 5 | let mut stack = vec![root]; 6 | let mut seen_node = HashSet::<NodePtr>::new(); 7 | 8 | while let Some(node) = stack.pop() { 9 | if node_idx == 0 { 10 | return node; 11 | } 12 | if !seen_node.insert(node) { 13 | continue; 14 | } 15 | node_idx -= 1; 16 | if let SExp::Pair(left, right) = a.sexp(node) { 17 | stack.push(left); 18 | stack.push(right); 19 | } 20 | } 21 | NodePtr::NIL 22 | } 23 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/run_program.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use libfuzzer_sys::fuzz_target; 3 | 4 | mod make_tree; 5 | 6 | use clvmr::allocator::Allocator; 7 | use clvmr::chia_dialect::{ChiaDialect, MEMPOOL_MODE, NO_UNKNOWN_OPS}; 8 | use clvmr::cost::Cost; 9 | use clvmr::reduction::Reduction; 10 | use clvmr::run_program::run_program; 11 | 12 | fuzz_target!(|data: &[u8]| { 13 | let mut unstructured = arbitrary::Unstructured::new(data); 14 | let mut allocator = Allocator::new(); 15 | let (program, _) = make_tree::make_tree_limits(&mut allocator, &mut unstructured, 10_000, true); 16 | let (args, _) = make_tree::make_tree_limits(&mut allocator, &mut unstructured, 10_000, true); 17 | 18 | let allocator_checkpoint = allocator.checkpoint(); 19 | 20 | for flags in [0, NO_UNKNOWN_OPS, MEMPOOL_MODE] { 21 | let dialect = ChiaDialect::new(flags); 22 | allocator.restore_checkpoint(&allocator_checkpoint); 23 | 24 | let Ok(Reduction(cost, _node)) = run_program( 25 | &mut allocator, 26 | &dialect, 27 | program, 28 | args, 29 | 11_000_000_000 as Cost, 30 | ) else { 31 | continue; 32 | }; 33 | assert!(cost < 11_000_000_000); 34 | } 35 | }); 36 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/serialized_len.rs: -------------------------------------------------------------------------------- 1 | use clvmr::serde::node_to_bytes; 2 | use clvmr::{Allocator, NodePtr, SExp}; 3 | use std::collections::hash_map::Entry; 4 | use std::collections::HashMap; 5 | 6 | enum Op { 7 | Cons(NodePtr), 8 | Traverse(NodePtr), 9 | } 10 | 11 | pub fn compute_serialized_len(a: &Allocator, n: NodePtr) -> u64 { 12 | let mut stack: Vec<u64> = vec![]; 13 | let mut op_stack = vec![Op::Traverse(n)]; 14 | let mut cache = HashMap::<NodePtr, u64>::new(); 15 | 16 | while let Some(op) = op_stack.pop() { 17 | match op { 18 | Op::Cons(node) => 
{ 19 | let right = stack.pop().expect("internal error, empty stack"); 20 | let left = stack.pop().expect("internal error, empty stack"); 21 | match cache.entry(node) { 22 | Entry::Occupied(e) => stack.push(*e.get()), 23 | Entry::Vacant(e) => { 24 | e.insert(1 + left + right); 25 | stack.push(1 + left + right); 26 | } 27 | } 28 | } 29 | Op::Traverse(node) => match cache.entry(node) { 30 | Entry::Occupied(e) => stack.push(*e.get()), 31 | Entry::Vacant(e) => match a.sexp(node) { 32 | SExp::Pair(left, right) => { 33 | op_stack.push(Op::Cons(node)); 34 | op_stack.push(Op::Traverse(left)); 35 | op_stack.push(Op::Traverse(right)); 36 | } 37 | SExp::Atom => { 38 | let ser_len = node_to_bytes(a, node) 39 | .expect("internal error, failed to serialize") 40 | .len() as u64; 41 | e.insert(ser_len); 42 | stack.push(ser_len); 43 | } 44 | }, 45 | }, 46 | } 47 | } 48 | assert_eq!(stack.len(), 1); 49 | *stack.last().expect("internal error, empty stack") 50 | } 51 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/serialized_length.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::serde::node_from_bytes_backrefs; 3 | use clvmr::serde::node_to_bytes; 4 | use clvmr::serde::serialized_length_from_bytes; 5 | use clvmr::Allocator; 6 | use libfuzzer_sys::fuzz_target; 7 | 8 | fuzz_target!(|data: &[u8]| { 9 | let len = serialized_length_from_bytes(data); 10 | 11 | let mut allocator = Allocator::new(); 12 | let program = node_from_bytes_backrefs(&mut allocator, data); 13 | 14 | match (len, program) { 15 | (Ok(_), Ok(_)) => { 16 | // this is expected 17 | } 18 | (Err(_), Err(_)) => { 19 | // this is expected 20 | } 21 | (Ok(len), Err(e)) => { 22 | panic!("discrepancy between serialized_length and node_from_bytes_backrefs().\n {len}\n{e}"); 23 | } 24 | (Err(e), Ok(program)) => { 25 | panic!( 26 | "discrepancy between serialized_length and node_from_bytes_backrefs().\n {e}\n{:?}", 27 | node_to_bytes(&allocator, program) 28 | ); 29 | } 30 | } 31 | }); 32 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/serialized_length_trusted.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::serde::serialized_length_from_bytes_trusted; 3 | use libfuzzer_sys::fuzz_target; 4 | 5 | fuzz_target!(|data: &[u8]| { 6 | let _len = match serialized_length_from_bytes_trusted(data) { 7 | Err(_) => { 8 | return; 9 | } 10 | Ok(r) => r, 11 | }; 12 | }); 13 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/serializer.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | 3 | mod make_tree; 4 | mod node_eq; 5 | 6 | use clvmr::allocator::Allocator; 7 | use clvmr::serde::{node_from_bytes_backrefs, node_to_bytes_backrefs, Serializer}; 8 | use node_eq::node_eq; 9 | 10 | use libfuzzer_sys::fuzz_target; 11 | 12 | // serializing with the regular compressed serializer should yield the same 13 | // result as using the incremental one (as long as it's in a single add() call). 
14 | fuzz_target!(|data: &[u8]| { 15 | let mut unstructured = arbitrary::Unstructured::new(data); 16 | let mut allocator = Allocator::new(); 17 | let (program, _) = make_tree::make_tree(&mut allocator, &mut unstructured); 18 | 19 | let b1 = node_to_bytes_backrefs(&allocator, program).unwrap(); 20 | 21 | let mut ser = Serializer::new(None); 22 | let (done, _) = ser.add(&allocator, program).unwrap(); 23 | assert!(done); 24 | let b2 = ser.into_inner(); 25 | 26 | // make sure both serializations are valid, and can be parsed to produce 27 | // the same tree 28 | let b1 = node_from_bytes_backrefs(&mut allocator, &b1).unwrap(); 29 | let b2 = node_from_bytes_backrefs(&mut allocator, &b2).unwrap(); 30 | assert!(node_eq(&allocator, b1, program)); 31 | assert!(node_eq(&allocator, b1, b2)); 32 | }); 33 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/tree_cache.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | mod make_tree; 3 | mod node_eq; 4 | mod serialized_len; 5 | 6 | use clvmr::reduction::Reduction; 7 | use clvmr::serde::TreeCache; 8 | use clvmr::traverse_path::traverse_path; 9 | use clvmr::{Allocator, NodePtr, SExp}; 10 | use libfuzzer_sys::fuzz_target; 11 | use make_tree::make_tree_limits; 12 | use node_eq::node_eq; 13 | use serialized_len::compute_serialized_len; 14 | 15 | #[derive(PartialEq, Eq)] 16 | enum ReadOp { 17 | Parse, 18 | Cons(NodePtr), 19 | } 20 | 21 | fuzz_target!(|data: &[u8]| { 22 | let mut unstructured = arbitrary::Unstructured::new(data); 23 | let mut allocator = Allocator::new(); 24 | let (tree, node_count) = make_tree_limits(&mut allocator, &mut unstructured, 1000, true); 25 | // uncomment this if you find an interesting test case to add to the benchmark 26 | /* 27 | let tmp = clvmr::serde::node_to_bytes_backrefs(&allocator, tree).unwrap(); 28 | std::fs::write("serialized-benchmark.generator", &tmp).expect("fs::write()"); 29 | */ 30 | let mut tree_cache = TreeCache::default(); 31 | tree_cache.update(&allocator, tree); 32 | 33 | let mut read_op_stack = vec![ReadOp::Parse]; 34 | let mut write_stack = vec![tree]; 35 | 36 | // we count down until this hits zero, then we know which node to test 37 | let mut node_idx = unstructured.int_in_range(0..=node_count).unwrap_or(5) as i32; 38 | let mut node_to_test: Option<(NodePtr, usize)> = None; 39 | 40 | // the stack, as it's built from the parser's point of view. This is what 41 | // the back-references make lookups into. 
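// The parse stack is itself represented as a CLVM list in the allocator, so the paths returned by tree_cache.find_path() can be resolved against it with traverse_path() below.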
42 | let mut parse_stack = NodePtr::NIL; 43 | 44 | let mut stack_depth = 0; 45 | while let Some(node_to_write) = write_stack.pop() { 46 | let op = read_op_stack.pop(); 47 | assert!(op == Some(ReadOp::Parse)); 48 | 49 | // make sure we find a valid path to the node we're testing 50 | // This is the main test of the fuzzer 51 | if let Some((node, serialized_len)) = node_to_test { 52 | if let Some(path) = tree_cache.find_path(node) { 53 | let Ok(Reduction(_, found_node)) = traverse_path(&allocator, &path, parse_stack) 54 | else { 55 | panic!("invalid path"); 56 | }; 57 | // make sure the path we returned actually points to an atom 58 | // that's equivalent 59 | assert!(node_eq(&allocator, found_node, node)); 60 | assert!(serialized_len > path.len()); 61 | } 62 | } 63 | 64 | match tree_cache.find_path(node_to_write) { 65 | Some(_path) => { 66 | tree_cache.push(node_to_write); 67 | parse_stack = allocator.new_pair(node_to_write, parse_stack).unwrap(); 68 | stack_depth += 1; 69 | } 70 | None => match allocator.sexp(node_to_write) { 71 | SExp::Pair(left, right) => { 72 | write_stack.push(right); 73 | write_stack.push(left); 74 | read_op_stack.push(ReadOp::Cons(node_to_write)); 75 | read_op_stack.push(ReadOp::Parse); 76 | read_op_stack.push(ReadOp::Parse); 77 | } 78 | SExp::Atom => { 79 | tree_cache.push(node_to_write); 80 | parse_stack = allocator.new_pair(node_to_write, parse_stack).unwrap(); 81 | stack_depth += 1; 82 | if node_idx == 0 { 83 | let serialized_len = 84 | compute_serialized_len(&allocator, node_to_write) as usize; 85 | node_to_test = Some((node_to_write, serialized_len)); 86 | } 87 | node_idx -= 1; 88 | } 89 | }, 90 | } 91 | while let Some(ReadOp::Cons(node)) = read_op_stack.last() { 92 | let node = *node; 93 | read_op_stack.pop(); 94 | tree_cache.pop2_and_cons(node); 95 | if node_idx == 0 { 96 | let serialized_len = compute_serialized_len(&allocator, node_to_write) as usize; 97 | node_to_test = Some((node_to_write, serialized_len)); 98 | } 99 | node_idx -= 1; 100 | 101 | let SExp::Pair(right, rest) = allocator.sexp(parse_stack) else { 102 | panic!("internal error"); 103 | }; 104 | let SExp::Pair(left, rest) = allocator.sexp(rest) else { 105 | panic!("internal error"); 106 | }; 107 | let new_root = allocator.new_pair(left, right).unwrap(); 108 | parse_stack = allocator.new_pair(new_root, rest).unwrap(); 109 | stack_depth -= 1; 110 | } 111 | } 112 | assert_eq!(stack_depth, 1); 113 | }); 114 | -------------------------------------------------------------------------------- /fuzz/fuzz_targets/tree_hash.rs: -------------------------------------------------------------------------------- 1 | #![no_main] 2 | use clvmr::serde::tree_hash_from_stream; 3 | use libfuzzer_sys::fuzz_target; 4 | use std::io::Cursor; 5 | 6 | fuzz_target!(|data: &[u8]| { 7 | let mut cursor = Cursor::<&[u8]>::new(data); 8 | let _ = tree_hash_from_stream(&mut cursor); 9 | }); 10 | -------------------------------------------------------------------------------- /op-tests/test-bls-zk.txt: -------------------------------------------------------------------------------- 1 | ; sample zksnark 2 | ; This was generated by tools/src/bin/verify-zksnark.rs 3 | 4 | bls_pairing_identity 0x8fe94ac2d68d39d9207ea0cae4bb2177f7352bd754173ed27bd13b4c156f77f8885458886ee9fbd212719f27a96397c1 0x8a7ecb9c6d6f0af8d922c9b348d686f7f827c5f5d7a53036e5dd6c4cfe088806375d730251df57c03b0eaa41ca2a9cc51817cfd6118c065e9b337e42a6b66621e2ffa79f576ae57dcb4916459b0131d42383b790a4f60c5aeb339b61a78d85a8 
0x829207a7990f9a01be7562fd4740ac7f2e0b51758a3feb0df876c7ba72b17a35968b724af46e51e53ee6b63862061f5b 0x93e02b6052719f607dacd3a088274f65596bd0d09920b61ab5da61bbdc7f5049334cf11213945d57e5ac7d055d042b7e024aa2b2f08f0a91260805272dc51051c6e47ad4fa403b02b4510b647ae3d1770bac0326a805bbefd48056c8c121bdb8 0xb3aedc305adfdbc854aa105c41085618484858e6baa276b176fd89415021f7a0c75ff4f9ec39f482f142f1b54c111448 0xb40acf170629d78244fb753f05fb79578add9217add53996d5de7c3005880c0dea903f851d6be749ebfb81c9721871370ef60428444d76f4ff81515628a4eb63e72c3cd7651a23c4eca109d1d88fec5a53626b36c76407926f308366b5ded1b2 0xb7f61b966f050f306ace1535c2b922ad75d62698b00338f0639a1dfe9b85ea3d8ca08e9b36297d952079b2e1329c19df 0x809468feeddbe3334eae1dd8128cc9b58bd6bb0ac16113df10ff44b780bda1f3f7e8bf6a4d1971790a05e9ac4888e0de10f2dd2ebe44e9d917b0663052eec1a6cc4e50da9454cf7b3d7b276e700f1f0de1049b0a7eb0ce80e1e7db402d41eb67 => 0 | 7800000 5 | -------------------------------------------------------------------------------- /op-tests/test-core-ops.txt: -------------------------------------------------------------------------------- 1 | ; the format for these test cases are: 2 | ; expression => expected result | expected-cost 3 | 4 | i ( ) => FAIL 5 | i ( 1 ) => FAIL 6 | i => FAIL 7 | i 1 => FAIL 8 | i 1 1 => FAIL 9 | i 1 1 1 1 => FAIL 10 | i 1 "true" "false" => "true" | 33 11 | i 0 "true" "false" => "false" | 33 12 | i "" "true" "false" => "false" | 33 13 | i 10 "true" "false" => "true" | 33 14 | i -1 "true" "false" => "true" | 33 15 | i (1 2) "true" "false" => "true" | 33 16 | i (1) "true" "false" => "true" | 33 17 | i () "true" "false" => "false" | 33 18 | 19 | ; tests ported from clvm 20 | i 100 200 300 => 200 | 33 21 | i ( ) 200 300 => 300 | 33 22 | i 1 200 300 => 200 | 33 23 | 24 | c => FAIL 25 | c 1 => FAIL 26 | c 1 ( 2 ) "garbage" => FAIL 27 | c 100 ( ) => ( 100 ) | 50 28 | c 1 ( 2 ) => ( 1 2 ) | 50 29 | c 0 ( 2 ) => ( 0 2 ) | 50 30 | c 1 2 => ( 1 . 2 ) | 50 31 | c 1 ( 2 3 4 ) => ( 1 2 3 4 ) | 50 32 | c ( 1 2 3 ) ( 4 5 6 ) => ( ( 1 2 3 ) 4 5 6 ) | 50 33 | c 100 ( ( 500 ( 200 300 400 ) ) ) => ( 100 ( 500 ( 200 300 400 ) ) ) | 50 34 | 35 | f 0 => FAIL 36 | f 1 => FAIL 37 | f ( ) => FAIL 38 | f ( 1 2 3 ) 1 => FAIL 39 | f ( 1 2 3 ) => 1 | 30 40 | f ( ( 1 2 ) 3 ) => ( 1 2 ) | 30 41 | f ( 100 ) => 100 | 30 42 | 43 | r 1 => FAIL 44 | r => FAIL 45 | r ( 1 2 3 ) 12 => FAIL 46 | r 0 => FAIL 47 | r ( 1 2 3 ) => ( 2 3 ) | 30 48 | r ( 1 . 2 ) => 2 | 30 49 | 50 | ; tests ported from clvm 51 | r ( 100 ) => ( ) | 30 52 | r ( 100 200 300 ) => ( 200 300 ) | 30 53 | r ( ) => FAIL 54 | 55 | l => FAIL 56 | l ( 1 2 ) 1 => FAIL 57 | l ( 1 2 3 ) => 1 | 19 58 | l 1 => 0 | 19 59 | l 0 => 0 | 19 60 | l ( 0 . 0 ) => 1 | 19 61 | l ( 1 . 
2 ) => 1 | 19 62 | 63 | ; tests ported from clvm 64 | l 100 => ( ) | 19 65 | l ( 100 ) => 1 | 19 66 | l => FAIL 67 | l 100 200 => FAIL 68 | l 50 => ( ) | 19 69 | 70 | x => FAIL 71 | x ( "msg" ) => FAIL 72 | x "error_message" => FAIL 73 | 74 | = => FAIL 75 | = 0x00 => FAIL 76 | = 0x00 0x00 0x00 => FAIL 77 | = ( "foo" ) "foo" => FAIL 78 | = "foo" ( "foo" ) => FAIL 79 | = 0 ( 0 ) => FAIL 80 | = ( 0 ) 0 => FAIL 81 | = 10 => FAIL 82 | 83 | = 0 0 => 1 | 117 84 | = 1 1 => 1 | 119 85 | = 0 0 => 1 | 117 86 | = 0 0x00 => 0 | 118 87 | = 0x00 0 => 0 | 118 88 | = 0xff 0xffff => 0 | 120 89 | = -1 -1 => 1 | 119 90 | = 1 1 => 1 | 119 91 | = 10 10 => 1 | 119 92 | = 2 3 => 0 | 119 93 | = 256 256 => 1 | 121 94 | = 255 -1 => 0 | 120 95 | = 65535 -1 => 0 | 121 96 | = 65535 65535 => 1 | 123 97 | = 65536 65536 => 1 | 123 98 | = 4294967295 4294967295 => 1 | 127 99 | = 4294967296 4294967296 => 1 | 127 100 | = 2147483647 2147483647 => 1 | 125 101 | = 2147483648 2147483648 => 1 | 127 102 | = 0x00000000000000000000000000000000000000000000000000000010 0x00000000000000000000000000000000000000000000000000000010 => 1 | 173 103 | = 0x00000000000000000000000000000000000000000000000000000010 0x00000000000000000000000000000000000000000000000000000020 => 0 | 173 104 | = 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010 0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010 => 1 | 807 105 | -------------------------------------------------------------------------------- /op-tests/test-keccak256.txt: -------------------------------------------------------------------------------- 1 | ; the format for these test cases are: 2 | ; expression => expected result | expected-cost 3 | 4 | keccak256 "foobar" => 0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e | 542 5 | keccak256 "f" "oobar" => 0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e | 702 6 | keccak256 "f" "o" "obar" => 0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e | 862 7 | keccak256 "f" "o" "o" "bar" => 0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e | 1022 8 | keccak256 "f" "o" "o" "b" "a" "r" => 0x38d18acb67d25c8bb9942764b62f18e17054f66a817bd4295423adf9ed98873e | 1342 9 | 10 | keccak256 "foo" => 
0x41b1a0649752af1b28b3dc29a1556eee781e4a4c3a1f7f53f90fa834de098c4d | 536 11 | keccak256 "fo" "o" => 0x41b1a0649752af1b28b3dc29a1556eee781e4a4c3a1f7f53f90fa834de098c4d | 696 12 | keccak256 "f" "o" "o" => 0x41b1a0649752af1b28b3dc29a1556eee781e4a4c3a1f7f53f90fa834de098c4d | 856 13 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "clvm-rs", 3 | "lockfileVersion": 3, 4 | "requires": true, 5 | "packages": { 6 | "": { 7 | "devDependencies": { 8 | "prettier": "^3.5.3" 9 | } 10 | }, 11 | "node_modules/prettier": { 12 | "version": "3.5.3", 13 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", 14 | "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", 15 | "dev": true, 16 | "license": "MIT", 17 | "bin": { 18 | "prettier": "bin/prettier.cjs" 19 | }, 20 | "engines": { 21 | "node": ">=14" 22 | }, 23 | "funding": { 24 | "url": "https://github.com/prettier/prettier?sponsor=1" 25 | } 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "private": true, 3 | "scripts": { 4 | "prettier": "prettier --check .", 5 | "fmt": "prettier --write ." 6 | }, 7 | "devDependencies": { 8 | "prettier": "^3.5.3" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /src/core_ops.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::{Allocator, NodePtr, SExp}; 2 | use crate::cost::Cost; 3 | use crate::err_utils::err; 4 | use crate::op_utils::{first, get_args, nilp, rest}; 5 | use crate::reduction::{EvalErr, Reduction, Response}; 6 | 7 | const FIRST_COST: Cost = 30; 8 | const IF_COST: Cost = 33; 9 | // Cons cost lowered from 245. 
It only allocates a pair, which is small 10 | const CONS_COST: Cost = 50; 11 | // Rest cost lowered from 77 since it doesn't allocate anything and it should be 12 | // the same as first 13 | const REST_COST: Cost = 30; 14 | const LISTP_COST: Cost = 19; 15 | const EQ_BASE_COST: Cost = 117; 16 | const EQ_COST_PER_BYTE: Cost = 1; 17 | 18 | pub fn op_if(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 19 | let [cond, affirmative, negative] = get_args::<3>(a, input, "i")?; 20 | let chosen_node = if nilp(a, cond) { negative } else { affirmative }; 21 | Ok(Reduction(IF_COST, chosen_node)) 22 | } 23 | 24 | pub fn op_cons(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 25 | let [n1, n2] = get_args::<2>(a, input, "c")?; 26 | let r = a.new_pair(n1, n2)?; 27 | Ok(Reduction(CONS_COST, r)) 28 | } 29 | 30 | pub fn op_first(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 31 | let [n] = get_args::<1>(a, input, "f")?; 32 | Ok(Reduction(FIRST_COST, first(a, n)?)) 33 | } 34 | 35 | pub fn op_rest(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 36 | let [n] = get_args::<1>(a, input, "r")?; 37 | Ok(Reduction(REST_COST, rest(a, n)?)) 38 | } 39 | 40 | pub fn op_listp(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 41 | let [n] = get_args::<1>(a, input, "l")?; 42 | match a.sexp(n) { 43 | SExp::Pair(_, _) => Ok(Reduction(LISTP_COST, a.one())), 44 | _ => Ok(Reduction(LISTP_COST, a.nil())), 45 | } 46 | } 47 | 48 | pub fn op_raise(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 49 | // if given a single argument we should raise the single argument rather 50 | // than the full list of arguments. brun also used to behave this way. 51 | // if the single argument here is a pair then don't throw it unwrapped 52 | // as it'd potentially look the same as a throw of multiple arguments. 
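// For example, (x "err") raises just the atom "err", while (x "a" "b") raises the whole argument list ("a" "b").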
53 | let throw_value = if let Ok([value]) = get_args::<1>(a, input, "") { 54 | match a.sexp(value) { 55 | SExp::Atom => value, 56 | _ => input, 57 | } 58 | } else { 59 | input 60 | }; 61 | 62 | err(throw_value, "clvm raise") 63 | } 64 | 65 | fn ensure_atom(a: &Allocator, n: NodePtr, op: &str) -> Result<(), EvalErr> { 66 | if let SExp::Atom = a.sexp(n) { 67 | Ok(()) 68 | } else { 69 | Err(EvalErr(n, format!("{op} on list"))) 70 | } 71 | } 72 | 73 | pub fn op_eq(a: &mut Allocator, input: NodePtr, _max_cost: Cost) -> Response { 74 | let [s0, s1] = get_args::<2>(a, input, "=")?; 75 | ensure_atom(a, s0, "=")?; 76 | ensure_atom(a, s1, "=")?; 77 | let eq = a.atom_eq(s0, s1); 78 | let cost = EQ_BASE_COST + (a.atom_len(s0) as Cost + a.atom_len(s1) as Cost) * EQ_COST_PER_BYTE; 79 | Ok(Reduction(cost, if eq { a.one() } else { a.nil() })) 80 | } 81 | -------------------------------------------------------------------------------- /src/cost.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::Allocator; 2 | use crate::reduction::EvalErr; 3 | 4 | pub type Cost = u64; 5 | 6 | pub fn check_cost(a: &Allocator, cost: Cost, max_cost: Cost) -> Result<(), EvalErr> { 7 | if cost > max_cost { 8 | Err(EvalErr(a.nil(), "cost exceeded".into())) 9 | } else { 10 | Ok(()) 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /src/dialect.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::{Allocator, NodePtr}; 2 | use crate::cost::Cost; 3 | use crate::reduction::Response; 4 | 5 | /// The set of operators that are available in the dialect. 6 | #[repr(u32)] 7 | #[derive(Clone, Copy, Eq, PartialEq)] 8 | pub enum OperatorSet { 9 | /// Any softfork extensions that are not added yet will be rejected. 10 | Default, 11 | 12 | /// Originally added BLS operators when inside softfork extension 0. 13 | /// The operators have since been hardforked into the main operator set. 14 | Bls, 15 | 16 | /// The keccak256 operator, which is only available inside the softfork guard. 17 | /// This uses softfork extension 1, which does not conflict with the BLS fork. 
18 | Keccak, 19 | } 20 | 21 | pub trait Dialect { 22 | fn quote_kw(&self) -> u32; 23 | fn apply_kw(&self) -> u32; 24 | fn softfork_kw(&self) -> u32; 25 | fn softfork_extension(&self, ext: u32) -> OperatorSet; 26 | fn op( 27 | &self, 28 | allocator: &mut Allocator, 29 | op: NodePtr, 30 | args: NodePtr, 31 | max_cost: Cost, 32 | extensions: OperatorSet, 33 | ) -> Response; 34 | fn allow_unknown_ops(&self) -> bool; 35 | } 36 | -------------------------------------------------------------------------------- /src/err_utils.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::NodePtr; 2 | use crate::reduction::EvalErr; 3 | 4 | pub fn err<T>(node: NodePtr, msg: &str) -> Result<T, EvalErr> { 5 | Err(EvalErr(node, msg.into())) 6 | } 7 | -------------------------------------------------------------------------------- /src/f_table.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use crate::allocator::{Allocator, NodePtr}; 4 | use crate::bls_ops::{ 5 | op_bls_g1_multiply, op_bls_g1_negate, op_bls_g1_subtract, op_bls_g2_add, op_bls_g2_multiply, 6 | op_bls_g2_negate, op_bls_g2_subtract, op_bls_map_to_g1, op_bls_map_to_g2, 7 | op_bls_pairing_identity, op_bls_verify, 8 | }; 9 | use crate::core_ops::{op_cons, op_eq, op_first, op_if, op_listp, op_raise, op_rest}; 10 | use crate::cost::Cost; 11 | use crate::more_ops::{ 12 | op_add, op_all, op_any, op_ash, op_concat, op_div, op_divmod, op_gr, op_gr_bytes, op_logand, 13 | op_logior, op_lognot, op_logxor, op_lsh, op_mod, op_modpow, op_multiply, op_not, op_point_add, 14 | op_pubkey_for_exp, op_sha256, op_strlen, op_substr, op_subtract, 15 | }; 16 | use crate::reduction::Response; 17 | use crate::secp_ops::{op_secp256k1_verify, op_secp256r1_verify}; 18 | 19 | type OpFn = fn(&mut Allocator, NodePtr, Cost) -> Response; 20 | 21 | pub type FLookup = [Option<OpFn>; 256]; 22 | 23 | pub fn opcode_by_name(name: &str) -> Option<OpFn> { 24 | let opcode_lookup: [(OpFn, &str); 44] = [ 25 | (op_if, "op_if"), 26 | (op_cons, "op_cons"), 27 | (op_first, "op_first"), 28 | (op_rest, "op_rest"), 29 | (op_listp, "op_listp"), 30 | (op_raise, "op_raise"), 31 | (op_eq, "op_eq"), 32 | (op_sha256, "op_sha256"), 33 | (op_add, "op_add"), 34 | (op_subtract, "op_subtract"), 35 | (op_multiply, "op_multiply"), 36 | (op_modpow, "op_modpow"), 37 | (op_divmod, "op_divmod"), 38 | (op_mod, "op_mod"), 39 | (op_substr, "op_substr"), 40 | (op_strlen, "op_strlen"), 41 | (op_point_add, "op_point_add"), 42 | (op_pubkey_for_exp, "op_pubkey_for_exp"), 43 | (op_concat, "op_concat"), 44 | (op_gr, "op_gr"), 45 | (op_gr_bytes, "op_gr_bytes"), 46 | (op_logand, "op_logand"), 47 | (op_logior, "op_logior"), 48 | (op_logxor, "op_logxor"), 49 | (op_lognot, "op_lognot"), 50 | (op_ash, "op_ash"), 51 | (op_lsh, "op_lsh"), 52 | (op_not, "op_not"), 53 | (op_any, "op_any"), 54 | (op_all, "op_all"), 55 | (op_div, "op_div"), 56 | (op_bls_g1_subtract, "op_g1_subtract"), 57 | (op_bls_g1_multiply, "op_g1_multiply"), 58 | (op_bls_g1_negate, "op_g1_negate"), 59 | (op_bls_g2_add, "op_g2_add"), 60 | (op_bls_g2_subtract, "op_g2_subtract"), 61 | (op_bls_g2_multiply, "op_g2_multiply"), 62 | (op_bls_g2_negate, "op_g2_negate"), 63 | (op_bls_map_to_g1, "op_g1_map"), 64 | (op_bls_map_to_g2, "op_g2_map"), 65 | (op_bls_pairing_identity, "op_bls_pairing_identity"), 66 | (op_bls_verify, "op_bls_verify"), 67 | (op_secp256k1_verify, "op_secp256k1_verify"), 68 | (op_secp256r1_verify, "op_secp256r1_verify"), 69 | ]; 70 | let name: &[u8] 
= name.as_ref(); 71 | for (f, op) in opcode_lookup.iter() { 72 | let pu8: &[u8] = op.as_ref(); 73 | if pu8 == name { 74 | return Some(*f); 75 | } 76 | } 77 | None 78 | } 79 | 80 | pub fn f_lookup_for_hashmap(opcode_lookup_by_name: HashMap<String, Vec<u8>>) -> FLookup { 81 | let mut f_lookup = [None; 256]; 82 | for (name, idx) in opcode_lookup_by_name.iter() { 83 | if idx.len() == 1 { 84 | let index = idx[0]; 85 | let op = opcode_by_name(name); 86 | assert!(op.is_some(), "can't find native operator {name}"); 87 | f_lookup[index as usize] = op; 88 | } 89 | } 90 | f_lookup 91 | } 92 | -------------------------------------------------------------------------------- /src/keccak256_ops.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::{Allocator, NodePtr}; 2 | use crate::cost::check_cost; 3 | use crate::cost::Cost; 4 | use crate::op_utils::atom; 5 | use crate::op_utils::new_atom_and_cost; 6 | use crate::reduction::Response; 7 | use sha3::{Digest, Keccak256}; 8 | 9 | const KECCAK256_BASE_COST: Cost = 50; 10 | const KECCAK256_COST_PER_ARG: Cost = 160; 11 | const KECCAK256_COST_PER_BYTE: Cost = 2; 12 | 13 | pub fn op_keccak256(a: &mut Allocator, mut input: NodePtr, max_cost: Cost) -> Response { 14 | let mut cost = KECCAK256_BASE_COST; 15 | 16 | let mut byte_count: usize = 0; 17 | let mut hasher = Keccak256::new(); 18 | while let Some((arg, rest)) = a.next(input) { 19 | input = rest; 20 | cost += KECCAK256_COST_PER_ARG; 21 | check_cost( 22 | a, 23 | cost + byte_count as Cost * KECCAK256_COST_PER_BYTE, 24 | max_cost, 25 | )?; 26 | let blob = atom(a, arg, "keccak256")?; 27 | byte_count += blob.as_ref().len(); 28 | hasher.update(blob); 29 | } 30 | cost += byte_count as Cost * KECCAK256_COST_PER_BYTE; 31 | new_atom_and_cost(a, cost, &hasher.finalize()) 32 | } 33 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod allocator; 2 | pub mod bls_ops; 3 | pub mod chia_dialect; 4 | pub mod core_ops; 5 | pub mod cost; 6 | pub mod dialect; 7 | pub mod err_utils; 8 | pub mod f_table; 9 | pub mod keccak256_ops; 10 | pub mod more_ops; 11 | pub mod number; 12 | pub mod op_utils; 13 | pub mod reduction; 14 | pub mod run_program; 15 | pub mod runtime_dialect; 16 | pub mod secp_ops; 17 | pub mod serde; 18 | pub mod traverse_path; 19 | 20 | pub use allocator::{Allocator, Atom, NodePtr, SExp}; 21 | pub use chia_dialect::ChiaDialect; 22 | pub use run_program::run_program; 23 | 24 | pub use chia_dialect::{ENABLE_KECCAK_OPS_OUTSIDE_GUARD, LIMIT_HEAP, MEMPOOL_MODE, NO_UNKNOWN_OPS}; 25 | 26 | #[cfg(feature = "counters")] 27 | pub use run_program::run_program_with_counters; 28 | 29 | #[cfg(feature = "pre-eval")] 30 | pub use run_program::run_program_with_pre_eval; 31 | 32 | #[cfg(feature = "counters")] 33 | pub use run_program::Counters; 34 | 35 | #[cfg(test)] 36 | mod tests; 37 | 38 | #[cfg(test)] 39 | mod test_ops; 40 | -------------------------------------------------------------------------------- /src/reduction.rs: -------------------------------------------------------------------------------- 1 | use std::{fmt, io}; 2 | 3 | use crate::allocator::NodePtr; 4 | use crate::cost::Cost; 5 | 6 | #[derive(Debug, Clone, PartialEq, Eq)] 7 | pub struct EvalErr(pub NodePtr, pub String); 8 | 9 | #[derive(Debug, PartialEq, Eq)] 10 | pub struct Reduction(pub Cost, pub NodePtr); 11 | 12 | pub type Response = Result<Reduction, EvalErr>; 13 | 14 | impl fmt::Display for EvalErr {
15 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 16 | write!(f, "Error at {:?}: {}", self.0, self.1) 17 | } 18 | } 19 | 20 | impl std::error::Error for EvalErr {} 21 | 22 | impl From<EvalErr> for io::Error { 23 | fn from(v: EvalErr) -> Self { 24 | Self::other(v.1) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/runtime_dialect.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::{Allocator, NodePtr}; 2 | use crate::chia_dialect::NO_UNKNOWN_OPS; 3 | use crate::cost::Cost; 4 | use crate::dialect::{Dialect, OperatorSet}; 5 | use crate::err_utils::err; 6 | use crate::f_table::{f_lookup_for_hashmap, FLookup}; 7 | use crate::more_ops::op_unknown; 8 | use crate::reduction::Response; 9 | use std::collections::HashMap; 10 | 11 | pub struct RuntimeDialect { 12 | f_lookup: FLookup, 13 | quote_kw: Vec<u8>, 14 | apply_kw: Vec<u8>, 15 | softfork_kw: Vec<u8>, 16 | flags: u32, 17 | } 18 | 19 | impl RuntimeDialect { 20 | pub fn new( 21 | op_map: HashMap<String, Vec<u8>>, 22 | quote_kw: Vec<u8>, 23 | apply_kw: Vec<u8>, 24 | flags: u32, 25 | ) -> RuntimeDialect { 26 | RuntimeDialect { 27 | f_lookup: f_lookup_for_hashmap(op_map), 28 | quote_kw, 29 | apply_kw, 30 | softfork_kw: vec![36], // softfork opcode 31 | flags, 32 | } 33 | } 34 | } 35 | 36 | impl Dialect for RuntimeDialect { 37 | fn op( 38 | &self, 39 | allocator: &mut Allocator, 40 | o: NodePtr, 41 | argument_list: NodePtr, 42 | max_cost: Cost, 43 | _extensions: OperatorSet, 44 | ) -> Response { 45 | let atom = allocator.atom(o); 46 | let b = atom.as_ref(); 47 | 48 | if b.len() == 1 { 49 | if let Some(f) = self.f_lookup[b[0] as usize] { 50 | return f(allocator, argument_list, max_cost); 51 | } 52 | } 53 | if (self.flags & NO_UNKNOWN_OPS) != 0 { 54 | err(o, "unimplemented operator") 55 | } else { 56 | op_unknown(allocator, o, argument_list, max_cost) 57 | } 58 | } 59 | 60 | fn quote_kw(&self) -> u32 { 61 | self.quote_kw[0] as u32 62 | } 63 | fn apply_kw(&self) -> u32 { 64 | self.apply_kw[0] as u32 65 | } 66 | fn softfork_kw(&self) -> u32 { 67 | self.softfork_kw[0] as u32 68 | } 69 | 70 | fn softfork_extension(&self, _ext: u32) -> OperatorSet { 71 | OperatorSet::Default 72 | } 73 | 74 | fn allow_unknown_ops(&self) -> bool { 75 | (self.flags & NO_UNKNOWN_OPS) == 0 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/secp_ops.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::{Allocator, NodePtr}; 2 | use crate::cost::{check_cost, Cost}; 3 | use crate::err_utils::err; 4 | use crate::op_utils::{atom, get_args}; 5 | use crate::reduction::{Reduction, Response}; 6 | use k256::ecdsa::{Signature as K1Signature, VerifyingKey as K1VerifyingKey}; 7 | use p256::ecdsa::signature::hazmat::PrehashVerifier; 8 | use p256::ecdsa::{Signature as P1Signature, VerifyingKey as P1VerifyingKey}; 9 | 10 | const SECP256R1_VERIFY_COST: Cost = 1850000; 11 | const SECP256K1_VERIFY_COST: Cost = 1300000; 12 | 13 | // expects: pubkey msg sig 14 | pub fn op_secp256r1_verify(a: &mut Allocator, input: NodePtr, max_cost: Cost) -> Response { 15 | let cost = SECP256R1_VERIFY_COST; 16 | check_cost(a, cost, max_cost)?; 17 | 18 | let [pubkey, msg, sig] = get_args::<3>(a, input, "secp256r1_verify")?; 19 | 20 | // first argument is sec1 encoded pubkey 21 | let pubkey = atom(a, pubkey, "secp256r1_verify pubkey")?; 22 | let verifier = P1VerifyingKey::from_sec1_bytes(pubkey.as_ref()) 23 | .or_else(|_|
err(input, "secp256r1_verify pubkey is not valid"))?; 24 | 25 | // second arg is sha256 hash of message 26 | let msg = atom(a, msg, "secp256r1_verify msg")?; 27 | if msg.as_ref().len() != 32 { 28 | return err(input, "secp256r1_verify message digest is not 32 bytes"); 29 | } 30 | 31 | // third arg is a fixed-size signature 32 | let sig = atom(a, sig, "secp256r1_verify sig")?; 33 | let sig = P1Signature::from_slice(sig.as_ref()) 34 | .or_else(|_| err(input, "secp256r1_verify sig is not valid"))?; 35 | 36 | // verify signature 37 | let result = verifier.verify_prehash(msg.as_ref(), &sig); 38 | 39 | if result.is_err() { 40 | err(input, "secp256r1_verify failed") 41 | } else { 42 | Ok(Reduction(cost, a.nil())) 43 | } 44 | } 45 | 46 | // expects: pubkey msg sig 47 | pub fn op_secp256k1_verify(a: &mut Allocator, input: NodePtr, max_cost: Cost) -> Response { 48 | let cost = SECP256K1_VERIFY_COST; 49 | check_cost(a, cost, max_cost)?; 50 | 51 | let [pubkey, msg, sig] = get_args::<3>(a, input, "secp256k1_verify")?; 52 | 53 | // first argument is sec1 encoded pubkey 54 | let pubkey = atom(a, pubkey, "secp256k1_verify pubkey")?; 55 | let verifier = K1VerifyingKey::from_sec1_bytes(pubkey.as_ref()) 56 | .or_else(|_| err(input, "secp256k1_verify pubkey is not valid"))?; 57 | 58 | // second arg is sha256 hash of message 59 | let msg = atom(a, msg, "secp256k1_verify msg")?; 60 | if msg.as_ref().len() != 32 { 61 | return err(input, "secp256k1_verify message digest is not 32 bytes"); 62 | } 63 | 64 | // third arg is a fixed-size signature 65 | let sig = atom(a, sig, "secp256k1_verify sig")?; 66 | let sig = K1Signature::from_slice(sig.as_ref()) 67 | .or_else(|_| err(input, "secp256k1_verify sig is not valid"))?; 68 | 69 | // verify signature 70 | let result = verifier.verify_prehash(msg.as_ref(), &sig); 71 | 72 | if result.is_err() { 73 | err(input, "secp256k1_verify failed") 74 | } else { 75 | Ok(Reduction(cost, a.nil())) 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /src/serde/bitset.rs: -------------------------------------------------------------------------------- 1 | /// This is a simple bitfield used to indicate whether a node has been visited 2 | /// during a tree search or not. We terminate a search path if we've reached the 3 | /// node first via a different (shorter) path. 4 | #[derive(Clone, Default)] 5 | pub struct BitSet { 6 | bits: Vec<usize>, 7 | } 8 | 9 | impl BitSet { 10 | const BITS: usize = usize::BITS as usize; 11 | 12 | /// specify the number of nodes to track 13 | pub fn new(max_idx: u32) -> Self { 14 | let bits = vec![0; (max_idx as usize + Self::BITS) / Self::BITS]; 15 | Self { bits } 16 | } 17 | 18 | /// marks the specified node as visited and returns whether it had already 19 | /// been marked.
20 | pub fn visit(&mut self, idx: u32) -> bool { 21 | let pos = idx as usize / Self::BITS; 22 | let mask = (1_usize) << (idx as usize % Self::BITS); 23 | let ret = self.bits[pos] & mask; 24 | self.bits[pos] |= mask; 25 | ret != 0 26 | } 27 | 28 | pub fn is_visited(&self, idx: u32) -> bool { 29 | let pos = idx as usize / Self::BITS; 30 | let mask = (1_usize) << (idx as usize % Self::BITS); 31 | (self.bits[pos] & mask) != 0 32 | } 33 | 34 | pub fn extend(&mut self, max_idx: u32) { 35 | let new_len = (max_idx as usize + Self::BITS) / Self::BITS; 36 | assert!(max_idx as usize >= self.bits.len()); 37 | self.bits.resize(new_len, 0); 38 | } 39 | } 40 | 41 | #[cfg(test)] 42 | mod tests { 43 | use super::*; 44 | 45 | #[test] 46 | fn test_visited_nodes() { 47 | let mut n = BitSet::new(100); 48 | for i in 0..100 { 49 | assert!(!n.is_visited(i)); 50 | assert!(!n.visit(i)); 51 | assert!(n.is_visited(i)); 52 | assert!(n.visit(i)); 53 | assert!(n.is_visited(i)); 54 | } 55 | } 56 | 57 | #[test] 58 | fn test_visited_nodes_reverse() { 59 | let mut n = BitSet::new(100); 60 | for i in (0..100).rev() { 61 | assert!(!n.is_visited(i)); 62 | assert!(!n.visit(i)); 63 | assert!(n.is_visited(i)); 64 | assert!(n.visit(i)); 65 | assert!(n.is_visited(i)); 66 | } 67 | } 68 | 69 | #[test] 70 | fn test_extend() { 71 | let mut n = BitSet::default(); 72 | n.extend(1); 73 | assert!(!n.is_visited(0)); 74 | assert!(!n.visit(0)); 75 | assert!(n.is_visited(0)); 76 | 77 | n.extend(2); 78 | assert!(n.is_visited(0)); 79 | 80 | assert!(!n.is_visited(1)); 81 | assert!(!n.visit(1)); 82 | assert!(n.is_visited(1)); 83 | 84 | n.extend(100); 85 | assert!(n.is_visited(0)); 86 | assert!(n.is_visited(1)); 87 | 88 | for i in 2..100 { 89 | assert!(!n.is_visited(i)); 90 | assert!(!n.visit(i)); 91 | assert!(n.is_visited(i)); 92 | assert!(n.visit(i)); 93 | assert!(n.is_visited(i)); 94 | } 95 | } 96 | } 97 | -------------------------------------------------------------------------------- /src/serde/bytes32.rs: -------------------------------------------------------------------------------- 1 | use chia_sha2::Sha256; 2 | 3 | pub type Bytes32 = [u8; 32]; 4 | 5 | pub fn hash_blob(blob: &[u8]) -> Bytes32 { 6 | let mut sha256 = Sha256::new(); 7 | sha256.update(blob); 8 | sha256.finalize() 9 | } 10 | 11 | pub fn hash_blobs(blobs: &[&[u8]]) -> Bytes32 { 12 | let mut sha256 = Sha256::new(); 13 | for blob in blobs.iter() { 14 | sha256.update(blob); 15 | } 16 | sha256.finalize() 17 | } 18 | -------------------------------------------------------------------------------- /src/serde/de.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::io::{Cursor, Read}; 3 | 4 | use crate::allocator::{Allocator, NodePtr}; 5 | 6 | use super::parse_atom::parse_atom; 7 | 8 | const CONS_BOX_MARKER: u8 = 0xff; 9 | 10 | #[repr(u8)] 11 | enum ParseOp { 12 | SExp, 13 | Cons, 14 | } 15 | 16 | /// deserialize a clvm node from a `std::io::Cursor` 17 | pub fn node_from_stream(allocator: &mut Allocator, f: &mut Cursor<&[u8]>) -> io::Result<NodePtr> { 18 | let mut values: Vec<NodePtr> = Vec::new(); 19 | let mut ops = vec![ParseOp::SExp]; 20 | 21 | let mut b = [0; 1]; 22 | while let Some(op) = ops.pop() { 23 | match op { 24 | ParseOp::SExp => { 25 | f.read_exact(&mut b)?; 26 | if b[0] == CONS_BOX_MARKER { 27 | ops.push(ParseOp::Cons); 28 | ops.push(ParseOp::SExp); 29 | ops.push(ParseOp::SExp); 30 | } else { 31 | values.push(parse_atom(allocator, b[0], f)?); 32 | } 33 | } 34 | ParseOp::Cons => { 35 | // cons 36 | let v2 = values.pop(); 37 | let
v1 = values.pop(); 38 | values.push(allocator.new_pair(v1.unwrap(), v2.unwrap())?); 39 | } 40 | } 41 | } 42 | Ok(values.pop().unwrap()) 43 | } 44 | 45 | pub fn node_from_bytes(allocator: &mut Allocator, b: &[u8]) -> io::Result<NodePtr> { 46 | let mut buffer = Cursor::new(b); 47 | node_from_stream(allocator, &mut buffer) 48 | } 49 | -------------------------------------------------------------------------------- /src/serde/errors.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Error, ErrorKind}; 2 | 3 | pub fn bad_encoding() -> Error { 4 | Error::new(ErrorKind::InvalidInput, "bad encoding") 5 | } 6 | 7 | pub fn internal_error() -> Error { 8 | Error::new(ErrorKind::InvalidInput, "internal error") 9 | } 10 | -------------------------------------------------------------------------------- /src/serde/identity_hash.rs: -------------------------------------------------------------------------------- 1 | use rand::Rng; 2 | use std::hash::{BuildHasher, Hasher}; 3 | 4 | #[derive(Default, Clone, Copy)] 5 | pub struct IdentityHash(u64, u64); 6 | 7 | impl IdentityHash { 8 | fn new(salt: u64) -> Self { 9 | Self(0, salt) 10 | } 11 | } 12 | 13 | impl Hasher for IdentityHash { 14 | fn finish(&self) -> u64 { 15 | self.0 16 | } 17 | 18 | fn write(&mut self, bytes: &[u8]) { 19 | self.0 = 20 | u64::from_le_bytes(bytes[0..8].try_into().expect("expected 32 byte hashes")) ^ self.1; 21 | } 22 | 23 | fn write_u64(&mut self, _i: u64) { 24 | panic!("This hasher only takes bytes"); 25 | } 26 | } 27 | 28 | #[derive(Clone)] 29 | pub struct RandomState(u64); 30 | 31 | impl Default for RandomState { 32 | fn default() -> Self { 33 | let mut rng = rand::thread_rng(); 34 | Self(rng.gen()) 35 | } 36 | } 37 | 38 | impl BuildHasher for RandomState { 39 | type Hasher = IdentityHash; 40 | 41 | fn build_hasher(&self) -> Self::Hasher { 42 | IdentityHash::new(self.0) 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/serde/mod.rs: -------------------------------------------------------------------------------- 1 | mod bitset; 2 | mod bytes32; 3 | mod de; 4 | mod de_br; 5 | mod de_tree; 6 | mod errors; 7 | mod identity_hash; 8 | mod incremental; 9 | mod object_cache; 10 | mod parse_atom; 11 | mod path_builder; 12 | mod read_cache_lookup; 13 | mod ser; 14 | mod ser_br; 15 | mod serialized_length; 16 | mod tools; 17 | mod tree_cache; 18 | mod utils; 19 | pub mod write_atom; 20 | 21 | #[cfg(test)] 22 | mod test; 23 | 24 | pub use bitset::BitSet; 25 | pub use de::node_from_bytes; 26 | pub use de_br::{ 27 | node_from_bytes_backrefs, node_from_bytes_backrefs_old, node_from_bytes_backrefs_record, 28 | }; 29 | pub use de_tree::{parse_triples, ParsedTriple}; 30 | pub use identity_hash::RandomState; 31 | pub use incremental::{Serializer, UndoState}; 32 | pub use object_cache::{serialized_length, treehash, ObjectCache}; 33 | pub use path_builder::{ChildPos, PathBuilder}; 34 | pub use read_cache_lookup::ReadCacheLookup; 35 | pub use ser::{node_to_bytes, node_to_bytes_limit}; 36 | pub use ser_br::{node_to_bytes_backrefs, node_to_bytes_backrefs_limit}; 37 | pub use serialized_length::{serialized_length_atom, serialized_length_small_number}; 38 | pub use tools::{ 39 | is_canonical_serialization, serialized_length_from_bytes, serialized_length_from_bytes_trusted, 40 | tree_hash_from_stream, 41 | }; 42 | pub use tree_cache::{TreeCache, TreeCacheCheckpoint}; 43 | --------------------------------------------------------------------------------
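Editor's note: the serde module above re-exports the whole (de)serialization API. Below is a minimal usage sketch, not part of the repository; it assumes the crate is consumed under the `clvmr` name used elsewhere in this repo (see tools/Cargo.toml) and only calls functions re-exported by src/serde/mod.rs and src/lib.rs.

use clvmr::serde::{node_from_bytes, node_to_bytes, node_to_bytes_backrefs};
use clvmr::Allocator;

fn main() -> std::io::Result<()> {
    let mut a = Allocator::new();
    // build ((1 . 2) . (1 . 2)); the right subtree repeats the left one
    let one = a.new_atom(&[1])?;
    let two = a.new_atom(&[2])?;
    let pair = a.new_pair(one, two)?;
    let root = a.new_pair(pair, pair)?;

    // plain serialization writes every cons box and atom out in full
    let plain = node_to_bytes(&a, root)?;
    // the back-reference form may replace the repeated subtree with a 0xfe reference
    let compressed = node_to_bytes_backrefs(&a, root)?;
    assert!(compressed.len() <= plain.len());

    // deserializing the plain form yields an equivalent tree
    let _reparsed = node_from_bytes(&mut a, &plain)?;
    Ok(())
}

Note that the allocator errors convert into std::io::Error through the From<EvalErr> impl in src/reduction.rs, which is why `?` works throughout the sketch.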
/src/serde/ser.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::io::Cursor; 3 | use std::io::ErrorKind; 4 | use std::io::Write; 5 | 6 | use super::write_atom::write_atom; 7 | use crate::allocator::{len_for_value, Allocator, NodePtr, NodeVisitor}; 8 | 9 | const CONS_BOX_MARKER: u8 = 0xff; 10 | 11 | pub struct LimitedWriter<W: Write> { 12 | inner: W, 13 | limit: usize, 14 | } 15 | 16 | impl<W: Write> LimitedWriter<W> { 17 | pub fn new(w: W, limit: usize) -> LimitedWriter<W> { 18 | LimitedWriter { inner: w, limit } 19 | } 20 | 21 | pub fn into_inner(self) -> W { 22 | self.inner 23 | } 24 | } 25 | 26 | impl<W: Write> Write for LimitedWriter<W> { 27 | fn write(&mut self, buf: &[u8]) -> io::Result<usize> { 28 | if self.limit < buf.len() { 29 | return Err(ErrorKind::OutOfMemory.into()); 30 | } 31 | let written = self.inner.write(buf)?; 32 | self.limit -= written; 33 | Ok(written) 34 | } 35 | fn flush(&mut self) -> io::Result<()> { 36 | self.inner.flush() 37 | } 38 | } 39 | 40 | /// serialize a node 41 | pub fn node_to_stream<W: Write>(a: &Allocator, node: NodePtr, f: &mut W) -> io::Result<()> { 42 | let mut values: Vec<NodePtr> = vec![node]; 43 | while let Some(v) = values.pop() { 44 | match a.node(v) { 45 | NodeVisitor::Buffer(buf) => write_atom(f, buf)?, 46 | NodeVisitor::U32(val) => { 47 | let buf = val.to_be_bytes(); 48 | let len = len_for_value(val); 49 | write_atom(f, &buf[4 - len..])? 50 | } 51 | NodeVisitor::Pair(left, right) => { 52 | f.write_all(&[CONS_BOX_MARKER])?; 53 | values.push(right); 54 | values.push(left); 55 | } 56 | } 57 | } 58 | Ok(()) 59 | } 60 | 61 | pub fn node_to_bytes_limit(a: &Allocator, node: NodePtr, limit: usize) -> io::Result<Vec<u8>> { 62 | let buffer = Cursor::new(Vec::new()); 63 | let mut writer = LimitedWriter::new(buffer, limit); 64 | node_to_stream(a, node, &mut writer)?; 65 | let vec = writer.into_inner().into_inner(); 66 | Ok(vec) 67 | } 68 | 69 | pub fn node_to_bytes(a: &Allocator, node: NodePtr) -> io::Result<Vec<u8>> { 70 | node_to_bytes_limit(a, node, 2000000) 71 | } 72 | 73 | #[cfg(test)] 74 | mod tests { 75 | use super::*; 76 | 77 | #[test] 78 | fn test_serialize_limit() { 79 | let mut a = Allocator::new(); 80 | 81 | let leaf = a.new_atom(&[1, 2, 3, 4, 5]).unwrap(); 82 | let l1 = a.new_pair(leaf, leaf).unwrap(); 83 | let l2 = a.new_pair(l1, l1).unwrap(); 84 | let l3 = a.new_pair(l2, l2).unwrap(); 85 | 86 | { 87 | let buffer = Cursor::new(Vec::new()); 88 | let mut writer = LimitedWriter::new(buffer, 55); 89 | node_to_stream(&a, l3, &mut writer).unwrap(); 90 | let vec = writer.into_inner().into_inner(); 91 | assert_eq!( 92 | vec, 93 | &[ 94 | 0xff, 0xff, 0xff, 133, 1, 2, 3, 4, 5, 133, 1, 2, 3, 4, 5, 0xff, 133, 1, 2, 3, 95 | 4, 5, 133, 1, 2, 3, 4, 5, 0xff, 0xff, 133, 1, 2, 3, 4, 5, 133, 1, 2, 3, 4, 5, 96 | 0xff, 133, 1, 2, 3, 4, 5, 133, 1, 2, 3, 4, 5 97 | ] 98 | ); 99 | } 100 | 101 | { 102 | let buffer = Cursor::new(Vec::new()); 103 | let mut writer = LimitedWriter::new(buffer, 54); 104 | assert_eq!( 105 | node_to_stream(&a, l3, &mut writer).unwrap_err().kind(), 106 | io::ErrorKind::OutOfMemory 107 | ); 108 | } 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/serde/ser_br.rs: -------------------------------------------------------------------------------- 1 | // Serialization with "back-references" 2 | 3 | use std::io; 4 | use std::io::Cursor; 5 | 6 | use super::object_cache::{serialized_length, treehash, ObjectCache}; 7 | use super::read_cache_lookup::ReadCacheLookup; 8 | use super::write_atom::write_atom; 9 |
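// Editor's note (comment added for this review; not in the original source):
// the writer below mirrors the reader's stack discipline. For every node it
// looks up a tree hash and a serialized length in the two ObjectCaches; when
// ReadCacheLookup can produce a path to an identical subtree that has already
// been written, it emits the BACK_REFERENCE marker (0xfe) followed by an atom
// holding that path, otherwise it writes the node as usual (a 0xff cons marker
// or a plain atom) and records it so later duplicates can refer back to it.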
use crate::allocator::{Allocator, NodePtr, SExp}; 10 | use crate::serde::ser::LimitedWriter; 11 | 12 | const BACK_REFERENCE: u8 = 0xfe; 13 | const CONS_BOX_MARKER: u8 = 0xff; 14 | 15 | #[derive(PartialEq, Eq)] 16 | enum ReadOp { 17 | Parse, 18 | Cons, 19 | } 20 | 21 | pub fn node_to_stream_backrefs<W: io::Write>( 22 | allocator: &Allocator, 23 | node: NodePtr, 24 | f: &mut W, 25 | ) -> io::Result<()> { 26 | let mut read_op_stack: Vec<ReadOp> = vec![ReadOp::Parse]; 27 | let mut write_stack: Vec<NodePtr> = vec![node]; 28 | 29 | let mut read_cache_lookup = ReadCacheLookup::new(); 30 | 31 | let mut thc = ObjectCache::new(treehash); 32 | let mut slc = ObjectCache::new(serialized_length); 33 | 34 | while let Some(node_to_write) = write_stack.pop() { 35 | let op = read_op_stack.pop(); 36 | assert!(op == Some(ReadOp::Parse)); 37 | 38 | let node_serialized_length = *slc 39 | .get_or_calculate(allocator, &node_to_write, None) 40 | .expect("couldn't calculate serialized length"); 41 | let node_tree_hash = thc 42 | .get_or_calculate(allocator, &node_to_write, None) 43 | .expect("can't get treehash"); 44 | match read_cache_lookup.find_path(node_tree_hash, node_serialized_length) { 45 | Some(path) => { 46 | f.write_all(&[BACK_REFERENCE])?; 47 | write_atom(f, &path)?; 48 | read_cache_lookup.push(*node_tree_hash); 49 | } 50 | None => match allocator.sexp(node_to_write) { 51 | SExp::Pair(left, right) => { 52 | f.write_all(&[CONS_BOX_MARKER])?; 53 | write_stack.push(right); 54 | write_stack.push(left); 55 | read_op_stack.push(ReadOp::Cons); 56 | read_op_stack.push(ReadOp::Parse); 57 | read_op_stack.push(ReadOp::Parse); 58 | } 59 | SExp::Atom => { 60 | let atom = allocator.atom(node_to_write); 61 | write_atom(f, atom.as_ref())?; 62 | read_cache_lookup.push(*node_tree_hash); 63 | } 64 | }, 65 | } 66 | while let Some(ReadOp::Cons) = read_op_stack.last() { 67 | read_op_stack.pop(); 68 | read_cache_lookup.pop2_and_cons(); 69 | } 70 | } 71 | Ok(()) 72 | } 73 | 74 | pub fn node_to_bytes_backrefs_limit( 75 | a: &Allocator, 76 | node: NodePtr, 77 | limit: usize, 78 | ) -> io::Result<Vec<u8>> { 79 | let buffer = Cursor::new(Vec::new()); 80 | let mut writer = LimitedWriter::new(buffer, limit); 81 | node_to_stream_backrefs(a, node, &mut writer)?; 82 | let vec = writer.into_inner().into_inner(); 83 | Ok(vec) 84 | } 85 | 86 | pub fn node_to_bytes_backrefs(a: &Allocator, node: NodePtr) -> io::Result<Vec<u8>> { 87 | let mut buffer = Cursor::new(Vec::new()); 88 | node_to_stream_backrefs(a, node, &mut buffer)?; 89 | let vec = buffer.into_inner(); 90 | Ok(vec) 91 | } 92 | 93 | #[cfg(test)] 94 | mod tests { 95 | use super::*; 96 | use crate::serde::node_to_bytes_backrefs; 97 | 98 | #[test] 99 | fn test_serialize_limit() { 100 | let mut a = Allocator::new(); 101 | 102 | let leaf = a.new_atom(&[1, 2, 3, 4, 5]).unwrap(); 103 | let l1 = a.new_pair(leaf, leaf).unwrap(); 104 | let l2 = a.new_pair(l1, l1).unwrap(); 105 | let l3 = a.new_pair(l2, l2).unwrap(); 106 | 107 | let expected = &[255, 255, 255, 133, 1, 2, 3, 4, 5, 254, 2, 254, 2, 254, 2]; 108 | 109 | assert_eq!(node_to_bytes_backrefs(&a, l3).unwrap(), expected); 110 | assert_eq!(node_to_bytes_backrefs_limit(&a, l3, 15).unwrap(), expected); 111 | assert_eq!( 112 | node_to_bytes_backrefs_limit(&a, l3, 14).unwrap_err().kind(), 113 | io::ErrorKind::OutOfMemory 114 | ); 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/serde/serialized_length.rs: -------------------------------------------------------------------------------- 1 | use crate::allocator::len_for_value; 2 |
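// Editor's note (comment added for this review; not in the original source):
// serialized_length_atom() below mirrors the CLVM serialization format: nil and
// single bytes below 0x80 occupy exactly one byte, while longer atoms are
// written as a length prefix followed by the data: one prefix byte for lengths
// below 0x40, two below 0x2000, three below 0x100000, four below 0x8000000 and
// five otherwise. For example, a 0x40-byte atom serializes to 0x42 bytes (two
// prefix bytes plus the data), matching the test case below.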
3 | pub fn serialized_length_atom(buf: &[u8]) -> u32 { 4 | let lb = buf.len() as u32; 5 | if lb == 0 || (lb == 1 && buf[0] < 128) { 6 | 1 7 | } else if lb < 0x40 { 8 | 1 + lb 9 | } else if lb < 0x2000 { 10 | 2 + lb 11 | } else if lb < 0x100000 { 12 | 3 + lb 13 | } else if lb < 0x8000000 { 14 | 4 + lb 15 | } else { 16 | 5 + lb 17 | } 18 | } 19 | 20 | pub fn serialized_length_small_number(val: u32) -> u32 { 21 | len_for_value(val) as u32 + 1 22 | } 23 | 24 | // given an atom with num_bits (counting from the most significant set bit) 25 | // return the number of bytes we need to serialize this atom 26 | pub fn atom_length_bits(num_bits: u64) -> Option<u64> { 27 | if num_bits < 8 { 28 | return Some(1); 29 | } 30 | let num_bytes = num_bits.div_ceil(8); 31 | match num_bytes { 32 | 1..0x40 => Some(1 + num_bytes), 33 | 0x40..0x2000 => Some(2 + num_bytes), 34 | 0x2000..0x10_0000 => Some(3 + num_bytes), 35 | 0x10_0000..0x800_0000 => Some(4 + num_bytes), 36 | 0x800_0000..0x4_0000_0000 => Some(5 + num_bytes), 37 | _ => { 38 | assert!(num_bits >= 0x4_0000_0000 * 8 - 7); 39 | None 40 | } 41 | } 42 | } 43 | 44 | #[cfg(test)] 45 | mod tests { 46 | use super::*; 47 | use rstest::rstest; 48 | 49 | #[rstest] 50 | #[case(&[], 1)] 51 | #[case(&[1], 1)] 52 | #[case(&[0x7f], 1)] 53 | #[case(&[0x80], 2)] 54 | #[case(&[0x81], 2)] 55 | #[case(&[0x80, 0], 3)] 56 | #[case(&[1; 0x3f], 0x40)] 57 | #[case(&[1; 0x40], 0x42)] 58 | fn test_serialized_length_atom(#[case] atom: &[u8], #[case] expect: u32) { 59 | assert_eq!(serialized_length_atom(atom), expect); 60 | } 61 | 62 | #[rstest] 63 | #[case(0, 1)] 64 | #[case(1, 2)] 65 | #[case(0x7f, 2)] 66 | #[case(0x80, 3)] 67 | #[case(0x7fff, 3)] 68 | #[case(0x7fffff, 4)] 69 | #[case(0x800000, 5)] 70 | #[case(0x7fffffff, 5)] 71 | #[case(0x80000000, 6)] 72 | #[case(0xffffffff, 6)] 73 | fn test_serialized_length_small_number(#[case] value: u32, #[case] expect: u32) { 74 | assert_eq!(serialized_length_small_number(value), expect); 75 | } 76 | 77 | #[rstest] 78 | #[case(0, Some(1))] 79 | #[case(1, Some(1))] 80 | #[case(7, Some(1))] 81 | #[case(8, Some(2))] 82 | #[case(9, Some(3))] 83 | #[case(504, Some(1+63))] 84 | #[case(505, Some(2+64))] 85 | #[case(0xfff8, Some(2+0x1fff))] 86 | #[case(0xfff9, Some(3+0x2000))] 87 | #[case(0x3ffffff8, Some(4 + 0x3ffffff8_u64.div_ceil(8)))] 88 | #[case(0x3ffffff9, Some(5 + 0x3ffffff9_u64.div_ceil(8)))] 89 | #[case(0x1ffffffff8, Some(5 + 0x1ffffffff8_u64.div_ceil(8)))] 90 | #[case(0x1ffffffff9, None)] 91 | fn test_atom_length_bits(#[case] num_bits: u64, #[case] expect: Option<u64>) { 92 | assert_eq!(atom_length_bits(num_bits), expect); 93 | } 94 | } 95 | -------------------------------------------------------------------------------- /src/serde/test.rs: -------------------------------------------------------------------------------- 1 | use hex::FromHex; 2 | 3 | use crate::allocator::Allocator; 4 | use crate::serde::{ 5 | node_from_bytes, node_from_bytes_backrefs, node_to_bytes, node_to_bytes_backrefs, Serializer, 6 | }; 7 | 8 | fn check_round_trip(obj_ser_br_hex: &str, serializer_output: Option<&str>) { 9 | // serialized with br => obj => serialized no br =(allow_br)=> obj => serialized w br 10 | 11 | // serialized object, with back-refs 12 | let obj_ser_br = <Vec<u8>>::from_hex(obj_ser_br_hex).unwrap(); 13 | 14 | // turn into serialized object with no back-refs 15 | let mut allocator = Allocator::new(); 16 | let obj = node_from_bytes_backrefs(&mut allocator, &obj_ser_br).unwrap(); 17 | 18 | let obj_ser_no_br_1 = node_to_bytes(&allocator, obj).unwrap(); 19 | 20 | //
deserialize using `node_from_bytes_backrefs` (even though there are no backrefs) 21 | // and reserialized without back-refs 22 | let mut allocator = Allocator::new(); 23 | let obj = node_from_bytes_backrefs(&mut allocator, &obj_ser_no_br_1).unwrap(); 24 | 25 | let obj_ser_no_br_2 = node_to_bytes(&allocator, obj).unwrap(); 26 | 27 | // compare both reserializations (without back-refs) 28 | assert_eq!(obj_ser_no_br_1, obj_ser_no_br_2); 29 | 30 | // now reserialize with back-refs 31 | let mut allocator = Allocator::new(); 32 | let obj = node_from_bytes(&mut allocator, &obj_ser_no_br_1).unwrap(); 33 | 34 | let obj_ser_br_1 = node_to_bytes_backrefs(&allocator, obj).unwrap(); 35 | 36 | // and compare to original 37 | assert_eq!(obj_ser_br, obj_ser_br_1); 38 | 39 | // now reserialize with back-refs using the incremental serializer 40 | let mut allocator = Allocator::new(); 41 | let obj = node_from_bytes(&mut allocator, &obj_ser_no_br_1).unwrap(); 42 | 43 | let mut serializer = Serializer::new(None); 44 | let (done, _) = serializer.add(&allocator, obj).unwrap(); 45 | assert!(done); 46 | let obj_ser_br_2 = serializer.into_inner(); 47 | 48 | // and compare to original 49 | assert_eq!(obj_ser_br, obj_ser_br_1); 50 | 51 | // Serializer uses a different implementation that takes some short-cuts. 52 | // Specifically, it doesn't generate references to the parse stack itself 53 | match serializer_output { 54 | Some(expect) => { 55 | assert_eq!(expect, hex::encode(obj_ser_br_2)); 56 | } 57 | None => { 58 | assert_eq!(obj_ser_br_1, obj_ser_br_2); 59 | assert_eq!(obj_ser_br, obj_ser_br_2); 60 | } 61 | } 62 | } 63 | 64 | #[test] 65 | fn test_round_trip() { 66 | let check = check_round_trip; 67 | check("01", None); // 1 68 | check("ff83666f6f83626172", None); // (foo . bar) 69 | check("ff83666f6fff8362617280", None); // (foo bar) 70 | check("ffff0102ff0304", None); // ((1 . 2) . (3 . 4)) 71 | check("ff01ff02ff03ff04ff05ff0680", None); // (1 2 3 4 5 6) 72 | check("ff83666f6ffe02", None); // (foo . foo) 73 | 74 | // (long string of long text string) 75 | check( 76 | "ff846c6f6e67ff86737472696e67ff826f66fffe0bff8474657874fffe1780", 77 | None, 78 | ); 79 | 80 | /* 81 | (foo (foo) ((foo) foo) (((foo) foo) (foo) foo) ((((foo) foo) (foo) foo) ((foo) foo) 82 | (foo) foo) (((((foo) foo) (foo) foo) ((foo) foo) (foo) foo) (((foo) foo) (foo) foo) 83 | ((foo) foo) (foo) foo) ((((((foo) foo) (foo) foo) ((foo) foo) (foo) foo) (((foo) foo) 84 | (foo) foo) ((foo) foo) (foo) foo) ((((foo) foo) (foo) foo) ((foo) foo) (foo) foo) 85 | (((foo) foo) (foo) foo) ((foo) foo) (foo) foo)) 86 | */ 87 | 88 | // These back-references point directly to the parse stack. 
The Serializer 89 | // doesn't generate back references like that, so it will only round-trip 90 | // with node_to_bytes_backrefs() 91 | check( 92 | "ff83666f6ffffe01fffe01fffe01fffe01fffe01fffe0180", 93 | Some("ff83666f6ffffffe0280fffffe02fe02fffffe02fe02fffffe02fe02fffffe02fe02fffffe02fe0280"), 94 | ); 95 | } 96 | -------------------------------------------------------------------------------- /src/serde/utils.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::io::{copy, sink, Error, Read, Write}; 3 | 4 | pub fn copy_exactly<R: Read, W: Write>( 5 | reader: &mut R, 6 | writer: &mut W, 7 | expected_size: u64, 8 | ) -> io::Result<()> { 9 | let mut reader = reader.by_ref().take(expected_size); 10 | 11 | let count = copy(&mut reader, writer)?; 12 | if count < expected_size { 13 | Err(Error::new( 14 | std::io::ErrorKind::UnexpectedEof, 15 | "copy terminated early", 16 | )) 17 | } else { 18 | Ok(()) 19 | } 20 | } 21 | 22 | pub fn skip_bytes<R: Read>(f: &mut R, size: u64) -> io::Result<()> { 23 | copy_exactly(f, &mut sink(), size) 24 | } 25 | -------------------------------------------------------------------------------- /src/tests.rs: -------------------------------------------------------------------------------- 1 | use super::allocator::{Allocator, NodePtr}; 2 | use super::serde::node_from_bytes; 3 | use super::serde::node_to_bytes; 4 | use super::test_ops::node_eq; 5 | 6 | fn test_serialize_roundtrip(a: &mut Allocator, n: NodePtr) { 7 | let vec = node_to_bytes(a, n).unwrap(); 8 | let n0 = node_from_bytes(a, &vec).unwrap(); 9 | assert!(node_eq(a, n, n0)); 10 | } 11 | 12 | #[test] 13 | fn test_roundtrip() { 14 | let mut a = Allocator::new(); 15 | let n = a.nil(); 16 | test_serialize_roundtrip(&mut a, n); 17 | 18 | let n = a.one(); 19 | test_serialize_roundtrip(&mut a, n); 20 | 21 | let n = a.new_atom(&[1_u8, 2_u8, 3_u8]).unwrap(); 22 | test_serialize_roundtrip(&mut a, n); 23 | 24 | let a1 = a.new_atom(&[1_u8, 2_u8, 3_u8]).unwrap(); 25 | let a2 = a.new_atom(&[4_u8, 5_u8, 6_u8]).unwrap(); 26 | let p = a.new_pair(a1, a2).unwrap(); 27 | test_serialize_roundtrip(&mut a, p); 28 | 29 | for idx in 0..=255 { 30 | let n = a.new_atom(&[idx]).unwrap(); 31 | test_serialize_roundtrip(&mut a, n); 32 | } 33 | 34 | // large blob 35 | let buf = vec![0; 1000000]; 36 | let n = a.new_atom(&buf).unwrap(); 37 | test_serialize_roundtrip(&mut a, n); 38 | 39 | // deep tree 40 | let mut prev = a.nil(); 41 | for _ in 0..=4000 { 42 | prev = a.new_pair(a.one(), prev).unwrap(); 43 | } 44 | test_serialize_roundtrip(&mut a, prev); 45 | 46 | // deep reverse tree 47 | let mut prev = a.nil(); 48 | for _ in 0..=4000 { 49 | let n = a.one(); 50 | prev = a.new_pair(prev, n).unwrap(); 51 | } 52 | test_serialize_roundtrip(&mut a, prev); 53 | } 54 | 55 | #[test] 56 | fn test_serialize_blobs() { 57 | let mut a = Allocator::new(); 58 | 59 | // nil 60 | let n = a.nil(); 61 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0x80]); 62 | 63 | // one 64 | let n = a.one(); 65 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[1]); 66 | 67 | // single byte 68 | let n = a.new_atom(&[128]).unwrap(); 69 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0x81, 128]); 70 | test_serialize_roundtrip(&mut a, n); 71 | 72 | // two bytes 73 | let n = a.new_atom(&[0x10, 0xff]).unwrap(); 74 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0x82, 0x10, 0xff]); 75 | test_serialize_roundtrip(&mut a, n); 76 | 77 | // three bytes 78 | let n = a.new_atom(&[0xff, 0x10, 0xff]).unwrap(); 79 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0x83,
0xff, 0x10, 0xff]); 80 | test_serialize_roundtrip(&mut a, n); 81 | } 82 | 83 | #[test] 84 | fn test_serialize_lists() { 85 | let mut a = Allocator::new(); 86 | 87 | // nil 88 | let n = a.nil(); 89 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0x80]); 90 | 91 | // one item 92 | let n = a.new_pair(a.one(), n).unwrap(); 93 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0xff, 1, 0x80]); 94 | 95 | // two items 96 | let n = a.new_pair(a.one(), n).unwrap(); 97 | assert_eq!(node_to_bytes(&a, n).unwrap(), &[0xff, 1, 0xff, 1, 0x80]); 98 | test_serialize_roundtrip(&mut a, n); 99 | 100 | // three items 101 | let n = a.new_pair(a.one(), n).unwrap(); 102 | assert_eq!( 103 | node_to_bytes(&a, n).unwrap(), 104 | &[0xff, 1, 0xff, 1, 0xff, 1, 0x80] 105 | ); 106 | test_serialize_roundtrip(&mut a, n); 107 | 108 | // a backwards list 109 | let n = a.one(); 110 | let n = a.new_pair(n, a.one()).unwrap(); 111 | let n = a.new_pair(n, a.one()).unwrap(); 112 | let n = a.new_pair(n, a.one()).unwrap(); 113 | assert_eq!( 114 | node_to_bytes(&a, n).unwrap(), 115 | &[0xff, 0xff, 0xff, 1, 1, 1, 1] 116 | ); 117 | test_serialize_roundtrip(&mut a, n); 118 | } 119 | 120 | #[test] 121 | fn test_serialize_tree() { 122 | let mut a = Allocator::new(); 123 | 124 | let a1 = a.new_atom(&[1]).unwrap(); 125 | let a2 = a.new_atom(&[2]).unwrap(); 126 | let a3 = a.new_atom(&[3]).unwrap(); 127 | let a4 = a.new_atom(&[4]).unwrap(); 128 | let l = a.new_pair(a1, a2).unwrap(); 129 | let r = a.new_pair(a3, a4).unwrap(); 130 | let n = a.new_pair(l, r).unwrap(); 131 | assert_eq!( 132 | node_to_bytes(&a, n).unwrap(), 133 | &[0xff, 0xff, 1, 2, 0xff, 3, 4] 134 | ); 135 | test_serialize_roundtrip(&mut a, n); 136 | } 137 | -------------------------------------------------------------------------------- /tests/programs/args-add.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-all.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-and.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-any.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-cat.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-mul.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-or.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-point_add.envhex: -------------------------------------------------------------------------------- 1 | ff8080 2 | -------------------------------------------------------------------------------- /tests/programs/args-sha.envhex: 
-------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-sub.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-1.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-2.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-3.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-4.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-5.envhex: -------------------------------------------------------------------------------- 1 | ff832dc6c080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-5.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff8307ff00ff05ffff0187ffffffffffffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-6.envhex: -------------------------------------------------------------------------------- 1 | ff8401c9c38080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-6.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff820001ff05ffff01870fffffffffffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-7.envhex: -------------------------------------------------------------------------------- 1 | ff8401c9c38080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-7.hex: -------------------------------------------------------------------------------- 1 | 
ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff820041ff05ffff01870fffffffffffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-8.envhex: -------------------------------------------------------------------------------- 1 | ff8401c9c38080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-8.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff820081ff05ffff01870fffffffffffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-9.envhex: -------------------------------------------------------------------------------- 1 | ff8401c9c38080 2 | -------------------------------------------------------------------------------- /tests/programs/args-unknown-9.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff8200c1ff05ffff01870fffffffffffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/args-xor.envhex: -------------------------------------------------------------------------------- 1 | ff8201f480 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-add.envhex: -------------------------------------------------------------------------------- 1 | ff834c4b4080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-add.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ffff019907ffffffffffffffffffffffffffffffffffffffffffffffffffff04ff05ff8080808080ffff04ffff01ff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff02ffff04ff02ffff04ffff10ff05ff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-ash.envhex: -------------------------------------------------------------------------------- 1 | ff82271080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-ash.hex: -------------------------------------------------------------------------------- 1 | 
ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff16ff05ffff018300ffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-cat.envhex: -------------------------------------------------------------------------------- 1 | ff1d80 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-cat.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ffff0186414243444546ffff04ff05ff8080808080ffff04ffff01ff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff02ffff04ff02ffff04ffff0eff05ff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-cons.envhex: -------------------------------------------------------------------------------- 1 | ff840098968080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-cons.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ffff01820539ffff04ff05ff8080808080ffff04ffff01ff02ffff03ff0bffff01ff04ff05ffff02ff02ffff04ff02ffff04ff05ffff04ffff11ff0bffff010180ffff04ff05ff80808080808080ff8080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-div.envhex: -------------------------------------------------------------------------------- 1 | ff830f424080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-div.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff13ff05ffff010d80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-lsh.envhex: -------------------------------------------------------------------------------- 1 | ff82271080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-lsh.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff17ff05ffff018300ffff80ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-mul.envhex: -------------------------------------------------------------------------------- 1 | ff6480 2 | -------------------------------------------------------------------------------- 
/tests/programs/recursive-mul.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ffff019707ffffffffffffffffffffffffffffffffffffffffffffffff04ff05ff8080808080ffff04ffff01ff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff02ffff04ff02ffff04ffff12ff05ff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-not.envhex: -------------------------------------------------------------------------------- 1 | ff840098968080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-not.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff1bff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-pubkey.envhex: -------------------------------------------------------------------------------- 1 | ff82271080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-pubkey.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff04ffff04ff02ffff04ffff02ff06ffff04ff02ffff01ff06808080ffff04ff05ff8080808080ffff04ffff01ffff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff04ffff04ff02ffff04ffff1eff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff02ffff03ff05ffff01ff17ffff02ff06ffff04ff02ffff04ffff11ff05ffff010180ff80808080ffff018300ffff80ffff01ff01818080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-sub.envhex: -------------------------------------------------------------------------------- 1 | ff834c4b4080 2 | -------------------------------------------------------------------------------- /tests/programs/recursive-sub.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ffff019907ffffffffffffffffffffffffffffffffffffffffffffffffffff04ff05ff8080808080ffff04ffff01ff02ffff03ffff09ff0bff8080ffff0105ffff01ff02ff02ffff04ff02ffff04ffff11ff05ff0580ffff04ffff11ff0bffff010180ff808080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/softfork-1.envhex: -------------------------------------------------------------------------------- 1 | ff84ffffffff80 2 | -------------------------------------------------------------------------------- /tests/programs/softfork-1.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff09ffff0180ff0580ffff01ff012affff01ff02ff02ffff04ff02ffff04ffff10ffff11ff05ffff010180ffff24ffff018900ffffffffffffff458080ffff018080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/programs/softfork-2.envhex: -------------------------------------------------------------------------------- 1 | ff84ffffffff80 2 | 
-------------------------------------------------------------------------------- /tests/programs/softfork-2.hex: -------------------------------------------------------------------------------- 1 | ff02ffff01ff02ff02ffff04ff02ffff04ff05ffff0180808080ffff04ffff01ff02ffff03ffff09ffff0180ff0580ffff01ff012affff01ff02ff02ffff04ff02ffff04ffff10ffff11ff05ffff010180ffff24ffff018500ffffff458080ffff018080808080ff0180ff018080 2 | -------------------------------------------------------------------------------- /tests/run.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from clvm_rs.clvm_rs import run_serialized_chia_program 4 | 5 | 6 | def run_clvm(fn, env=None): 7 | 8 | program = bytes.fromhex(open(fn, 'r').read()) 9 | if env is not None: 10 | env = bytes.fromhex(open(env, 'r').read()) 11 | else: 12 | env = bytes.fromhex("ff80") 13 | # constants from the main chia blockchain: 14 | # https://github.com/Chia-Network/chia-blockchain/blob/main/chia/consensus/default_constants.py 15 | max_cost = 11000000000 16 | cost_per_byte = 12000 17 | 18 | max_cost -= (len(program) + len(env)) * cost_per_byte 19 | return run_serialized_chia_program( 20 | program, 21 | env, 22 | max_cost, 23 | 0, 24 | ) 25 | 26 | 27 | def count_tree_size(tree) -> int: 28 | stack = [tree] 29 | ret = 0 30 | while len(stack): 31 | i = stack.pop() 32 | if i.atom is not None: 33 | ret += len(i.atom) 34 | elif i.pair is not None: 35 | stack.append(i.pair[1]) 36 | stack.append(i.pair[0]) 37 | else: 38 | # this shouldn't happen 39 | assert False 40 | return ret 41 | 42 | if __name__ == "__main__": 43 | import sys 44 | from time import time 45 | 46 | try: 47 | start = time() 48 | cost, result = run_clvm(sys.argv[1], sys.argv[2]) 49 | duration = time() - start; 50 | print(f"cost: {cost}") 51 | print(f"execution time: {duration:.2f}s") 52 | except Exception as e: 53 | print("FAIL:", e.args[0]) 54 | sys.exit(1) 55 | start = time() 56 | ret_size = count_tree_size(result) 57 | duration = time() - start; 58 | print(f"returned bytes: {ret_size}") 59 | print(f"parse return value time: {duration:.2f}s") 60 | sys.exit(0) 61 | -------------------------------------------------------------------------------- /tools/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "clvm-rs-test-tools" 3 | version = "0.14.0" 4 | authors = ["Arvid Norberg ", "Cameron Cooper "] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | description = "Developer tools for the CLVM interpreter" 8 | homepage = "https://github.com/Chia-Network/clvm_rs/tools/" 9 | repository = "https://github.com/Chia-Network/clvm_rs/tools/" 10 | readme = "README.md" 11 | 12 | [dependencies] 13 | hex-literal = { workspace = true } 14 | hex = { workspace = true } 15 | rand = { workspace = true } 16 | sha1 = { workspace = true } 17 | linreg = { workspace = true } 18 | clvmr = { workspace = true } 19 | chia-bls = { workspace = true } 20 | num-bigint = { workspace = true } 21 | num-integer = { workspace = true } 22 | serde = { workspace = true, features = ["derive"] } 23 | serde_json = { workspace = true } 24 | clap = { workspace = true, features = ["derive"] } 25 | rand_chacha = { workspace = true } 26 | 27 | [[bin]] 28 | name = "generate-fuzz-corpus" 29 | test = false 30 | bench = false 31 | 32 | [[bin]] 33 | name = "benchmark-clvm-cost" 34 | test = false 35 | bench = false 36 | 37 | [[bin]] 38 | name = "verify-zksnark" 39 | test = false 40 | bench = false 41 | 42 | 
[[bin]] 43 | name = "generate-modpow-tests" 44 | test = false 45 | bench = false 46 | -------------------------------------------------------------------------------- /tools/data/proof.json: -------------------------------------------------------------------------------- 1 | { 2 | "pi_a": [ 3 | "2448972748141989272536367560659697170734634088573614916679357916264488049700372778155603853824786623305387094546369", 4 | "2613221712984417828767835104274024828239341398418470730553885155357439750821374624630210264872985961930235777347104", 5 | "1" 6 | ], 7 | "pi_b": [ 8 | [ 9 | "3708254411817616046496599112503459290131712318992805100196099293124450895778668562366777707942755580648476411528616", 10 | "1615373645805710633948600413508988659344946361996638390351319550905473691532825592955678648251052813842459992431813" 11 | ], 12 | [ 13 | "2001495116686267442302054141712026804161251446824505371797990860144851289966066534281669278894973744357520656326919", 14 | "1341482890450207545789724195498464169941018957781713961603436195744234605397631185080983084054424258281861049763542" 15 | ], 16 | ["1", "0"] 17 | ], 18 | "pi_c": [ 19 | "3029498251172243945704453776550289972710027563944505099878777570942300616351648238243169819882261150555837899543624", 20 | "3369937544529554744574272044068942679061743305676782825796582255803972813518101160644118161590230148923409065282626", 21 | "1" 22 | ], 23 | "protocol": "groth16", 24 | "curve": "bls12381" 25 | } 26 | -------------------------------------------------------------------------------- /tools/data/public.json: -------------------------------------------------------------------------------- 1 | [ 2 | "15744006038856998268181219516291113434365469909648022488288672656450282844855" 3 | ] 4 | -------------------------------------------------------------------------------- /tools/data/verification_key.json: -------------------------------------------------------------------------------- 1 | { 2 | "protocol": "groth16", 3 | "curve": "bls12381", 4 | "nPublic": 1, 5 | "vk_alpha_1": [ 6 | "3687990602627480788576278326778684068346499502295437145891072024790539587089059507329252689410539273728007931173343", 7 | "2691609246889571927545472613612306448566845944689354319490588876705534279625652314844953252469547550362023064748953", 8 | "1" 9 | ], 10 | "vk_beta_2": [ 11 | [ 12 | "2608641754095668628654511081770632612569796850505052942197090784708482913082697973366799224986199956634486073060199", 13 | "89228168523230122473878930637028667136418498377368837726462188398136487986559177833403891300545206849263721046238" 14 | ], 15 | [ 16 | "41546066546231073094970759775221151302737154809289359952977752591405008468272119343489892173609247914574253145019", 17 | "1073496232887667728611248773592182301683863569516966096481458702792361423011100241892970384222710119575879538773631" 18 | ], 19 | ["1", "0"] 20 | ], 21 | "vk_gamma_2": [ 22 | [ 23 | "352701069587466618187139116011060144890029952792775240219908644239793785735715026873347600343865175952761926303160", 24 | "3059144344244213709971259814753781636986470325476647558659373206291635324768958432433509563104347017837885763365758" 25 | ], 26 | [ 27 | "1985150602287291935568054521177171638300868978215655730859378665066344726373823718423869104263333984641494340347905", 28 | "927553665492332455747201965776037880757740193453592970025027978793976877002675564980949289727957565575433344219582" 29 | ], 30 | ["1", "0"] 31 | ], 32 | "vk_delta_2": [ 33 | [ 34 | 
"2302708795135883755295689012757758746531394778684760845557415078964262613319745952046265901445936183734832887419314", 35 | "3084780362642599240218688073008885474013386097145303880494731842809534952957928683302064241771259067324907482804535" 36 | ], 37 | [ 38 | "2353281801229749567454967639478048114084038149642432168712668513087247657105865478271864938520646795729273580954373", 39 | "3946758327447072982383197368123140665753687006919351726238936862294926911822209283838500408364423349160285940132546" 40 | ], 41 | ["1", "0"] 42 | ], 43 | "vk_alphabeta_12": [ 44 | [ 45 | [ 46 | "3646922899734621031309198643792122859755051285299968901190456847922062309516344525965903754151703713929910803018723", 47 | "3787264463476506417753905333017140683881970278885692947093103102190734933948009040062419894798125099171961993441585" 48 | ], 49 | [ 50 | "888591976801838355558310496304482240721117286293757254042783018082905268796780457128037898191123107332073390969306", 51 | "193551792940402175998097385264246078420044892736093193471826013538673041275411949024638134205343639644439406295978" 52 | ], 53 | [ 54 | "1317889796712672053878879651236837007079529832659749901543207138507635113785380816474503253440740877839206481081138", 55 | "68492254186446573347883950975645044908957072900012067873632406930630971765229261191765781341134978015638910128690" 56 | ] 57 | ], 58 | [ 59 | [ 60 | "2516694800776785349103641204442847315220092352707018103618606902656552402169819493388195598701143922571277337980272", 61 | "96623648346070294868753263474971669617108833331306255390212761619996570553190757977722341498313064273906964152134" 62 | ], 63 | [ 64 | "2502317210664014936951176356618294055923532685127730693917839807090502831127716870590666308945251016529938493301293", 65 | "1269831695938094358083014213758376386842407521241795645485130044447135656806386120537823082192926908937323744695066" 66 | ], 67 | [ 68 | "2096572493157417726452007018982901557663874671746322246164788364050706635296947426419903916849105863115688337239000", 69 | "1736417117473120462831776778505826154107507679154832319336687186056350388252408161606715924310118176458088198747427" 70 | ] 71 | ] 72 | ], 73 | "IC": [ 74 | [ 75 | "1071823547084176756430637039762682182586478148082321678474568557380586500897665218485348420354524434569648761461634", 76 | "2684952160352532512645026212843511021778883533927836353068736374874296422762544669022406839997640419830394096116948", 77 | "1" 78 | ], 79 | [ 80 | "2839260942033980200881629479143771629126097863497154112088469889816939528519692319012782094539984411463350527566132", 81 | "2039646667501610214724415985007403112148409765841539023420972305120615285033331651618581255120345157629653223270351", 82 | "1" 83 | ] 84 | ] 85 | } 86 | -------------------------------------------------------------------------------- /tools/generate-keccak-tests.py: -------------------------------------------------------------------------------- 1 | from eth_hash.auto import keccak 2 | from random import randbytes, randint, seed, sample 3 | from more_itertools import sliced 4 | 5 | # need 6 | # python -m pip install "eth-hash[pycryptodome]" 7 | 8 | seed(1337) 9 | 10 | SIZE = 100 11 | 12 | with open("../op-tests/test-keccak256-generated.txt", "w+") as f: 13 | f.write("; This file was generated by tools/generate-keccak-tests.py\n\n") 14 | 15 | for i in range(SIZE): 16 | 17 | blob = randbytes(randint(1, 30)) 18 | result = keccak(blob) 19 | 20 | for i in range(1, len(blob) + 1): 21 | args = sliced(blob, i) 22 | cost = 50 + len(blob) * 2 + (len(blob)+i-1)//i * 
160 + len(result) * 10 23 | args_str = " ".join([f"0x{a.hex()}" for a in args]) 24 | f.write(f"keccak256 {args_str} => 0x{result.hex()} | {cost}\n") 25 | -------------------------------------------------------------------------------- /tools/generate-secp256k1-tests.py: -------------------------------------------------------------------------------- 1 | from secp256k1 import PublicKey, PrivateKey 2 | from hashlib import sha256 3 | from random import randbytes, randint, seed, sample 4 | 5 | def flip_bit(b: bytes) -> bytearray: 6 | idx = randint(0, len(b) - 1) 7 | bit = 1 << randint(0, 7) 8 | ret = bytearray(b) 9 | ret[idx] ^= bit 10 | return ret 11 | 12 | def print_validation_test_case(f, num_cases, filter_pk, filter_msg, filter_sig, expect: str): 13 | sks = sample(secret_keys, num_cases) 14 | cost = 1300000 15 | sigs = [] 16 | 17 | args = "" 18 | for sk in sks: 19 | pk = sk.pubkey 20 | msg = randbytes(randint(3,40)) 21 | sig = sk.ecdsa_sign(msg) 22 | sha = sha256() 23 | sha.update(msg) 24 | f.write(f"secp256k1_verify 0x{bytes(filter_pk(pk.serialize())).hex()} 0x{filter_msg(sha.digest()).hex()} 0x{bytes(filter_sig(sk.ecdsa_serialize_compact(sig))).hex()}") 25 | 26 | f.write(f" => {expect}") 27 | if expect != "FAIL": 28 | f.write(f" | {cost}") 29 | f.write("\n") 30 | 31 | 32 | seed(1337) 33 | 34 | SIZE = 30 35 | 36 | # generate a bunch of keys 37 | secret_keys = [] 38 | for i in range(SIZE): 39 | secret_keys.append(PrivateKey()) 40 | 41 | 42 | with open("../op-tests/test-secp256k1.txt", "w+") as f: 43 | f.write("; This file was generated by tools/generate-secp256k1-tests.py\n\n") 44 | 45 | print_validation_test_case(f, SIZE, lambda pk: pk, lambda msg: msg, lambda sig: sig, "0") 46 | 47 | # negative tests (alter public key) 48 | print_validation_test_case(f, 3, flip_bit, lambda msg: msg, lambda sig: sig, "FAIL") 49 | 50 | # negative tests (alter message) 51 | print_validation_test_case(f, 3, lambda pk: pk, flip_bit, lambda sig: sig, "FAIL") 52 | 53 | # negative tests (alter signature) 54 | print_validation_test_case(f, 3, lambda pk: pk, lambda msg: msg, flip_bit, "FAIL") 55 | -------------------------------------------------------------------------------- /tools/generate-secp256r1-tests.py: -------------------------------------------------------------------------------- 1 | from ecdsa import SigningKey, NIST256p 2 | from hashlib import sha256 3 | from random import randbytes, randint, seed, sample 4 | 5 | def flip_bit(b: bytes) -> bytearray: 6 | idx = randint(0, len(b) - 1) 7 | bit = 1 << randint(0, 7) 8 | ret = bytearray(b) 9 | ret[idx] ^= bit 10 | return ret 11 | 12 | def print_validation_test_case(f, num_cases, filter_pk, filter_msg, filter_sig, expect: str): 13 | sks = sample(secret_keys, num_cases) 14 | cost = 1850000 15 | sigs = [] 16 | 17 | args = "" 18 | for sk in sks: 19 | pk = sk.verifying_key 20 | msg = randbytes(randint(3,40)) 21 | sig = sk.sign_deterministic(msg) 22 | sha = sha256() 23 | sha.update(msg) 24 | f.write(f"secp256r1_verify 0x{bytes(filter_pk(pk.to_string('compressed'))).hex()} 0x{filter_msg(sha.digest()).hex()} 0x{bytes(filter_sig(sig)).hex()}") 25 | 26 | f.write(f" => {expect}") 27 | if expect != "FAIL": 28 | f.write(f" | {cost}") 29 | f.write("\n") 30 | 31 | 32 | seed(1337) 33 | 34 | SIZE = 30 35 | 36 | # generate a bunch of keys 37 | secret_keys = [] 38 | for i in range(SIZE): 39 | secret_keys.append(SigningKey.generate(curve=NIST256p, hashfunc=sha256)) 40 | 41 | 42 | with open("../op-tests/test-secp256r1.txt", "w+") as f: 43 | f.write("; This file was generated by 
tools/generate-secp256r1-tests.py\n\n") 44 | 45 | print_validation_test_case(f, SIZE, lambda pk: pk, lambda msg: msg, lambda sig: sig, "0") 46 | 47 | # negative tests (alter public key) 48 | print_validation_test_case(f, 3, flip_bit, lambda msg: msg, lambda sig: sig, "FAIL") 49 | 50 | # negative tests (alter message) 51 | print_validation_test_case(f, 3, lambda pk: pk, flip_bit, lambda sig: sig, "FAIL") 52 | 53 | # negative tests (alter signature) 54 | print_validation_test_case(f, 3, lambda pk: pk, lambda msg: msg, flip_bit, "FAIL") 55 | -------------------------------------------------------------------------------- /tools/generate-sha256-tests.py: -------------------------------------------------------------------------------- 1 | from random import randbytes, randint, seed, choice 2 | from hashlib import sha256 3 | 4 | seed(1337) 5 | SIZE = 500 6 | 7 | test_cases = set() 8 | 9 | with open("../op-tests/test-sha256.txt", "w+") as f: 10 | f.write("; This file was generated by tools/generate-sha256-tests.py\n\n") 11 | 12 | for i in range(0, SIZE): 13 | num_args = choice([0, 1, 2, 2, 2, 3, 3, 3, 4]) 14 | args = [] 15 | cost = 87 16 | ctx = sha256() 17 | test_args = [] 18 | for i in range(num_args): 19 | cost += 134 20 | arg = choice([b"", b"\x01", b"\x02", b"foobar", randbytes(24), randbytes(48), randbytes(32)]) 21 | cost += len(arg) * 2 22 | args.append(arg) 23 | ctx.update(arg) 24 | if arg == b"": 25 | test_args.append("0") 26 | else: 27 | test_args.append(f"0x{arg.hex()}") 28 | # malloc cost 29 | cost += 32 * 10 30 | test = " ".join(test_args) 31 | if test in test_cases: 32 | continue 33 | test_cases.add(test) 34 | f.write(f"sha256 {test} => 0x{ctx.hexdigest()} | {cost}\n") 35 | -------------------------------------------------------------------------------- /tools/src/bin/generate-modpow-tests.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | 3 | use clvmr::Allocator; 4 | use num_bigint::{BigInt, Sign}; 5 | use num_integer::Integer; 6 | use rand::{Rng, SeedableRng}; 7 | use rand_chacha::ChaCha8Rng; 8 | 9 | fn main() { 10 | // Seed the RNG with a fixed value for reproducibility. 11 | let mut rng = ChaCha8Rng::seed_from_u64(1337); 12 | 13 | // Generate a random quantity (within a range) of random bytes. 14 | let mut bytes = |min: usize, max: usize| { 15 | let len = rng.gen_range(min..=max); 16 | let mut bytes = vec![0; len]; 17 | rng.fill(&mut bytes[..]); 18 | bytes 19 | }; 20 | 21 | let mut tests = 22 | "; This file was generated by tools/src/bin/generate-modpow-tests.rs\n\n".to_string(); 23 | 24 | for _ in 0..100 { 25 | let base = BigInt::from_signed_bytes_be(&bytes(0, 32)); 26 | 27 | // Generate a random exponent, but ensure it's positive. 28 | let exponent = BigInt::from_bytes_be(Sign::Plus, &bytes(0, 32)); 29 | 30 | // Generate a random modulus, but ensure it's non-zero. 
31 | let mut modulus = BigInt::from_signed_bytes_be(&bytes(0, 32)); 32 | if modulus == BigInt::ZERO { 33 | modulus += 1; 34 | } 35 | 36 | let base_len = atom_len(base.clone()); 37 | let exponent_len = atom_len(exponent.clone()); 38 | let modulus_len = atom_len(modulus.clone()); 39 | 40 | let result = base.modpow(&exponent, &modulus); 41 | let result_len = atom_len(result.clone()); 42 | let cost = 17000 43 | + base_len * 38 44 | + exponent_len * exponent_len * 3 45 | + modulus_len * modulus_len * 21 46 | + result_len * 10; 47 | 48 | tests.push_str(&format!( 49 | "modpow {base} {exponent} {modulus} => {result} | {cost}\n" 50 | )); 51 | } 52 | 53 | for _ in 0..100 { 54 | let base = BigInt::from_signed_bytes_be(&bytes(0, 32)); 55 | 56 | // Generate a random modulus, but ensure it's non-zero. 57 | let mut modulus = BigInt::from_signed_bytes_be(&bytes(0, 16)); 58 | if modulus == BigInt::ZERO { 59 | modulus += 1; 60 | } 61 | 62 | let base_len = atom_len(base.clone()); 63 | let modulus_len = atom_len(modulus.clone()); 64 | 65 | // CLVM uses neither `%` nor `mod_euclid`, but rather `mod_floor`. 66 | let result = base.mod_floor(&modulus); 67 | let result_len = atom_len(result.clone()); 68 | let cost = 988 + base_len * 4 + modulus_len * 4 + result_len * 10; 69 | 70 | tests.push_str(&format!("% {base} {modulus} => {result} | {cost}\n")); 71 | } 72 | 73 | fs::write("./op-tests/test-modpow.txt", tests).unwrap(); 74 | } 75 | 76 | // Convert a `BigInt` to a CLVM atom and return the atom's length in bytes. 77 | fn atom_len(num: BigInt) -> usize { 78 | let mut allocator = Allocator::new(); 79 | let ptr = allocator.new_number(num).unwrap(); 80 | allocator.atom_len(ptr) 81 | } 82 | -------------------------------------------------------------------------------- /tools/src/bin/verify-zksnark.rs: -------------------------------------------------------------------------------- 1 | use chia_bls::{aggregate_pairing, G1Element, G2Element}; 2 | use num_bigint::BigInt; 3 | use serde::Deserialize; 4 | 5 | use std::fs::File; 6 | use std::io::Read; 7 | 8 | #[allow(dead_code)] 9 | #[derive(Debug, Deserialize)] 10 | struct VerificationKey { 11 | vk_alpha_1: Vec<String>, 12 | vk_beta_2: Vec<Vec<String>>, 13 | vk_gamma_2: Vec<Vec<String>>, 14 | vk_delta_2: Vec<Vec<String>>, 15 | vk_alphabeta_12: Vec<Vec<Vec<String>>>, 16 | #[serde(alias = "IC")] 17 | ic: Vec<Vec<String>>, 18 | #[serde(alias = "nPublic")] 19 | n_public: u8, 20 | protocol: String, 21 | curve: String, 22 | } 23 | 24 | #[allow(dead_code)] 25 | #[derive(Debug, Deserialize)] 26 | struct Proof { 27 | pi_a: Vec<String>, 28 | pi_b: Vec<Vec<String>>, 29 | pi_c: Vec<String>, 30 | protocol: String, 31 | curve: String, 32 | } 33 | 34 | fn bigint_to_48bytes(i: &BigInt) -> [u8; 48] { 35 | fn prepend<T: Clone>(v: &mut Vec<T>, x: T, n: usize) { 36 | v.resize(v.len() + n, x); 37 | v.rotate_right(n); 38 | } 39 | 40 | let mut out: Vec<u8> = i.to_bytes_be().1; 41 | let len = out.len(); 42 | prepend(&mut out, 0, 48 - len); 43 | out.try_into().unwrap() 44 | } 45 | 46 | fn vec_pair(arr: &[String]) -> ([u8; 48], [u8; 48]) { 47 | ( 48 | bigint_to_48bytes(&arr[0].clone().parse::<BigInt>().unwrap()), 49 | bigint_to_48bytes(&arr[1].clone().parse::<BigInt>().unwrap()), 50 | ) 51 | } 52 | 53 | fn vec_pair_g1(arr: &[String]) -> G1Element { 54 | let (fp_1, fp_2) = vec_pair(arr); 55 | let data: [u8; 96] = [fp_1, fp_2].concat().try_into().unwrap(); 56 | println!("G1 uncompressed: {}", hex::encode(data)); 57 | let ret = G1Element::from_uncompressed(&data).unwrap(); 58 | println!("G1 compressed: {}", hex::encode(ret.to_bytes())); 59 | ret 60 | } 61 | 62 | fn vec_pair_g2(arr: &[Vec<String>]) -> G2Element { 63 | let (fp_1, fp_2) = vec_pair(&arr[0]); 64 | let (fp_3,
fp_4) = vec_pair(&arr[1]); 65 | let data: [u8; 192] = [fp_2, fp_1, fp_4, fp_3].concat().try_into().unwrap(); 66 | println!("G2 uncompressed: {}", hex::encode(data)); 67 | let ret = G2Element::from_uncompressed(&data).unwrap(); 68 | println!("G2 compressed: {}", hex::encode(ret.to_bytes())); 69 | ret 70 | } 71 | 72 | pub fn main() { 73 | println!("verifying zksnark"); 74 | 75 | // Read verification_key.json 76 | let mut file = File::open("data/verification_key.json").unwrap(); 77 | let mut verification_key = String::new(); 78 | file.read_to_string(&mut verification_key).unwrap(); 79 | let verification_key: VerificationKey = serde_json::from_str(&verification_key) 80 | .expect("Verification Key JSON was not well-formatted"); 81 | 82 | // Read public.json 83 | let mut file = File::open("data/public.json").unwrap(); 84 | let mut public = String::new(); 85 | file.read_to_string(&mut public).unwrap(); 86 | let public: Vec<String> = 87 | serde_json::from_str(&public).expect("Public JSON was not well-formatted"); 88 | 89 | // Read proof.json 90 | let mut file = File::open("data/proof.json").unwrap(); 91 | let mut proof = String::new(); 92 | file.read_to_string(&mut proof).unwrap(); 93 | let proof: Proof = serde_json::from_str(&proof).expect("Proof JSON was not well-formatted"); 94 | 95 | let ic0 = vec_pair_g1(&verification_key.ic[0]); 96 | 97 | let mut cpub = G1Element::default(); 98 | for (i, public_i) in public.iter().enumerate() { 99 | let mut ic = vec_pair_g1(&verification_key.ic[i + 1]); 100 | let scalar = public_i.parse::<BigInt>().unwrap().to_bytes_be().1; 101 | ic.scalar_multiply(&scalar); 102 | cpub += &ic; 103 | } 104 | cpub += &ic0; 105 | 106 | let mut pi_a = vec_pair_g1(&proof.pi_a); 107 | pi_a.negate(); 108 | let pi_b = vec_pair_g2(&proof.pi_b); 109 | let pi_c = vec_pair_g1(&proof.pi_c); 110 | 111 | let vk_gamma_2 = vec_pair_g2(&verification_key.vk_gamma_2); 112 | let vk_delta_2 = vec_pair_g2(&verification_key.vk_delta_2); 113 | let vk_alpha_1 = vec_pair_g1(&verification_key.vk_alpha_1); 114 | let vk_beta_2 = vec_pair_g2(&verification_key.vk_beta_2); 115 | 116 | // output the compressed values 117 | println!( 118 | "bls_pairing_identity 0x{} 0x{} 0x{} 0x{} 0x{} 0x{} 0x{} 0x{} => 0 | 7800000", 119 | hex::encode(pi_a.to_bytes()), 120 | hex::encode(pi_b.to_bytes()), 121 | hex::encode(cpub.to_bytes()), 122 | hex::encode(vk_gamma_2.to_bytes()), 123 | hex::encode(pi_c.to_bytes()), 124 | hex::encode(vk_delta_2.to_bytes()), 125 | hex::encode(vk_alpha_1.to_bytes()), 126 | hex::encode(vk_beta_2.to_bytes()) 127 | ); 128 | 129 | // run the miller loop 130 | let item_refs: Vec<(&G1Element, &G2Element)> = vec![ 131 | (&pi_a, &pi_b), 132 | (&cpub, &vk_gamma_2), 133 | (&pi_c, &vk_delta_2), 134 | (&vk_alpha_1, &vk_beta_2), 135 | ]; 136 | let identity: bool = aggregate_pairing(item_refs); 137 | assert!(identity); 138 | } 139 | -------------------------------------------------------------------------------- /wasm/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "clvm_wasm" 3 | version = "0.14.0" 4 | authors = ["Richard Kiss "] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | description = "Implementation of `clvm` for Chia Network's cryptocurrency" 8 | homepage = "https://github.com/Chia-Network/clvm_rs/" 9 | repository = "https://github.com/Chia-Network/clvm_rs/" 10 | readme = "README.md" 11 | 12 | [lib] 13 | name = "clvm_wasm" 14 | crate-type = ["cdylib"] 15 | path = "src/lib.rs" 16 | 17 | [dependencies] 18 | clvmr = { workspace = true } 19 |
wasm-bindgen = { workspace = true } 20 | wasm-bindgen-test = { workspace = true } 21 | js-sys = { workspace = true } 22 | getrandom = { workspace = true, features = ["js"] } 23 | -------------------------------------------------------------------------------- /wasm/README.md: -------------------------------------------------------------------------------- 1 | The `clvm_rs` package has JavaScript bindings for the Rust implementation of CLVM in wasm. 2 | This project is still immature, and only a limited test API is available for the moment. Pull requests are welcome. 3 | 4 | ## Build 5 | 6 | Use `wasm-pack` to build the wasm `pkg` file used with npm. Install it with: 7 | 8 | ```bash 9 | cargo install wasm-pack 10 | ``` 11 | 12 | Then build with: 13 | 14 | ```bash 15 | # Make sure you're at /wasm 16 | wasm-pack build --release --target=nodejs 17 | ``` 18 | 19 | ## Test 20 | 21 | Prerequisites: 22 | 23 | - NodeJS >= 16 24 | - Wasm files built by the `wasm-pack` command exist at `/wasm/pkg/` 25 | 26 | ```bash 27 | # Make sure you're at /wasm 28 | node ./tests/index.js 29 | ``` 30 | -------------------------------------------------------------------------------- /wasm/src/flags.rs: -------------------------------------------------------------------------------- 1 | // when this flag is set, the block generator serialization is allowed to 2 | // contain back-references 3 | pub const ALLOW_BACKREFS: u32 = 0x2000000; 4 | -------------------------------------------------------------------------------- /wasm/src/lazy_node.rs: -------------------------------------------------------------------------------- 1 | use js_sys::Array; 2 | use std::rc::Rc; 3 | use wasm_bindgen::prelude::*; 4 | 5 | use clvmr::allocator::{Allocator, NodePtr, SExp}; 6 | use clvmr::serde::{ 7 | node_from_bytes, node_from_bytes_backrefs, node_to_bytes_backrefs, node_to_bytes_limit, 8 | }; 9 | 10 | #[wasm_bindgen] 11 | #[derive(Clone)] 12 | pub struct LazyNode { 13 | allocator: Rc<Allocator>, 14 | node: NodePtr, 15 | } 16 | 17 | #[wasm_bindgen] 18 | impl LazyNode { 19 | #[wasm_bindgen(getter)] 20 | pub fn pair(&self) -> Option<Array> { 21 | match &self.allocator.sexp(self.node) { 22 | SExp::Pair(p1, p2) => { 23 | let r1 = Self::new(self.allocator.clone(), *p1); 24 | let r2 = Self::new(self.allocator.clone(), *p2); 25 | let tuple = Array::new_with_length(2); 26 | tuple.set(0, JsValue::from(r1)); 27 | tuple.set(1, JsValue::from(r2)); 28 | Some(tuple) 29 | } 30 | _ => None, 31 | } 32 | } 33 | 34 | #[wasm_bindgen(getter)] 35 | pub fn atom(&self) -> Option<Vec<u8>> { 36 | match &self.allocator.sexp(self.node) { 37 | SExp::Atom => Some(self.allocator.atom(self.node).as_ref().into()), 38 | _ => None, 39 | } 40 | } 41 | 42 | #[wasm_bindgen] 43 | pub fn to_bytes_with_backref(&self) -> Result<Vec<u8>, String> { 44 | node_to_bytes_backrefs(&self.allocator, self.node).map_err(|e| e.to_string()) 45 | } 46 | 47 | #[wasm_bindgen] 48 | pub fn to_bytes(&self, limit: usize) -> Result<Vec<u8>, String> { 49 | node_to_bytes_limit(&self.allocator, self.node, limit).map_err(|e| e.to_string()) 50 | } 51 | 52 | #[wasm_bindgen] 53 | pub fn from_bytes_with_backref(b: &[u8]) -> Result<LazyNode, String> { 54 | let mut allocator = Allocator::new(); 55 | let node = node_from_bytes_backrefs(&mut allocator, b).map_err(|e| e.to_string())?; 56 | Ok(LazyNode::new(Rc::new(allocator), node)) 57 | } 58 | 59 | #[wasm_bindgen] 60 | pub fn from_bytes(b: &[u8]) -> Result<LazyNode, String> { 61 | let mut allocator = Allocator::new(); 62 | let node = node_from_bytes(&mut allocator, b).map_err(|e| e.to_string())?; 63 | Ok(LazyNode::new(Rc::new(allocator), node))
64 | } 65 | } 66 | 67 | impl LazyNode { 68 | pub const fn new(a: Rc<Allocator>, n: NodePtr) -> Self { 69 | Self { 70 | allocator: a, 71 | node: n, 72 | } 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /wasm/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod flags; 2 | pub mod lazy_node; 3 | pub mod run_program; 4 | pub mod serialize; 5 | 6 | #[cfg(test)] 7 | pub mod tests; 8 | -------------------------------------------------------------------------------- /wasm/src/run_program.rs: -------------------------------------------------------------------------------- 1 | use js_sys::Array; 2 | use std::rc::Rc; 3 | use wasm_bindgen::prelude::*; 4 | 5 | use crate::flags::ALLOW_BACKREFS; 6 | use crate::lazy_node::LazyNode; 7 | use clvmr::allocator::Allocator; 8 | use clvmr::chia_dialect::ChiaDialect; 9 | use clvmr::chia_dialect::NO_UNKNOWN_OPS as _no_unknown_ops; 10 | use clvmr::cost::Cost; 11 | use clvmr::run_program::run_program; 12 | use clvmr::serde::{node_from_bytes, node_from_bytes_backrefs, node_to_bytes}; 13 | 14 | #[wasm_bindgen] 15 | pub struct Flag; 16 | 17 | #[wasm_bindgen] 18 | impl Flag { 19 | #[wasm_bindgen] 20 | pub fn no_unknown_ops() -> u32 { 21 | _no_unknown_ops 22 | } 23 | 24 | #[wasm_bindgen] 25 | pub fn allow_backrefs() -> u32 { 26 | ALLOW_BACKREFS 27 | } 28 | } 29 | 30 | #[wasm_bindgen] 31 | pub fn run_clvm(program: &[u8], args: &[u8], flag: u32) -> Vec<u8> { 32 | let max_cost: Cost = 1_000_000_000_000_000; 33 | 34 | let mut allocator = Allocator::new(); 35 | let deserializer = if (flag & ALLOW_BACKREFS) != 0 { 36 | node_from_bytes_backrefs 37 | } else { 38 | node_from_bytes 39 | }; 40 | let program = deserializer(&mut allocator, program).unwrap(); 41 | let args = deserializer(&mut allocator, args).unwrap(); 42 | let dialect = ChiaDialect::new(flag); 43 | 44 | let r = run_program(&mut allocator, &dialect, program, args, max_cost); 45 | match r { 46 | Ok(reduction) => node_to_bytes(&allocator, reduction.1).unwrap(), 47 | Err(_eval_err) => format!("{:?}", _eval_err).into(), 48 | } 49 | } 50 | 51 | #[wasm_bindgen] 52 | pub fn run_chia_program( 53 | program: &[u8], 54 | args: &[u8], 55 | max_cost: Cost, // Expecting `BigInt` to be passed from JavaScript world 56 | flag: u32, 57 | ) -> Result<Array, String> { 58 | let mut allocator = Allocator::new(); 59 | let deserializer = if (flag & ALLOW_BACKREFS) != 0 { 60 | node_from_bytes_backrefs 61 | } else { 62 | node_from_bytes 63 | }; 64 | let program = deserializer(&mut allocator, program).unwrap(); 65 | let args = deserializer(&mut allocator, args).unwrap(); 66 | let dialect = ChiaDialect::new(flag); 67 | 68 | let r = run_program(&mut allocator, &dialect, program, args, max_cost); 69 | match r { 70 | Ok(reduction) => { 71 | let cost = JsValue::from(reduction.0); 72 | let node = LazyNode::new(Rc::new(allocator), reduction.1); 73 | let val = JsValue::from(node); 74 | 75 | let tuple = Array::new_with_length(2); 76 | tuple.set(0, cost); 77 | tuple.set(1, val); 78 | Ok(tuple) 79 | } 80 | Err(_eval_err) => Err(format!("{:?}", _eval_err)), 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /wasm/src/serialize.rs: -------------------------------------------------------------------------------- 1 | use std::rc::Rc; 2 | use wasm_bindgen::prelude::wasm_bindgen; 3 | 4 | use crate::flags::ALLOW_BACKREFS; 5 | use crate::lazy_node::LazyNode; 6 | use clvmr::serde::{ 7 | node_from_bytes as _node_from_bytes,
node_from_bytes_backrefs, serialized_length_from_bytes, 8 | }; 9 | use clvmr::Allocator; 10 | 11 | #[wasm_bindgen] 12 | pub fn serialized_length(program: &[u8]) -> Result<u64, String> { 13 | serialized_length_from_bytes(program).map_err(|x| x.to_string()) 14 | } 15 | 16 | #[wasm_bindgen] 17 | pub fn node_from_bytes(b: &[u8], flag: u32) -> Result<LazyNode, String> { 18 | let mut allocator = Allocator::new(); 19 | let deserializer = if (flag & ALLOW_BACKREFS) != 0 { 20 | node_from_bytes_backrefs 21 | } else { 22 | _node_from_bytes 23 | }; 24 | let node = deserializer(&mut allocator, b).map_err(|e| e.to_string())?; 25 | Ok(LazyNode::new(Rc::new(allocator), node)) 26 | } 27 | -------------------------------------------------------------------------------- /wasm/src/tests.rs: -------------------------------------------------------------------------------- 1 | //! Test suite for the Web and headless browsers. 2 | 3 | #![cfg(target_arch = "wasm32")] 4 | 5 | use wasm_bindgen_test::*; 6 | 7 | wasm_bindgen_test_configure!(run_in_browser); 8 | 9 | #[wasm_bindgen_test] 10 | fn pass() { 11 | assert_eq!(1 + 1, 2); 12 | } 13 | -------------------------------------------------------------------------------- /wasm/tests/index.js: -------------------------------------------------------------------------------- 1 | const wasm = require("../pkg/clvm_wasm.js"); 2 | 3 | function expect_equal(challenge, expected) { 4 | if (challenge !== expected) { 5 | throw new Error( 6 | `Assertion Error: Expected "${expected}" but actual value was "${challenge}"`, 7 | ); 8 | } 9 | } 10 | 11 | function expect_throw(callback) { 12 | let is_error = undefined; 13 | try { 14 | callback(); 15 | } catch (e) { 16 | is_error = e; 17 | } 18 | 19 | if (!is_error) { 20 | throw new Error("Expected an exception but it was not thrown"); 21 | } 22 | } 23 | 24 | function bytesFromHex(hex) { 25 | return Uint8Array.from(Buffer.from(hex, "hex")); 26 | } 27 | 28 | function numsToByteStr(numArray) { 29 | return Uint8Array.from(numArray).toString(); 30 | } 31 | 32 | let current_test_number = 0; 33 | function test_case(testTitle, test) { 34 | const testNo = ++current_test_number; 35 | console.log(`Case#${testNo} ${testTitle}`); 36 | try { 37 | test(); 38 | console.log(`✓ Successfully finished case#${testNo}`); 39 | } catch (e) { 40 | console.error(`❌ Failed Case#${testNo}`); 41 | console.error(`${e.name}: ${e.message}`); 42 | process.exit(1); 43 | } 44 | } 45 | 46 | // ----------------------------------------------------- // 47 | 48 | test_case("Test '(q . 127)' '()'", function () { 49 | // (q . 127) 50 | const prog = bytesFromHex("ff017f"); 51 | // () 52 | const arg = bytesFromHex("80"); 53 | // 100,000,000,000 54 | const max_cost = BigInt("100000000000"); 55 | const flag = 0; 56 | const [cost, sexp] = wasm.run_chia_program(prog, arg, max_cost, flag); 57 | expect_equal(sexp.atom.toString(), "127"); 58 | }); 59 | 60 | test_case("Test '(+ 1 (q . 3))' '2'", function () { 61 | // (+ 1 (q . 3)) 62 | const prog = bytesFromHex("ff10ff01ffff010380"); 63 | // 2 64 | const arg = bytesFromHex("02"); 65 | // 100,000,000,000 66 | const max_cost = BigInt("100000000000"); 67 | const flag = 0; 68 | const [cost, sexp] = wasm.run_chia_program(prog, arg, max_cost, flag); 69 | expect_equal(sexp.atom.toString(), "5"); 70 | }); 71 | 72 | test_case("Test '(+ 7 (q . 3))' '(() . (() . 2))'", function () { 73 | // (+ 7 (q . 3)) 74 | const prog = bytesFromHex("ff10ff07ffff010380"); 75 | // (() .
2)) 76 | const arg = bytesFromHex("ff80ff8002"); 77 | // 100,000,000,000 78 | const max_cost = BigInt("100000000000"); 79 | const flag = 0; 80 | const [cost, sexp] = wasm.run_chia_program(prog, arg, max_cost, flag); 81 | expect_equal(sexp.atom.toString(), "5"); 82 | }); 83 | 84 | test_case("Test max_cost too low", function () { 85 | // (q . 127) 86 | const prog = bytesFromHex("ff017f"); 87 | // () 88 | const arg = bytesFromHex("80"); 89 | // MaxCost too low 90 | const max_cost = BigInt("1"); 91 | const flag = 0; 92 | expect_throw(function () { 93 | wasm.run_chia_program(prog, arg, max_cost, flag); 94 | }); 95 | }); 96 | 97 | test_case("Test divmod", function () { 98 | // (divmod (q . 5) (q . -3)) 99 | const prog = bytesFromHex("ff14ffff0105ffff0181fd80"); 100 | // () 101 | const arg = bytesFromHex("80"); 102 | // 100,000,000,000 103 | const max_cost = BigInt("100000000000"); 104 | const flag = 0; 105 | const [cost, sexp] = wasm.run_chia_program(prog, arg, max_cost, flag); 106 | expect_equal(sexp.pair[0].atom.toString(), numsToByteStr([-2])); 107 | expect_equal(sexp.pair[1].atom.toString(), numsToByteStr([-1])); 108 | }); 109 | 110 | test_case("Test negative div", function () { 111 | // (/ (q . 5) (q . -3)) 112 | const prog = bytesFromHex("ff13ffff0105ffff0181fd80"); 113 | // () 114 | const arg = bytesFromHex("80"); 115 | // 100,000,000,000 116 | const max_cost = BigInt("100000000000"); 117 | const [cost, sexp] = wasm.run_chia_program(prog, arg, max_cost, 0); 118 | // div rounds towards negative infinity, so this is -2 119 | expect_equal(sexp.atom.toString(), "254"); 120 | }); 121 | 122 | test_case("Test serialized_length", function () { 123 | // (q . 127) 124 | const prog = bytesFromHex("ff017f"); 125 | expect_equal(wasm.serialized_length(prog), BigInt("3")); 126 | expect_throw(function () { 127 | wasm.serialized_length(bytesFromHex("abcdef0123")); 128 | }); 129 | try { 130 | wasm.serialized_length(bytesFromHex("abcdef0123")); 131 | } catch (e) { 132 | expect_equal(e, "bad encoding"); 133 | } 134 | }); 135 | -------------------------------------------------------------------------------- /wheel/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "clvm_rs" 3 | version = "0.14.0" 4 | authors = ["Richard Kiss "] 5 | edition = "2021" 6 | license = "Apache-2.0" 7 | description = "Implementation of `clvm` for Chia Network's cryptocurrency" 8 | homepage = "https://github.com/Chia-Network/clvm_rs/" 9 | repository = "https://github.com/Chia-Network/clvm_rs/" 10 | readme = "../README.md" 11 | 12 | [lib] 13 | name = "clvm_rs" 14 | crate-type = ["cdylib"] 15 | path = "src/lib.rs" 16 | 17 | [dependencies] 18 | clvmr = { workspace = true } 19 | pyo3 = { workspace = true, features = ["abi3-py38", "extension-module"] } 20 | 21 | [features] 22 | openssl = ["clvmr/openssl"] 23 | -------------------------------------------------------------------------------- /wheel/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["maturin>=0.13.0"] 3 | build-backend = "maturin" 4 | 5 | [tool.maturin] 6 | bindings = "pyo3" 7 | python-source = "python" 8 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/__init__.py: -------------------------------------------------------------------------------- 1 | from .eval_error import EvalError 2 | from .program import Program 3 | 4 | 5 | __all__ = ["Program", "EvalError"] 6 | 
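Beyond the `Program` wrapper exported by `__init__.py`, the compiled extension can be driven directly, as `tests/run.py` and the stubs in `clvm_rs.pyi` show. A minimal hedged sketch follows (it assumes the `clvm_rs.clvm_rs` extension module has been built and is importable); the program and environment constants are borrowed from the `(+ 1 (q . 3))` case in `wasm/tests/index.js`, and the cost limit reuses the constant from `tests/run.py`.

```python
from clvm_rs.clvm_rs import run_serialized_chia_program

# (+ 1 (q . 3)) applied to the environment atom 2, the same case as wasm/tests/index.js
program = bytes.fromhex("ff10ff01ffff010380")
environment = bytes.fromhex("02")

cost, node = run_serialized_chia_program(program, environment, 11000000000, 0)

# The result is a LazyNode: an atom exposes `.atom`, a cons cell exposes `.pair`.
assert node.pair is None
assert node.atom == bytes([5])  # 2 + 3
print(f"cost: {cost}")
```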
-------------------------------------------------------------------------------- /wheel/python/clvm_rs/at.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Optional 3 | 4 | from .clvm_storage import CLVMStorage 5 | 6 | 7 | def at(obj: CLVMStorage, position: str) -> Optional[CLVMStorage]: 8 | """ 9 | Take a string of `f` and `r` characters and follow that path. 10 | 11 | Example: 12 | 13 | ``` 14 | p1 = Program.to([10, 20, 30, [15, 17], 40, 50]) 15 | assert Program.to(17) == at(p1, "rrrfrf") 16 | ``` 17 | 18 | Returns `None` if an atom is hit at some intermediate node. 19 | 20 | ``` 21 | p1 = Program.to(10) 22 | assert None == at(p1, "rr") 23 | ``` 24 | 25 | """ 26 | v = obj 27 | for c in position.lower(): 28 | pair = v.pair 29 | if pair is None: 30 | return None 31 | if c == "f": 32 | v = pair[0] 33 | elif c == "r": 34 | v = pair[1] 35 | else: 36 | raise ValueError( 37 | f"`at` got illegal character `{c}`. Only `f` & `r` allowed" 38 | ) 39 | return v 40 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/chia_dialect.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import List 3 | 4 | 5 | @dataclass 6 | class Dialect: 7 | KEYWORDS: List[str] 8 | 9 | NULL: bytes 10 | ONE: bytes 11 | TWO: bytes 12 | Q_KW: bytes 13 | A_KW: bytes 14 | C_KW: bytes 15 | 16 | 17 | CHIA_DIALECT = Dialect( 18 | ( 19 | # core opcodes 0x01-x08 20 | ". q a i c f r l x " 21 | # opcodes on atoms as strings 0x09-0x0f 22 | "= >s sha256 substr strlen concat . " 23 | # opcodes on atoms as ints 0x10-0x17 24 | "+ - * / divmod > ash lsh " 25 | # opcodes on atoms as vectors of bools 0x18-0x1c 26 | "logand logior logxor lognot . " 27 | # opcodes for bls 1381 0x1d-0x1f 28 | "point_add pubkey_for_exp . " 29 | # bool opcodes 0x20-0x23 30 | "not any all . " 31 | # misc 0x24 32 | "softfork " 33 | ).split(), 34 | NULL=bytes.fromhex(""), 35 | ONE=bytes.fromhex("01"), 36 | TWO=bytes.fromhex("02"), 37 | Q_KW=bytes.fromhex("01"), 38 | A_KW=bytes.fromhex("02"), 39 | C_KW=bytes.fromhex("04"), 40 | ) 41 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/clvm_rs.pyi: -------------------------------------------------------------------------------- 1 | from typing import List, Optional, Tuple 2 | 3 | from .clvm_storage import CLVMStorage 4 | 5 | def run_serialized_chia_program( 6 | program: bytes, environment: bytes, max_cost: int, flags: int 7 | ) -> Tuple[int, CLVMStorage]: ... 8 | def deserialize_as_tree( 9 | blob: bytes, calculate_tree_hashes: bool 10 | ) -> Tuple[List[Tuple[int, int, int]], Optional[List[bytes]]]: ... 11 | def serialized_length(blob: bytes) -> int: ... 12 | 13 | NO_NEG_DIV: int 14 | NO_UNKNOWN_OPS: int 15 | LIMIT_HEAP: int 16 | MEMPOOL_MODE: int 17 | 18 | class LazyNode(CLVMStorage): 19 | atom: Optional[bytes] 20 | 21 | @property 22 | def pair(self) -> Optional[Tuple[CLVMStorage, CLVMStorage]]: ... 
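The `deserialize_as_tree` binding declared in `clvm_rs.pyi` returns the flat triple layout documented in `de.py` below: `(serialize_offset, serialize_end, atom_offset)` for atoms and `(serialize_offset, serialize_end, right_index)` for pairs. A small sketch for the three-byte serialization of the cons cell `(1 . 2)`; the expected triples are hand-traced from the pure-Python fallback in `de.py`, so treat them as illustrative rather than authoritative.

```python
from clvm_rs.clvm_rs import deserialize_as_tree

# ff 01 02 is the serialization of the cons cell (1 . 2)
triples, hashes = deserialize_as_tree(bytes.fromhex("ff0102"), False)

# Hand-tracing the fallback in de.py gives:
#   (0, 3, 2)  the pair: spans the whole blob, right child stored at index 2
#   (1, 2, 0)  atom 0x01: a single byte with no length prefix, so atom_offset is 0
#   (2, 3, 0)  atom 0x02
print(triples)
print(hashes)  # None, because calculate_tree_hashes was False
```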
23 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/clvm_storage.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, Tuple, _SpecialForm, cast 2 | 3 | # we support py3.7 which doesn't yet have typing.Protocol 4 | 5 | try: 6 | from typing import Protocol 7 | except ImportError: 8 | Protocol = cast(_SpecialForm, object) 9 | 10 | 11 | class CLVMStorage(Protocol): 12 | atom: Optional[bytes] 13 | 14 | @property 15 | def pair(self) -> Optional[Tuple["CLVMStorage", "CLVMStorage"]]: 16 | ... 17 | 18 | # optional fields used to speed implementations: 19 | 20 | # `_cached_sha256_treehash: Optional[bytes]` is used by `sha256_treehash` 21 | # `_cached_serialization: bytes` is used by `sexp_to_byte_iterator` 22 | # to speed up serialization 23 | 24 | 25 | def is_clvm_storage(obj): 26 | return hasattr(obj, "atom") and hasattr(obj, "pair") 27 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/de.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, List, Optional, Tuple 2 | 3 | from .tree_hash import shatree_atom, shatree_pair 4 | 5 | deserialize_as_tree: Optional[ 6 | Callable[[bytes, bool], Tuple[List[Tuple[int, int, int]], Optional[List[bytes]]]] 7 | ] 8 | 9 | try: 10 | from clvm_rs.clvm_rs import deserialize_as_tree 11 | except ImportError: 12 | deserialize_as_tree = None 13 | 14 | 15 | MAX_SINGLE_BYTE = 0x7F 16 | CONS_BOX_MARKER = 0xFF 17 | 18 | 19 | # ATOM: serialize_offset, serialize_end, atom_offset 20 | # PAIR: serialize_offset, serialize_end, right_index 21 | 22 | Triple = Tuple[int, int, int] 23 | DeserOp = Callable[[bytes, int, List[Triple], List], int] 24 | 25 | 26 | def deserialize_as_tuples( 27 | blob: bytes, cursor: int, calculate_tree_hash: bool 28 | ) -> Tuple[List[Tuple[int, int, int]], Optional[List[bytes]]]: 29 | if deserialize_as_tree: 30 | try: 31 | tree, hashes = deserialize_as_tree(blob, calculate_tree_hash) 32 | except OSError as ex: 33 | raise ValueError(ex) 34 | return tree, hashes 35 | 36 | def save_cursor( 37 | index: int, 38 | blob: bytes, 39 | cursor: int, 40 | obj_list: List[Triple], 41 | op_stack: List[DeserOp], 42 | ) -> int: 43 | blob_index = obj_list[index][0] 44 | assert blob[blob_index] == 0xFF 45 | v0 = obj_list[index][0] 46 | v2 = obj_list[index][2] 47 | obj_list[index] = (v0, cursor, v2) 48 | if calculate_tree_hash: 49 | left_hash = tree_hash_list[index + 1] 50 | hash_index = obj_list[index][2] 51 | right_hash = tree_hash_list[hash_index] 52 | tree_hash_list[index] = shatree_pair(left_hash, right_hash) 53 | return cursor 54 | 55 | def save_index( 56 | index: int, 57 | blob: bytes, 58 | cursor: int, 59 | obj_list: List[Triple], 60 | op_stack: List[DeserOp], 61 | ) -> int: 62 | e = obj_list[index] 63 | obj_list[index] = (e[0], e[1], len(obj_list)) 64 | return cursor 65 | 66 | def parse_obj( 67 | blob: bytes, cursor: int, obj_list: List[Triple], op_stack: List[DeserOp] 68 | ) -> int: 69 | if cursor >= len(blob): 70 | raise ValueError("bad encoding") 71 | 72 | if blob[cursor] == CONS_BOX_MARKER: 73 | index = len(obj_list) 74 | obj_list.append((cursor, 0, 0)) 75 | op_stack.append(lambda *args: save_cursor(index, *args)) 76 | op_stack.append(parse_obj) 77 | op_stack.append(lambda *args: save_index(index, *args)) 78 | op_stack.append(parse_obj) 79 | if calculate_tree_hash: 80 | tree_hash_list.append(b"") 81 | return cursor + 1 82 | 
atom_offset, new_cursor = _atom_size_from_cursor(blob, cursor) 83 | my_hash = None 84 | if calculate_tree_hash: 85 | my_hash = shatree_atom(blob[cursor + atom_offset:new_cursor]) 86 | tree_hash_list.append(my_hash) 87 | obj_list.append((cursor, new_cursor, atom_offset)) 88 | return new_cursor 89 | 90 | obj_list: List[Triple] = [] 91 | tree_hash_list: List[bytes] = [] 92 | op_stack: List[DeserOp] = [parse_obj] 93 | while op_stack: 94 | f = op_stack.pop() 95 | cursor = f(blob, cursor, obj_list, op_stack) 96 | return obj_list, tree_hash_list if calculate_tree_hash else None 97 | 98 | 99 | def _atom_size_from_cursor(blob, cursor) -> Tuple[int, int]: 100 | # return `(size_of_prefix, cursor)` 101 | b = blob[cursor] 102 | if b == 0x80: 103 | return 1, cursor + 1 104 | if b <= MAX_SINGLE_BYTE: 105 | return 0, cursor + 1 106 | bit_count = 0 107 | bit_mask = 0x80 108 | while b & bit_mask: 109 | bit_count += 1 110 | b &= 0xFF ^ bit_mask 111 | bit_mask >>= 1 112 | size_blob = bytes([b]) 113 | if bit_count > 1: 114 | size_blob += blob[cursor + 1:cursor + bit_count] 115 | size = int.from_bytes(size_blob, "big") 116 | new_cursor = cursor + size + bit_count 117 | if new_cursor > len(blob): 118 | raise ValueError("end of stream") 119 | return bit_count, new_cursor 120 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/eval_error.py: -------------------------------------------------------------------------------- 1 | from .ser import sexp_to_bytes 2 | 3 | class EvalError(ValueError): 4 | def __init__(self, message: str, sexp): 5 | super().__init__(message) 6 | self._sexp = sexp 7 | 8 | def __str__(self) -> str: 9 | return f"({self.args[0]}, {sexp_to_bytes(self._sexp).hex()})" 10 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/wheel/python/clvm_rs/py.typed -------------------------------------------------------------------------------- /wheel/python/clvm_rs/replace.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import Dict 3 | 4 | from .casts import CastableType 5 | from .clvm_storage import CLVMStorage 6 | 7 | 8 | def replace(program: CLVMStorage, **kwargs: CastableType) -> CastableType: 9 | # if `kwargs == {}` then `return program` unchanged 10 | if len(kwargs) == 0: 11 | return program 12 | 13 | if "" in kwargs: 14 | if len(kwargs) > 1: 15 | raise ValueError("conflicting paths") 16 | return kwargs[""] 17 | 18 | # we've confirmed that no `kwargs` is the empty string. 
19 | # Now split `kwargs` into two groups: those 20 | # that start with `f` and those that start with `r` 21 | 22 | args_by_prefix: Dict[str, Dict[str, CastableType]] = dict(f={}, r={}) 23 | for k, v in kwargs.items(): 24 | c = k[0] 25 | if c not in "fr": 26 | msg = f"bad path containing {c}: must only contain `f` and `r`" 27 | raise ValueError(msg) 28 | args_by_prefix[c][k[1:]] = v 29 | 30 | pair = program.pair 31 | if pair is None: 32 | raise ValueError("path into atom") 33 | 34 | # recurse down the tree 35 | new_f = replace(pair[0], **args_by_prefix.get("f", {})) 36 | new_r = replace(pair[1], **args_by_prefix.get("r", {})) 37 | 38 | return (new_f, new_r) 39 | -------------------------------------------------------------------------------- /wheel/python/clvm_rs/tree_hash.py: -------------------------------------------------------------------------------- 1 | """ 2 | This is an implementation of `sha256_treehash`, used to calculate 3 | puzzle hashes in clvm. 4 | 5 | This implementation goes to great pains to be non-recursive so we don't 6 | have to worry about blowing out the python stack. 7 | """ 8 | 9 | from hashlib import sha256 10 | from typing import Callable, List, Tuple, cast 11 | 12 | from .clvm_storage import CLVMStorage 13 | 14 | 15 | OP_STACK_F = Callable[[List[CLVMStorage], List[bytes], List["OP_STACK_F"]], None] 16 | 17 | 18 | class Treehasher: 19 | """ 20 | `Treehasher` performs the standard sha256tree hashing in a non-recursive 21 | way so that extremely large objects don't blow out the python stack. 22 | 23 | We also force a `_cached_sha256_treehash` into the hashed sub-objects 24 | whenever possible so that taking the hash of the same sub-tree is 25 | more efficient in future. 26 | """ 27 | 28 | atom_prefix: bytes 29 | pair_prefix: bytes 30 | cache_hits: int 31 | 32 | def __init__(self, atom_prefix: bytes, pair_prefix: bytes): 33 | self.atom_prefix = atom_prefix 34 | self.pair_prefix = pair_prefix 35 | self.cache_hits = 0 36 | 37 | def shatree_atom(self, atom: bytes) -> bytes: 38 | s = sha256() 39 | s.update(self.atom_prefix) 40 | s.update(atom) 41 | return s.digest() 42 | 43 | def shatree_pair(self, left_hash: bytes, right_hash: bytes) -> bytes: 44 | s = sha256() 45 | s.update(self.pair_prefix) 46 | s.update(left_hash) 47 | s.update(right_hash) 48 | return s.digest() 49 | 50 | def sha256_treehash(self, clvm_storage: CLVMStorage) -> bytes: 51 | def handle_obj( 52 | obj_stack: List[CLVMStorage], 53 | hash_stack: List[bytes], 54 | op_stack: List[OP_STACK_F], 55 | ) -> None: 56 | obj = obj_stack.pop() 57 | r = getattr(obj, "_cached_sha256_treehash", None) 58 | if r is not None: 59 | self.cache_hits += 1 60 | hash_stack.append(r) 61 | return 62 | elif obj.atom is not None: 63 | r = shatree_atom(obj.atom) 64 | hash_stack.append(r) 65 | try: 66 | setattr(obj, "_cached_sha256_treehash", r) 67 | except AttributeError: 68 | pass 69 | else: 70 | pair = cast(Tuple[CLVMStorage, CLVMStorage], obj.pair) 71 | p0, p1 = pair 72 | obj_stack.append(obj) 73 | obj_stack.append(p0) 74 | obj_stack.append(p1) 75 | op_stack.append(handle_pair) 76 | op_stack.append(handle_obj) 77 | op_stack.append(handle_obj) 78 | 79 | def handle_pair( 80 | obj_stack: List[CLVMStorage], 81 | hash_stack: List[bytes], 82 | op_stack: List[OP_STACK_F], 83 | ) -> None: 84 | p0 = hash_stack.pop() 85 | p1 = hash_stack.pop() 86 | r = shatree_pair(p0, p1) 87 | hash_stack.append(r) 88 | obj = obj_stack.pop() 89 | try: 90 | setattr(obj, "_cached_sha256_treehash", r) 91 | except AttributeError: 92 | pass 93 | 94 | obj_stack: 
List[CLVMStorage] = [clvm_storage] 95 | op_stack: List[OP_STACK_F] = [handle_obj] 96 | hash_stack: List[bytes] = [] 97 | while len(op_stack) > 0: 98 | op: OP_STACK_F = op_stack.pop() 99 | op(obj_stack, hash_stack, op_stack) 100 | return hash_stack[0] 101 | 102 | 103 | CHIA_TREE_HASH_ATOM_PREFIX = bytes.fromhex("01") 104 | CHIA_TREE_HASH_PAIR_PREFIX = bytes.fromhex("02") 105 | CHIA_TREEHASHER = Treehasher(CHIA_TREE_HASH_ATOM_PREFIX, CHIA_TREE_HASH_PAIR_PREFIX) 106 | 107 | sha256_treehash = CHIA_TREEHASHER.sha256_treehash 108 | shatree_atom = CHIA_TREEHASHER.shatree_atom 109 | shatree_pair = CHIA_TREEHASHER.shatree_pair 110 | -------------------------------------------------------------------------------- /wheel/python/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Chia-Network/clvm_rs/c811cefe74f6b623fe63a80b1b0d6cb039b218fe/wheel/python/tests/__init__.py -------------------------------------------------------------------------------- /wheel/python/tests/test_curry_and_treehash.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from clvm_rs import Program 4 | from clvm_rs.chia_dialect import CHIA_DIALECT 5 | from clvm_rs.curry_and_treehash import CurryTreehasher 6 | 7 | CHIA_CURRY_TREEHASHER = CurryTreehasher(CHIA_DIALECT) 8 | curry_and_treehash = CHIA_CURRY_TREEHASHER.curry_and_treehash 9 | calculate_hash_of_quoted_mod_hash = ( 10 | CHIA_CURRY_TREEHASHER.calculate_hash_of_quoted_mod_hash 11 | ) 12 | 13 | 14 | def test_curry_and_treehash() -> None: 15 | arbitrary_mod = Program.fromhex("ff10ff02ff0580") # `(+ 2 5)` 16 | arbitrary_mod_hash = arbitrary_mod.tree_hash() 17 | 18 | # we don't really care what `arbitrary_mod` is. 
We just need some code 19 | 20 | quoted_mod_hash = calculate_hash_of_quoted_mod_hash(arbitrary_mod_hash) 21 | exp_hash = "9f487f9078d4b215e0cbe2cbdd21215ad6ed8e894ae00d616751e0efdccb25a9" 22 | assert quoted_mod_hash == bytes.fromhex(exp_hash) 23 | 24 | for v in range(500): 25 | args = [v, v * v, v * v * v] 26 | # we don't really care about the arguments either 27 | puzzle = arbitrary_mod.curry(*args) 28 | puzzle_hash_via_curry = puzzle.tree_hash() 29 | hashed_args = [Program.to(_).tree_hash() for _ in args] 30 | puzzle_hash_via_f = curry_and_treehash(quoted_mod_hash, *hashed_args) 31 | assert puzzle_hash_via_curry == puzzle_hash_via_f 32 | puzzle_hash_via_m = arbitrary_mod.curry_hash(*hashed_args) 33 | assert puzzle_hash_via_curry == puzzle_hash_via_m 34 | 35 | 36 | def test_bad_parameter() -> None: 37 | arbitrary_mod = Program.fromhex("ff10ff02ff0580") # `(+ 2 5)` 38 | with pytest.raises(ValueError): 39 | arbitrary_mod.curry_hash(b"foo") 40 | -------------------------------------------------------------------------------- /wheel/src/adapt_response.rs: -------------------------------------------------------------------------------- 1 | use std::rc::Rc; 2 | 3 | use crate::lazy_node::LazyNode; 4 | use clvmr::allocator::Allocator; 5 | use clvmr::reduction::Response; 6 | 7 | use pyo3::exceptions::PyValueError; 8 | use pyo3::prelude::*; 9 | use pyo3::types::PyTuple; 10 | 11 | pub fn adapt_response( 12 | py: Python, 13 | allocator: Allocator, 14 | response: Response, 15 | ) -> PyResult<(u64, LazyNode)> { 16 | match response { 17 | Ok(reduction) => { 18 | let val = LazyNode::new(Rc::new(allocator), reduction.1); 19 | Ok((reduction.0, val)) 20 | } 21 | Err(eval_err) => { 22 | let sexp = LazyNode::new(Rc::new(allocator), eval_err.0).to_object(py); 23 | let msg = eval_err.1.to_object(py); 24 | let tuple = PyTuple::new_bound(py, [msg, sexp]); 25 | let value_error: PyErr = PyValueError::new_err(tuple.to_object(py)); 26 | Err(value_error) 27 | } 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /wheel/src/api.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::useless_conversion)] 2 | use std::io; 3 | 4 | use super::lazy_node::LazyNode; 5 | use crate::adapt_response::adapt_response; 6 | use clvmr::allocator::Allocator; 7 | use clvmr::chia_dialect::ChiaDialect; 8 | use clvmr::cost::Cost; 9 | use clvmr::reduction::Response; 10 | use clvmr::run_program::run_program; 11 | use clvmr::serde::{node_from_bytes, parse_triples, serialized_length_from_bytes, ParsedTriple}; 12 | use clvmr::{LIMIT_HEAP, MEMPOOL_MODE, NO_UNKNOWN_OPS}; 13 | use pyo3::prelude::*; 14 | use pyo3::types::{PyBytes, PyTuple}; 15 | use pyo3::wrap_pyfunction; 16 | 17 | #[pyfunction] 18 | pub fn serialized_length(program: &[u8]) -> PyResult<u64> { 19 | Ok(serialized_length_from_bytes(program)?)
20 | } 21 | 22 | #[pyfunction] 23 | pub fn run_serialized_chia_program( 24 | py: Python, 25 | program: &[u8], 26 | args: &[u8], 27 | max_cost: Cost, 28 | flags: u32, 29 | ) -> PyResult<(u64, LazyNode)> { 30 | let mut allocator = if flags & LIMIT_HEAP != 0 { 31 | Allocator::new_limited(500000000) 32 | } else { 33 | Allocator::new() 34 | }; 35 | 36 | let r: Response = (|| -> PyResult<Response> { 37 | let program = node_from_bytes(&mut allocator, program)?; 38 | let args = node_from_bytes(&mut allocator, args)?; 39 | let dialect = ChiaDialect::new(flags); 40 | 41 | Ok(py.allow_threads(|| run_program(&mut allocator, &dialect, program, args, max_cost))) 42 | })()?; 43 | adapt_response(py, allocator, r) 44 | } 45 | 46 | fn tuple_for_parsed_triple(py: Python<'_>, p: &ParsedTriple) -> PyObject { 47 | let tuple = match p { 48 | ParsedTriple::Atom { 49 | start, 50 | end, 51 | atom_offset, 52 | } => PyTuple::new_bound(py, [*start, *end, *atom_offset as u64]), 53 | ParsedTriple::Pair { 54 | start, 55 | end, 56 | right_index, 57 | } => PyTuple::new_bound(py, [*start, *end, *right_index as u64]), 58 | }; 59 | tuple.into_py(py) 60 | } 61 | 62 | #[pyfunction] 63 | fn deserialize_as_tree( 64 | py: Python, 65 | blob: &[u8], 66 | calculate_tree_hashes: bool, 67 | ) -> PyResult<(Vec<PyObject>, Option<Vec<PyObject>>)> { 68 | let mut cursor = io::Cursor::new(blob); 69 | let (r, tree_hashes) = parse_triples(&mut cursor, calculate_tree_hashes)?; 70 | let r = r.iter().map(|pt| tuple_for_parsed_triple(py, pt)).collect(); 71 | let s = tree_hashes.map(|ths| { 72 | ths.iter() 73 | .map(|b| PyBytes::new_bound(py, b).into()) 74 | .collect() 75 | }); 76 | Ok((r, s)) 77 | } 78 | 79 | #[pymodule] 80 | fn clvm_rs(_py: Python, m: &Bound<'_, PyModule>) -> PyResult<()> { 81 | m.add_function(wrap_pyfunction!(run_serialized_chia_program, m)?)?; 82 | m.add_function(wrap_pyfunction!(serialized_length, m)?)?; 83 | m.add_function(wrap_pyfunction!(deserialize_as_tree, m)?)?; 84 | 85 | m.add("NO_UNKNOWN_OPS", NO_UNKNOWN_OPS)?; 86 | m.add("LIMIT_HEAP", LIMIT_HEAP)?; 87 | m.add("MEMPOOL_MODE", MEMPOOL_MODE)?; 88 | m.add_class::<LazyNode>()?; 89 | 90 | Ok(()) 91 | } 92 | -------------------------------------------------------------------------------- /wheel/src/lazy_node.rs: -------------------------------------------------------------------------------- 1 | use clvmr::allocator::{Allocator, NodePtr, SExp}; 2 | use std::rc::Rc; 3 | 4 | use pyo3::prelude::*; 5 | use pyo3::types::{PyBytes, PyTuple}; 6 | 7 | #[pyclass(subclass, unsendable)] 8 | #[derive(Clone)] 9 | pub struct LazyNode { 10 | allocator: Rc<Allocator>, 11 | node: NodePtr, 12 | } 13 | 14 | impl ToPyObject for LazyNode { 15 | fn to_object(&self, py: Python<'_>) -> PyObject { 16 | let node: Bound<LazyNode> = Bound::new(py, self.clone()).unwrap(); 17 | node.to_object(py) 18 | } 19 | } 20 | 21 | #[pymethods] 22 | impl LazyNode { 23 | #[getter(pair)] 24 | pub fn pair(&self, py: Python) -> PyResult<Option<PyObject>> { 25 | match &self.allocator.sexp(self.node) { 26 | SExp::Pair(p1, p2) => { 27 | let r1 = Self::new(self.allocator.clone(), *p1); 28 | let r2 = Self::new(self.allocator.clone(), *p2); 29 | let v = PyTuple::new_bound(py, &[r1, r2]); 30 | Ok(Some(v.into())) 31 | } 32 | _ => Ok(None), 33 | } 34 | } 35 | 36 | #[getter(atom)] 37 | pub fn atom(&self, py: Python) -> Option<PyObject> { 38 | match &self.allocator.sexp(self.node) { 39 | SExp::Atom => { 40 | Some(PyBytes::new_bound(py, self.allocator.atom(self.node).as_ref()).into()) 41 | } 42 | _ => None, 43 | } 44 | } 45 | } 46 | 47 | impl LazyNode { 48 | pub const fn new(a: Rc<Allocator>, n: NodePtr) -> Self { 49 | Self { 50 |
allocator: a, 51 | node: n, 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /wheel/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod adapt_response; 2 | pub mod api; 3 | pub mod lazy_node; 4 | --------------------------------------------------------------------------------
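Taken together, `wheel/src/api.rs` defines the entire surface registered as `clvm_rs.clvm_rs`: three functions, three flag constants, and the `LazyNode` class. A closing sketch of that surface follows (assuming the wheel has been built and installed); the expected length of 3 for `ff017f` mirrors the `serialized_length` case in `wasm/tests/index.js`.

```python
from clvm_rs.clvm_rs import (
    LIMIT_HEAP,
    MEMPOOL_MODE,
    NO_UNKNOWN_OPS,
    LazyNode,
    deserialize_as_tree,
    run_serialized_chia_program,
    serialized_length,
)

# (q . 127) occupies three bytes on the wire
assert serialized_length(bytes.fromhex("ff017f")) == 3

# The flag constants are plain integer bitmasks re-exported from clvmr
print(hex(NO_UNKNOWN_OPS), hex(LIMIT_HEAP), hex(MEMPOOL_MODE))
```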