├── .cargo └── config.toml ├── .clippy.toml ├── .config ├── en_US.dic ├── forest.dic ├── lychee.toml ├── nextest.toml ├── spellcheck.md └── spellcheck.toml ├── .dockerignore ├── .eslintrc.yml ├── .gitattributes ├── .github ├── .prettierrc ├── CARGO_ADVISORIES_ISSUE_TEMPLATE.md ├── CHECKPOINT_ISSUE_TEMPLATE.md ├── CODEOWNERS ├── DOCKER_ISSUE_TEMPLATE.md ├── ISSUE_TEMPLATE │ ├── 1-bug_report.md │ ├── 2-user_request.md │ ├── 3-epic.md │ ├── 4-task.md │ ├── 5-other.md │ └── config.yml ├── PULL_REQUEST_TEMPLATE.md ├── RPC_PARITY_ISSUE_TEMPLATE.md ├── SNAPSHOT_PARITY_ISSUE_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── butterflynet.yml │ ├── cargo-advisories.yml │ ├── checkpoints.yml │ ├── curio-devnet-publish.yml │ ├── docker-latest-tag.yml │ ├── docker.yml │ ├── dockerfile-check.yml │ ├── docs-auto-update.yml │ ├── docs-check.yml │ ├── docs-deploy.yml │ ├── docs-required-override.yml │ ├── forest.yml │ ├── link-check.yml │ ├── lotus-api-bump.yml │ ├── lotus-devnet-publish.yml │ ├── release.yml │ ├── release_dispatch.yml │ ├── rpc-parity.yml │ ├── rpc_test_repeat.yml │ ├── rust-lint.yml │ ├── scripts-lint.yml │ ├── snapshot-parity.yml │ └── unit-tests.yml ├── .gitignore ├── .prettierignore ├── .yarnrc.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── Dockerfile-ci ├── FUNDING.json ├── LICENSE-APACHE ├── LICENSE-MIT ├── Makefile ├── README.md ├── benches ├── car-index.rs └── example-benchmark.rs ├── build.rs ├── build ├── bootstrap │ ├── butterflynet │ ├── calibnet │ └── mainnet ├── known_blocks.yaml ├── manifest.json └── vendored-docs-redirect.index.html ├── deny.toml ├── docs ├── .bookignore ├── .gitattributes ├── .gitignore ├── .spellcheck.yml ├── Makefile ├── README.md ├── babel.config.js ├── devSidebars.js ├── dictionary.txt ├── docs │ ├── developers │ │ ├── guides │ │ │ ├── _category_.json │ │ │ ├── network_upgrades.md │ │ │ └── rpc_test_snapshot.md │ │ └── introduction.md │ └── users │ │ ├── filecoin_services.md │ │ ├── 
getting_started │ │ ├── _category_.json │ │ ├── hardware-reqs.md │ │ ├── install.md │ │ └── syncing.md │ │ ├── guides │ │ ├── _category_.json │ │ ├── advanced │ │ │ ├── _category_.json │ │ │ ├── backups.md │ │ │ └── generating_snapshots.md │ │ ├── gc.md │ │ ├── interacting_with_wallets.md │ │ ├── methods_filtering.md │ │ ├── monitoring │ │ │ ├── _category_.json │ │ │ ├── best_practices.md │ │ │ ├── health_checks.md │ │ │ ├── logs.md │ │ │ └── metrics.md │ │ ├── running_bootstrap_node.md │ │ ├── running_with_curio.md │ │ └── running_with_gateway.md │ │ ├── introduction.md │ │ ├── knowledge_base │ │ ├── _category_.json │ │ ├── docker_tips.md │ │ ├── jwt_handling.md │ │ ├── network_upgrades_state_migrations.md │ │ └── snapshot_service.md │ │ ├── openrpc.json │ │ └── reference │ │ ├── _category_.json │ │ ├── cli.md │ │ ├── cli.sh │ │ ├── env_variables.md │ │ ├── generate_cli_md.sh │ │ ├── json_rpc.md │ │ └── metrics.md ├── docusaurus.config.js ├── package.json ├── src │ └── css │ │ └── index.css ├── static │ ├── .nojekyll │ └── img │ │ ├── chainsafe_logo.png │ │ ├── favicon.ico │ │ ├── filecoin_logo.png │ │ ├── logo-with-text.png │ │ └── logo.png ├── tsconfig.json ├── userSidebars.js └── yarn.lock ├── documentation ├── .gitignore ├── book.toml └── src │ ├── SUMMARY.md │ ├── developer_documentation │ ├── archie_and_fuzzy.md │ ├── chain_index_spike.md │ ├── chain_muxer_state_machine.md │ ├── database_migrations.md │ ├── devnet_notes.md │ ├── heaptrack │ │ ├── bottom_up.png │ │ ├── caller_callee.png │ │ ├── consumed.png │ │ ├── flamegraph.png │ │ ├── sizes.png │ │ └── summary.png │ ├── introduction.md │ ├── local_actions.md │ ├── memory-analysis.md │ ├── release_checklist.md │ ├── rpc_api_compatibility.md │ ├── state_migration_guide.md │ └── test_plan.md │ ├── img │ └── forest_logo.png │ ├── introduction.md │ ├── offline-forest.md │ └── trouble_shooting.md ├── f3-sidecar ├── .gitignore ├── README.md ├── api.go ├── ec.go ├── ec_test.go ├── f3manifest_2k.json ├── 
f3manifest_butterfly.json ├── f3manifest_calibnet.json ├── f3manifest_mainnet.json ├── ffi_gen.go ├── ffi_impl.go ├── go.mod ├── go.sum ├── main.go ├── manifest.go ├── p2p.go ├── pubsub.go ├── run.go ├── types.go ├── utils.go └── utils_test.go ├── go.work ├── interop-tests ├── Cargo.toml ├── README.md ├── build.rs └── src │ ├── lib.rs │ └── tests │ ├── bitswap_go_compat.rs │ ├── go_app │ ├── .gitignore │ ├── bitswap_impl.go │ ├── common.go │ ├── go.mod │ ├── go.sum │ └── kad_impl.go │ ├── go_ffi.rs │ ├── kad_go_compat.rs │ └── mod.rs ├── monitoring ├── README.md ├── docker-compose.yml ├── grafana │ ├── dashboards │ │ ├── README.md │ │ └── forest.json │ └── provisioning │ │ ├── dashboards │ │ └── dashboard.yml │ │ └── datasources │ │ └── datasource.yml └── prometheus │ └── prometheus.yml ├── package.json ├── proto └── bitswap_pb.proto ├── rust-toolchain.toml ├── scripts ├── add_license.sh ├── copyright.txt ├── db_params_hyperfine.sh ├── devnet-curio │ ├── .env │ ├── README.md │ ├── curio.dockerfile │ ├── curio.env │ ├── docker-compose.yml │ ├── forest_config.toml.tpl │ ├── lotus-miner.env │ ├── lotus.env │ └── run_curio.sh ├── devnet │ ├── .env │ ├── README.md │ ├── check.sh │ ├── docker-compose.yml │ ├── forest_ci.dockerfile │ ├── forest_config.toml.tpl │ ├── lotus-miner.env │ ├── lotus.dockerfile │ ├── lotus.env │ └── setup.sh ├── linters │ └── find_unused_deps.rb ├── s3 │ ├── requirement.txt │ └── set_sccache_do_bucket_lifecycle.py └── tests │ ├── api_compare │ ├── .env │ ├── api_compare.sh │ ├── docker-compose.yml │ ├── filter-list │ ├── filter-list-offline │ └── setup.sh │ ├── bootstrapper │ ├── .env │ ├── README.md │ ├── docker-compose-forest.yml │ ├── docker-compose-lotus.yml │ └── test_bootstrapper.sh │ ├── butterflynet_check.sh │ ├── calibnet_db_migration.sh │ ├── calibnet_eth_mapping_check.sh │ ├── calibnet_export_check.sh │ ├── calibnet_kademlia_check.sh │ ├── calibnet_migration_regression_tests.sh │ ├── calibnet_no_discovery_check.sh │ ├── 
calibnet_other_check.sh │ ├── calibnet_stateless_mode_check.sh │ ├── calibnet_stateless_rpc_check.sh │ ├── calibnet_wallet_check.sh │ ├── forest_cli_check.sh │ ├── harness.sh │ ├── snapshot_parity │ ├── .env │ ├── docker-compose.yml │ └── setup.sh │ ├── test_data │ └── calibnet_block_3000.json │ └── upload_rcpsnaps.sh ├── src ├── auth │ └── mod.rs ├── beacon │ ├── beacon_entries.rs │ ├── drand.rs │ ├── mock_beacon.rs │ ├── mod.rs │ ├── signatures │ │ ├── mod.rs │ │ ├── public_key_impls.rs │ │ ├── signature_impls.rs │ │ └── tests.rs │ └── tests │ │ └── drand.rs ├── bin │ ├── forest-cli.rs │ ├── forest-tool.rs │ ├── forest-wallet.rs │ └── forest.rs ├── blocks │ ├── block.rs │ ├── chain4u.rs │ ├── election_proof.rs │ ├── gossip_block.rs │ ├── header.rs │ ├── mod.rs │ ├── tests │ │ ├── calibnet │ │ │ └── HEAD │ │ ├── serialization-vectors │ │ │ ├── README.md │ │ │ ├── block_headers.json │ │ │ ├── message_signing.json │ │ │ └── unsigned_messages.json │ │ ├── serialization_vectors.rs │ │ └── ticket_test.rs │ ├── ticket.rs │ ├── tipset.rs │ └── vrf_proof.rs ├── chain │ ├── mod.rs │ ├── store │ │ ├── base_fee.rs │ │ ├── chain_store.rs │ │ ├── errors.rs │ │ ├── index.rs │ │ ├── mod.rs │ │ └── tipset_tracker.rs │ └── weight.rs ├── chain_sync │ ├── bad_block_cache.rs │ ├── chain_follower.rs │ ├── chain_muxer.rs │ ├── consensus.rs │ ├── metrics.rs │ ├── mod.rs │ ├── network_context.rs │ ├── sync_status.rs │ ├── tipset_syncer.rs │ └── validation.rs ├── cid_collections │ ├── hash_map.rs │ ├── hash_set.rs │ ├── mod.rs │ └── small_cid_vec.rs ├── cli │ ├── humantoken.rs │ ├── main.rs │ ├── mod.rs │ └── subcommands │ │ ├── auth_cmd.rs │ │ ├── chain_cmd.rs │ │ ├── chain_cmd │ │ └── prune.rs │ │ ├── config_cmd.rs │ │ ├── f3_cmd.rs │ │ ├── f3_cmd │ │ ├── certificate.tpl │ │ ├── manifest.tpl │ │ ├── progress.tpl │ │ └── tests.rs │ │ ├── healthcheck_cmd.rs │ │ ├── info_cmd.rs │ │ ├── mod.rs │ │ ├── mpool_cmd.rs │ │ ├── net_cmd.rs │ │ ├── send_cmd.rs │ │ ├── shutdown_cmd.rs │ │ ├── 
snapshot_cmd.rs │ │ ├── state_cmd.rs │ │ ├── sync_cmd.rs │ │ └── wait_api_cmd.rs ├── cli_shared │ ├── cli │ │ ├── client.rs │ │ ├── completion_cmd.rs │ │ ├── config.rs │ │ └── mod.rs │ ├── logger │ │ └── mod.rs │ ├── mod.rs │ └── snapshot.rs ├── daemon │ ├── bundle.rs │ ├── context.rs │ ├── db_util.rs │ ├── main.rs │ └── mod.rs ├── db │ ├── blockstore_with_read_cache.rs │ ├── blockstore_with_write_buffer.rs │ ├── car │ │ ├── any.rs │ │ ├── forest.rs │ │ ├── forest │ │ │ └── index │ │ │ │ ├── hash.rs │ │ │ │ └── mod.rs │ │ ├── many.rs │ │ ├── mod.rs │ │ └── plain.rs │ ├── db_mode.rs │ ├── gc │ │ ├── mod.rs │ │ └── snapshot.rs │ ├── memory.rs │ ├── migration │ │ ├── db_migration.rs │ │ ├── migration_map.rs │ │ ├── mod.rs │ │ ├── v0_22_1.rs │ │ ├── v0_26_0.rs │ │ └── void_migration.rs │ ├── mod.rs │ ├── parity_db.rs │ ├── parity_db_config.rs │ ├── tests │ │ ├── db_utils │ │ │ ├── mod.rs │ │ │ └── parity.rs │ │ ├── mem_test.rs │ │ ├── parity_test.rs │ │ └── subtests │ │ │ └── mod.rs │ └── ttl │ │ └── mod.rs ├── documentation.rs ├── eth │ ├── eip_1559_transaction.rs │ ├── eip_155_transaction.rs │ ├── homestead_transaction.rs │ ├── mod.rs │ └── transaction.rs ├── f3 │ ├── go_ffi.rs │ └── mod.rs ├── fil_cns │ ├── mod.rs │ ├── validation.rs │ └── weight.rs ├── genesis │ ├── export40.car │ └── mod.rs ├── health │ ├── endpoints.rs │ └── mod.rs ├── interpreter │ ├── errors.rs │ ├── fvm2.rs │ ├── fvm3.rs │ ├── fvm4.rs │ ├── mod.rs │ └── vm.rs ├── ipld │ ├── mod.rs │ ├── selector │ │ ├── empty_map.rs │ │ └── mod.rs │ ├── tests │ │ ├── cbor_test.rs │ │ ├── ipld-traversal-vectors │ │ │ ├── selector_explore.json │ │ │ ├── selector_walk.json │ │ │ └── selector_walk_links.json │ │ ├── selector_explore.rs │ │ └── selector_gen_tests.rs │ └── util.rs ├── key_management │ ├── errors.rs │ ├── keystore.rs │ ├── mod.rs │ ├── tests │ │ └── keystore_encrypted_old │ │ │ └── keystore │ ├── wallet.rs │ └── wallet_helpers.rs ├── lib.rs ├── libp2p │ ├── behaviour.rs │ ├── chain_exchange │ │ ├── 
behaviour.rs │ │ ├── message.rs │ │ ├── mod.rs │ │ └── provider.rs │ ├── config.rs │ ├── discovery.rs │ ├── gossip_params.rs │ ├── hello │ │ ├── behaviour.rs │ │ ├── codec.rs │ │ ├── message.rs │ │ └── mod.rs │ ├── keypair.rs │ ├── metrics.rs │ ├── mod.rs │ ├── peer_manager.rs │ ├── ping.rs │ ├── rpc │ │ ├── decoder.rs │ │ └── mod.rs │ ├── service.rs │ └── tests │ │ └── decode_test.rs ├── libp2p_bitswap │ ├── behaviour.rs │ ├── bitswap_pb.rs │ ├── internals │ │ ├── codec.rs │ │ ├── event_handlers.rs │ │ ├── mod.rs │ │ └── prefix.rs │ ├── message.rs │ ├── metrics.rs │ ├── mod.rs │ ├── request_manager.rs │ ├── store.rs │ └── tests │ │ └── request_manager.rs ├── lotus_json │ ├── actor_state.rs │ ├── actor_states │ │ ├── account_state.rs │ │ ├── cron_state.rs │ │ ├── entry.rs │ │ ├── evm_state.rs │ │ ├── market_state.rs │ │ ├── miner_state.rs │ │ ├── mod.rs │ │ ├── system_state.rs │ │ └── vesting_funds.rs │ ├── address.rs │ ├── allocation.rs │ ├── beacon_entry.rs │ ├── beneficiary_term.rs │ ├── big_int.rs │ ├── bit_field.rs │ ├── block_header.rs │ ├── bytecode_hash.rs │ ├── cid.rs │ ├── duration.rs │ ├── election_proof.rs │ ├── extended_sector_info.rs │ ├── gossip_block.rs │ ├── hash_map.rs │ ├── ipld.rs │ ├── key_info.rs │ ├── message.rs │ ├── miner_info.rs │ ├── miner_power.rs │ ├── mod.rs │ ├── nonempty.rs │ ├── opt.rs │ ├── pending_beneficiary_change.rs │ ├── po_st_proof.rs │ ├── power_claim.rs │ ├── raw_bytes.rs │ ├── receipt.rs │ ├── registered_po_st_proof.rs │ ├── registered_seal_proof.rs │ ├── sector_info.rs │ ├── sector_size.rs │ ├── signature.rs │ ├── signature_type.rs │ ├── signed_message.rs │ ├── ticket.rs │ ├── tipset_keys.rs │ ├── token_amount.rs │ ├── tombstone.rs │ ├── transient_data.rs │ ├── vec.rs │ ├── vec_u8.rs │ ├── verifreg_claim.rs │ └── vrf_proof.rs ├── message │ ├── chain_message.rs │ ├── mod.rs │ ├── signed_message.rs │ └── tests │ │ └── builder_test.rs ├── message_pool │ ├── block_prob.rs │ ├── config.rs │ ├── errors.rs │ ├── mod.rs │ ├── 
msg_chain.rs │ └── msgpool │ │ ├── metrics.rs │ │ ├── mod.rs │ │ ├── msg_pool.rs │ │ ├── provider.rs │ │ ├── selection.rs │ │ ├── test_provider.rs │ │ └── utils.rs ├── metrics │ ├── db.rs │ └── mod.rs ├── networks │ ├── actors_bundle.rs │ ├── butterflynet │ │ └── mod.rs │ ├── calibnet │ │ ├── genesis.car │ │ └── mod.rs │ ├── devnet │ │ └── mod.rs │ ├── drand.rs │ ├── mainnet │ │ ├── genesis.car │ │ └── mod.rs │ ├── metrics.rs │ └── mod.rs ├── rpc │ ├── actor_registry.rs │ ├── auth_layer.rs │ ├── channel.rs │ ├── client.rs │ ├── error.rs │ ├── filter_layer.rs │ ├── filter_list.rs │ ├── log_layer.rs │ ├── methods │ │ ├── auth.rs │ │ ├── beacon.rs │ │ ├── chain.rs │ │ ├── chain │ │ │ └── types.rs │ │ ├── common.rs │ │ ├── eth.rs │ │ ├── eth │ │ │ ├── errors.rs │ │ │ ├── eth_tx.rs │ │ │ ├── filter │ │ │ │ ├── event.rs │ │ │ │ ├── mempool.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── store.rs │ │ │ │ └── tipset.rs │ │ │ ├── trace.rs │ │ │ ├── types.rs │ │ │ └── utils.rs │ │ ├── f3.rs │ │ ├── f3 │ │ │ ├── contract_manifest_golden.json │ │ │ ├── contract_return.hex │ │ │ ├── types.rs │ │ │ └── util.rs │ │ ├── gas.rs │ │ ├── market.rs │ │ ├── miner.rs │ │ ├── misc.rs │ │ ├── mpool.rs │ │ ├── msig.rs │ │ ├── net.rs │ │ ├── net │ │ │ └── types.rs │ │ ├── node.rs │ │ ├── state.rs │ │ ├── state │ │ │ └── types.rs │ │ ├── sync.rs │ │ ├── sync │ │ │ └── types.rs │ │ └── wallet.rs │ ├── metrics_layer.rs │ ├── mod.rs │ ├── reflect │ │ ├── jsonrpc_types.rs │ │ ├── mod.rs │ │ ├── parser.rs │ │ └── util.rs │ ├── request.rs │ ├── segregation_layer.rs │ ├── set_extension_layer.rs │ ├── snapshots │ │ ├── .gitattributes │ │ ├── .gitignore │ │ └── README.md │ └── types │ │ ├── address_impl.rs │ │ ├── deal_impl.rs │ │ ├── mod.rs │ │ ├── sector_impl.rs │ │ ├── tests.rs │ │ └── tsk_impl.rs ├── shim │ ├── actors │ │ ├── builtin │ │ │ ├── account │ │ │ │ └── mod.rs │ │ │ ├── cron │ │ │ │ └── mod.rs │ │ │ ├── datacap │ │ │ │ └── mod.rs │ │ │ ├── eam.rs │ │ │ ├── evm │ │ │ │ └── mod.rs │ │ │ ├── init │ │ │ 
│ └── mod.rs │ │ │ ├── market │ │ │ │ ├── ext │ │ │ │ │ ├── balance_table.rs │ │ │ │ │ ├── mod.rs │ │ │ │ │ └── state.rs │ │ │ │ └── mod.rs │ │ │ ├── miner │ │ │ │ ├── ext │ │ │ │ │ ├── deadline.rs │ │ │ │ │ ├── mod.rs │ │ │ │ │ ├── partition.rs │ │ │ │ │ └── state.rs │ │ │ │ └── mod.rs │ │ │ ├── mod.rs │ │ │ ├── multisig │ │ │ │ ├── ext │ │ │ │ │ ├── mod.rs │ │ │ │ │ └── state.rs │ │ │ │ └── mod.rs │ │ │ ├── power │ │ │ │ ├── ext.rs │ │ │ │ └── mod.rs │ │ │ ├── reward │ │ │ │ └── mod.rs │ │ │ ├── system │ │ │ │ └── mod.rs │ │ │ └── verifreg │ │ │ │ ├── ext │ │ │ │ ├── mod.rs │ │ │ │ └── state.rs │ │ │ │ └── mod.rs │ │ ├── common.rs │ │ ├── convert.rs │ │ ├── macros.rs │ │ ├── mod.rs │ │ ├── state_load.rs │ │ └── version.rs │ ├── address.rs │ ├── bigint.rs │ ├── clock.rs │ ├── crypto.rs │ ├── deal.rs │ ├── econ.rs │ ├── error.rs │ ├── executor.rs │ ├── externs.rs │ ├── gas.rs │ ├── kernel.rs │ ├── machine │ │ ├── manifest.rs │ │ └── mod.rs │ ├── message.rs │ ├── mod.rs │ ├── piece.rs │ ├── randomness.rs │ ├── sector.rs │ ├── state_tree.rs │ ├── state_tree_v0.rs │ ├── trace.rs │ └── version.rs ├── state_manager │ ├── cache.rs │ ├── chain_rand.rs │ ├── circulating_supply.rs │ ├── errors.rs │ ├── mod.rs │ └── utils.rs ├── state_migration │ ├── common │ │ ├── macros │ │ │ ├── mod.rs │ │ │ ├── system.rs │ │ │ └── verifier.rs │ │ ├── migration_job.rs │ │ ├── migrators.rs │ │ ├── mod.rs │ │ ├── state_migration.rs │ │ └── verifier.rs │ ├── mod.rs │ ├── nv17 │ │ ├── datacap.rs │ │ ├── migration.rs │ │ ├── miner.rs │ │ ├── mod.rs │ │ ├── util.rs │ │ └── verifreg_market.rs │ ├── nv18 │ │ ├── eam.rs │ │ ├── eth_account.rs │ │ ├── init.rs │ │ ├── migration.rs │ │ └── mod.rs │ ├── nv19 │ │ ├── migration.rs │ │ ├── miner.rs │ │ ├── mod.rs │ │ └── power.rs │ ├── nv21 │ │ ├── migration.rs │ │ ├── miner.rs │ │ └── mod.rs │ ├── nv21fix │ │ ├── migration.rs │ │ └── mod.rs │ ├── nv21fix2 │ │ ├── migration.rs │ │ └── mod.rs │ ├── nv22 │ │ ├── market.rs │ │ ├── migration.rs │ │ ├── 
miner.rs │ │ └── mod.rs │ ├── nv22fix │ │ ├── migration.rs │ │ └── mod.rs │ ├── nv23 │ │ ├── migration.rs │ │ ├── mining_reserve.rs │ │ └── mod.rs │ ├── nv24 │ │ ├── migration.rs │ │ ├── mod.rs │ │ └── power.rs │ ├── nv25 │ │ ├── evm.rs │ │ ├── migration.rs │ │ ├── miner.rs │ │ └── mod.rs │ ├── nv26fix │ │ ├── migration.rs │ │ └── mod.rs │ ├── tests │ │ ├── data │ │ │ └── .gitignore │ │ └── mod.rs │ └── type_migrations │ │ ├── evm │ │ ├── mod.rs │ │ └── state_v15_to_v16.rs │ │ ├── init │ │ ├── mod.rs │ │ └── state_v9_to_v10.rs │ │ ├── market │ │ ├── mod.rs │ │ └── state_v8_to_v9.rs │ │ ├── miner │ │ ├── deadline_v15_to_v16.rs │ │ ├── deadlines_v15_to_v16.rs │ │ ├── info_v8_to_v9.rs │ │ ├── mod.rs │ │ ├── power_pair_v11_to_v12.rs │ │ ├── power_pair_v8_to_v9.rs │ │ ├── sector_onchain_info_v11_to_v12.rs │ │ ├── sector_onchain_info_v8_to_v9.rs │ │ ├── sector_precommit_info_v8_to_v9.rs │ │ ├── sector_precommit_onchain_info_v8_to_v9.rs │ │ ├── state_v10_to_v11.rs │ │ ├── state_v15_to_v16.rs │ │ ├── state_v8_to_v9.rs │ │ └── vesting_funds_v15_to_v16.rs │ │ └── mod.rs ├── statediff │ ├── mod.rs │ └── resolve.rs ├── test_utils │ └── mod.rs ├── tool │ ├── main.rs │ ├── mod.rs │ ├── offline_server │ │ ├── mod.rs │ │ └── server.rs │ └── subcommands │ │ ├── api_cmd.rs │ │ ├── api_cmd │ │ ├── api_compare_tests.rs │ │ ├── contracts │ │ │ ├── compile.sh │ │ │ ├── invoke_cthulhu.hex │ │ │ └── invoke_cthulhu.sol │ │ ├── generate_test_snapshot.rs │ │ ├── test_snapshot.rs │ │ ├── test_snapshots.txt │ │ └── test_snapshots_ignored.txt │ │ ├── archive_cmd.rs │ │ ├── backup_cmd.rs │ │ ├── benchmark_cmd.rs │ │ ├── car_cmd.rs │ │ ├── db_cmd.rs │ │ ├── fetch_params_cmd.rs │ │ ├── index_cmd.rs │ │ ├── mod.rs │ │ ├── net_cmd.rs │ │ ├── shed_cmd.rs │ │ ├── shed_cmd │ │ └── migration.rs │ │ ├── snapshot_cmd.rs │ │ └── state_migration_cmd.rs ├── utils │ ├── cid │ │ └── mod.rs │ ├── db │ │ ├── car_stream.rs │ │ ├── car_util.rs │ │ └── mod.rs │ ├── encoding │ │ ├── cid_de_cbor.rs │ │ ├── 
fallback_de_ipld_dagcbor.rs │ │ └── mod.rs │ ├── flume │ │ └── mod.rs │ ├── io │ │ ├── mmap.rs │ │ ├── mod.rs │ │ ├── progress_log.rs │ │ └── writer_checksum.rs │ ├── misc │ │ ├── adaptive_value_provider.rs │ │ ├── env.rs │ │ ├── logo.rs │ │ └── mod.rs │ ├── mod.rs │ ├── monitoring │ │ ├── mem_tracker.rs │ │ └── mod.rs │ ├── multihash.rs │ ├── net.rs │ ├── net │ │ └── download_file.rs │ ├── p2p │ │ └── mod.rs │ ├── proofs_api │ │ ├── mod.rs │ │ ├── parameters.json │ │ ├── parameters.rs │ │ └── paramfetch.rs │ ├── rand │ │ └── mod.rs │ ├── reqwest_resume │ │ ├── mod.rs │ │ └── tests.rs │ ├── stats │ │ └── mod.rs │ ├── stream.rs │ ├── tests │ │ └── files.rs │ └── version │ │ └── mod.rs └── wallet │ ├── main.rs │ ├── mod.rs │ └── subcommands │ ├── mod.rs │ └── wallet_cmd.rs ├── taplo.toml ├── test-snapshots ├── carv2.car.zst ├── chain4.car ├── chain4.car.zst └── chain4.forest.car.zst ├── tests ├── cmd_state_migration_tests.rs ├── common │ └── mod.rs ├── config.rs ├── db_migration_tests.rs ├── db_mode_tests.rs ├── import_snapshot_tests.rs ├── keystore_tests.rs ├── lint.rs ├── lints │ └── mod.rs └── tool_tests.rs └── yarn.lock /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [alias] 2 | # Permits `cargo cli --chain calibnet ...` 3 | cli = "run --bin forest-cli --" 4 | daemon = "run --bin forest --" 5 | forest-tool = "run --bin forest-tool --release --" 6 | 7 | [build] 8 | incremental = true 9 | 10 | # TODO(aatifsyed): remove - this can be pushed out to readme 11 | # In all cases, pass --cfg=tokio_unstable for tokio console integration 12 | # See (https://github.com/ChainSafe/forest/pull/2245) 13 | # Note that this may be overridden by user configuration at ~/.cargo/config.toml 14 | rustflags = ["--cfg=tokio_unstable"] 15 | 16 | [net] 17 | git-fetch-with-cli = true 18 | retry = 5 19 | 20 | [registries.crates-io] 21 | protocol = "sparse" 22 | 23 | [env] 24 | # Disable exponential formatting in `bigdecimal` 25 | # as 
a workaround for https://github.com/ChainSafe/forest/issues/4035 26 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_THRESHOLD = { value = "100", force = true } # 0.4.3 27 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_LOWER_THRESHOLD = { value = "100", force = true } # 0.4.7 28 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_UPPER_THRESHOLD = { value = "100", force = true } # 0.4.7 29 | -------------------------------------------------------------------------------- /.config/lychee.toml: -------------------------------------------------------------------------------- 1 | # This is the common config used by lychee, our dead html link checker 2 | # See the github actions workflows to see the inputs 3 | # https://github.com/lycheeverse/lychee/blob/2109470dc380eaf66944b6bcfa86230e0a58e58f/lychee-bin/src/options.rs#L152 4 | 5 | verbose = "debug" 6 | no_progress = true 7 | exclude_path = ["./node_modules", "./docs/node_modules", "./documentation", "./target"] 8 | exclude = [ 9 | # Avoid Github rate limits 10 | "github.com/ChainSafe/forest", 11 | # Requires CAPTCHA verification 12 | "faucet.calibnet.chainsafe-fil.io/funds.html", 13 | # Bot protection 14 | "jwt.io", 15 | "forest-explorer.chainsafe.dev", 16 | # Maybe temporarily down with 404, but it blocks the CI 17 | "filecoin.io/slack", 18 | 19 | ] 20 | timeout = 30 21 | max_retries = 6 22 | retry_wait_time = 10 23 | 24 | output = "lychee-report.md" 25 | -------------------------------------------------------------------------------- /.config/spellcheck.toml: -------------------------------------------------------------------------------- 1 | dev_comments = false 2 | skip_readme = false 3 | 4 | [hunspell] 5 | lang = "en_US" 6 | search_dirs = ["."] 7 | skip_os_lookups = true 8 | use_builtin = true 9 | tokenization_splitchars = "\",;:.!?#(){}[]|/_-‒'`&@§¶…<>=" 10 | extra_dictionaries = ["forest.dic", "en_US.dic"] 11 | 12 | [hunspell.quirks] 13 | transform_regex = [ 14 | # 10.7% 15 | "^[0-9_]+(?:\\.[0-9]*)?%$", 16 | ] 17 | allow_concatenation = false 18 | 
allow_dashes = false 19 | allow_emojis = true 20 | 21 | [nlprules] 22 | 23 | [reflow] 24 | max_line_length = 80 25 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .github 2 | .maintain 3 | Dockerfile 4 | .dockerignore 5 | *.md 6 | target/ 7 | scripts/ 8 | 9 | # Ignore CAR files fetched to the project directory which tends to happen during development. 10 | # Without it, the Docker context may bloat to hundreds of gigabytes of data. 11 | /*.car 12 | /*.car.zst 13 | -------------------------------------------------------------------------------- /.eslintrc.yml: -------------------------------------------------------------------------------- 1 | env: 2 | commonjs: true 3 | es2021: true 4 | shared-node-browser: true 5 | extends: eslint:recommended 6 | overrides: [] 7 | parserOptions: 8 | ecmaVersion: latest 9 | rules: {} 10 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | assets/actor_bundles.car.zst filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.github/.prettierrc: -------------------------------------------------------------------------------- 1 | proseWrap: never 2 | -------------------------------------------------------------------------------- /.github/CARGO_ADVISORIES_ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "[automated] `cargo deny check advisories` failure @ {{ date | date('D/M/YY HH:mm') }}" 3 | labels: ["Bug"] 4 | --- 5 | 6 | ## Description 7 | 8 | Please [check the logs]({{ env.WORKFLOW_URL }}) for more information. 
9 | -------------------------------------------------------------------------------- /.github/CHECKPOINT_ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "cron: update known_blocks.yaml" 3 | labels: ["Type: Task"] 4 | --- 5 | 6 | Forest uses checkpoints to improve performance when loading a snapshot. Without checkpoints, the blockchain has to be fully traversed to verify we have the right genesis block. Checkpoints short-circuit this search and shave off tens of minutes in boot time. 7 | 8 | Checkpoints have to be regularly updated, though, and [this issue](/.github/CHECKPOINT_ISSUE_TEMPLATE.md) is [automatically created once per month](/.github/workflows/checkpoints.yml). Follow the procedure below to update [`build/known_blocks.yaml`](/build/known_blocks.yaml), and close this issue. 9 | 10 | # Procedure 11 | 12 | ```bash 13 | #!/bin/bash 14 | 15 | # Perform this for `calibnet` AND `mainnet` 16 | chains=("mainnet" "calibnet") 17 | 18 | for chain in "${chains[@]}" 19 | do 20 | # download the latest snapshot. 21 | # ============================= 22 | # - calibnet ~3G, ~1min on a droplet 23 | # - mainnet ~60G, ~15mins on a droplet 24 | aria2c -x5 https://forest-archive.chainsafe.dev/latest/"$chain"/ -o "$chain" 25 | 26 | # print out the checkpoints. 27 | # ========================== 28 | # The whole operation takes a long time, BUT you only need the first line or so. 29 | timeout 15s forest-tool archive checkpoints "$chain" 30 | done 31 | 32 | # Update `build/known_blocks.yaml` as appropriate, manually. 33 | ``` 34 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Two members of the Forest team are automatically (and randomly) assigned to review all PRs. 
2 | * @ChainSafe/Forest 3 | -------------------------------------------------------------------------------- /.github/DOCKER_ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "[automated] Docker check failure" 3 | labels: ["Bug"] 4 | --- 5 | 6 | ## Description 7 | 8 | Latest Docker check failed. Please [check the logs]({{ env.WORKFLOW_URL }}) for more information. 9 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Provide a report of unexpected behaviour 4 | title: "" 5 | labels: "Type: Bug" 6 | assignees: "" 7 | --- 8 | 9 | ## Describe the bug 10 | 11 | 12 | 13 | ## To reproduce 14 | 15 | 16 | 17 | 1. Go to '...' 18 | 2. Run '....' 19 | 3. See error 20 | 21 | ## Log output 22 | 23 | 24 |
25 | Log Output 26 | 27 | ```Paste log output here 28 | paste log output... 29 | ``` 30 |
31 | 32 | ## Expected behaviour 33 | 34 | 35 | 36 | ## Screenshots 37 | 38 | 39 | 40 | ## Environment (please complete the following information): 41 | 42 | - OS: 43 | - Branch/commit 44 | - Hardware 45 | 46 | ## Other information and links 47 | 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/2-user_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: User Request 3 | about: Request a feature or change 4 | title: "" 5 | labels: "Type: Request" 6 | assignees: "" 7 | --- 8 | 9 | # Summary 10 | 11 | 12 | 13 | # Requirements 14 | 15 | 16 | 17 | # Motivation 18 | 19 | 20 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/3-epic.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Epic 3 | about: "[Internal] Larger chunk of work that can be broken down into smaller tasks" 4 | title: "" 5 | labels: "Type: Epic" 6 | assignees: "" 7 | --- 8 | 9 | # Summary 10 | 11 | 12 | 13 | # Motivation 14 | 15 | 16 | 17 | # Tasks 18 | 19 | 20 | 21 | 22 | - [ ] 23 | 24 | # Risks & Dependencies 25 | 26 | 27 | 28 | # Additional Links & Resources 29 | 30 | 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/4-task.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Task 3 | about: "[Internal] A specific, actionable unit of work" 4 | title: "" 5 | labels: "Type: Task" 6 | assignees: "" 7 | --- 8 | 9 | # Summary 10 | 11 | 12 | 13 | # Completion Criteria 14 | 15 | 16 | 17 | - [ ] 18 | 19 | # Additional Links & Resources 20 | 21 | 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/5-other.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 
Other 3 | about: Use wisely 🧐 4 | title: "" 5 | labels: "" 6 | assignees: "" 7 | --- 8 | 9 | 10 | 11 | This issue is NOT: 12 | 13 | - [ ] A bug 14 | - [ ] A user-request 15 | - [ ] An epic 16 | - [ ] A task 17 | 18 | Please check the other issue types if any of these categories apply. 19 | 20 | --- 21 | 22 | 23 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: true 2 | contact_links: 3 | - name: "Filecoin Slack (#fil-forest-help or #fil-forest-dev)" 4 | url: https://join.slack.com/t/filecoinproject/shared_invite/enQtNTUwNTI1Mzk5MDYwLTY4YmFjMzRlZjFiNDc0NmI2N2JjMjk5YTAyMDUyODljODg3MGI0ZGRhZTI5ZDNkZTAyNjkyMzI1ODM1YjA1MWI 5 | about: Please ask questions here. 6 | - name: General Filecoin Discussion Forum 7 | url: https://github.com/filecoin-project/community/discussions 8 | about: Please ask any general Filecoin questions here. 9 | - name: Forest Discussion Forum 10 | url: https://github.com/ChainSafe/forest/discussions 11 | about: Please ask Forest specific questions here. 12 | -------------------------------------------------------------------------------- /.github/RPC_PARITY_ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "[automated] RPC parity test failure @ {{ date | date('D/M/YY HH:mm') }}" 3 | labels: ["Bug"] 4 | --- 5 | 6 | ## Description 7 | 8 | Latest RPC parity test failed. Please [check the logs]({{ env.WORKFLOW_URL }}) for more information. 9 | -------------------------------------------------------------------------------- /.github/SNAPSHOT_PARITY_ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "[automated] Snapshot parity test failure" 3 | labels: ["Bug"] 4 | --- 5 | 6 | ## Description 7 | 8 | Latest snapshot parity test failed. 
Please [check the logs]({{ env.WORKFLOW_URL }}) for more information. 9 | -------------------------------------------------------------------------------- /.github/workflows/butterflynet.yml: -------------------------------------------------------------------------------- 1 | name: Butterflynet checks 2 | on: 3 | workflow_dispatch: 4 | env: 5 | CI: 1 6 | CARGO_INCREMENTAL: 0 7 | CACHE_TIMEOUT_MINUTES: 5 8 | SCRIPT_TIMEOUT_MINUTES: 30 9 | AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}" 10 | AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}" 11 | RUSTC_WRAPPER: sccache 12 | CC: sccache clang 13 | CXX: sccache clang++ 14 | FIL_PROOFS_PARAMETER_CACHE: /var/tmp/filecoin-proof-parameters 15 | SHELL_IMAGE: busybox 16 | jobs: 17 | butterflynet-checks: 18 | name: Butterflynet checks 19 | runs-on: ubuntu-24.04-arm 20 | steps: 21 | - name: Checkout Sources 22 | uses: actions/checkout@v4 23 | - name: Setup sccache 24 | uses: mozilla-actions/sccache-action@v0.0.9 25 | timeout-minutes: "${{ fromJSON(env.CACHE_TIMEOUT_MINUTES) }}" 26 | continue-on-error: true 27 | - uses: actions/setup-go@v5 28 | with: 29 | go-version-file: "go.work" 30 | - name: Build and install Forest binaries 31 | env: 32 | # To minimize compile times: https://nnethercote.github.io/perf-book/build-configuration.html#minimizing-compile-times 33 | RUSTFLAGS: "-C linker=clang -C link-arg=-fuse-ld=lld" 34 | run: make install-slim-quick 35 | - name: Run butterflynet checks 36 | run: ./scripts/tests/butterflynet_check.sh 37 | timeout-minutes: "${{ fromJSON(env.SCRIPT_TIMEOUT_MINUTES) }}" 38 | -------------------------------------------------------------------------------- /.github/workflows/cargo-advisories.yml: -------------------------------------------------------------------------------- 1 | name: cargo deny advisories 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | - cron: "0 0 * * *" 6 | jobs: 7 | rpc-parity: 8 | name: cargo deny advisories 9 | runs-on: ubuntu-24.04-arm 10 | steps: 11 | - 
uses: actions/checkout@v4 12 | - run: make install-cargo-binstall 13 | - run: cargo binstall --no-confirm cargo-deny 14 | - run: cargo deny check advisories 15 | - name: Set WORKFLOW_URL 16 | if: failure() 17 | run: | 18 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 19 | echo ${WORKFLOW_URL} 20 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV 21 | - uses: JasonEtco/create-an-issue@v2 22 | if: failure() 23 | env: 24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 25 | with: 26 | filename: .github/CARGO_ADVISORIES_ISSUE_TEMPLATE.md 27 | -------------------------------------------------------------------------------- /.github/workflows/checkpoints.yml: -------------------------------------------------------------------------------- 1 | name: Checkpoints 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: "0 0 1 * *" # The 1st of every month 7 | 8 | jobs: 9 | checkpoint_issue: 10 | runs-on: ubuntu-24.04-arm 11 | steps: 12 | - uses: actions/checkout@v4 13 | - uses: JasonEtco/create-an-issue@v2 14 | env: 15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 16 | with: 17 | filename: .github/CHECKPOINT_ISSUE_TEMPLATE.md 18 | -------------------------------------------------------------------------------- /.github/workflows/dockerfile-check.yml: -------------------------------------------------------------------------------- 1 | # Checks periodically that the Dockerfile builds successfully, and if it doesn't, it creates an issue with the error message. 2 | name: Dockerfile Check 3 | 4 | on: 5 | workflow_dispatch: 6 | schedule: 7 | # Run every Sunday at midnight 8 | - cron: '0 0 * * 0' 9 | 10 | jobs: 11 | docker-check: 12 | runs-on: ubuntu-24.04-arm 13 | steps: 14 | - uses: actions/checkout@v4 15 | - run: | 16 | docker build -t forest-test . 
17 | docker run --rm forest-test --version 18 | - name: Set WORKFLOW_URL 19 | if: always() 20 | run: | 21 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 22 | echo ${WORKFLOW_URL} 23 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV 24 | - uses: JasonEtco/create-an-issue@v2 25 | if: failure() 26 | env: 27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 28 | with: 29 | filename: .github/DOCKER_ISSUE_TEMPLATE.md 30 | -------------------------------------------------------------------------------- /.github/workflows/docs-auto-update.yml: -------------------------------------------------------------------------------- 1 | # This workflows, run daily, updates the Forest CLI usage docs, and submits a PR with the changes. 2 | name: Update Forest usage docs 3 | 4 | on: 5 | workflow_dispatch: 6 | schedule: 7 | - cron: "0 0 * * *" 8 | 9 | jobs: 10 | update-docs: 11 | runs-on: ubuntu-24.04-arm 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Run update script 15 | run: | 16 | cd docs/docs/users/reference 17 | ./generate_cli_md.sh docker > cli.md 18 | - uses: actions/setup-node@v4 19 | with: 20 | node-version: "18" 21 | - run: corepack enable 22 | - run: yarn --immutable 23 | - run: yarn md-fmt 24 | - name: Create Pull Request 25 | uses: peter-evans/create-pull-request@v7 26 | with: 27 | base: main 28 | branch: leshy/update-forest-docs 29 | token: ${{ secrets.ACTIONS_PAT }} 30 | commit-message: Update Forest CLI docs 31 | title: "[automated] Update Forest CLI docs" 32 | body: | 33 | ### Changes 34 | - Updates Forest CLI docs to the latest commit in the `main` branch. 35 | -------------------------------------------------------------------------------- /.github/workflows/docs-check.yml: -------------------------------------------------------------------------------- 1 | name: Docs Check 2 | 3 | # Cancel workflow if there is a new change to the branch. 
4 | concurrency: 5 | group: ${{ github.workflow }}-${{ github.ref }} 6 | cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} 7 | 8 | on: 9 | push: 10 | branches: 11 | - main 12 | paths: 13 | - '**.md' 14 | - 'docs/**' 15 | - '.github/workflows/docs-check.yml' 16 | merge_group: 17 | pull_request: 18 | branches: 19 | - main 20 | paths: 21 | - '**.md' 22 | - 'docs/**' 23 | - '.github/workflows/docs-check.yml' 24 | 25 | jobs: 26 | docs-check: 27 | name: Check 28 | runs-on: ubuntu-24.04-arm 29 | defaults: 30 | run: 31 | working-directory: ./docs 32 | steps: 33 | - uses: actions/checkout@v4 34 | - uses: actions/setup-node@v4 35 | with: 36 | node-version: 18 37 | # See https://github.com/actions/setup-node/issues/1027 38 | # cache: yarn 39 | - run: corepack enable 40 | - run: make format-spellcheck-dictionary-check 41 | - run: yarn --immutable 42 | - run: yarn typecheck 43 | - run: yarn spellcheck 44 | - run: yarn format-check 45 | - run: yarn build 46 | -------------------------------------------------------------------------------- /.github/workflows/docs-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Docs Deploy 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'docs/**' 9 | - '.github/workflows/docs-deploy.yml' 10 | merge_group: 11 | pull_request: 12 | branches: 13 | - main 14 | paths: 15 | - 'docs/**' 16 | - '.github/workflows/docs-deploy.yml' 17 | 18 | permissions: 19 | contents: read 20 | deployments: write 21 | pull-requests: write 22 | 23 | jobs: 24 | docs-deploy: 25 | name: Deploy to Cloudflare Pages 26 | runs-on: ubuntu-24.04-arm 27 | defaults: 28 | run: 29 | working-directory: ./docs 30 | steps: 31 | - uses: actions/checkout@v4 32 | - uses: actions/setup-node@v4 33 | with: 34 | node-version: "18" 35 | # See https://github.com/actions/setup-node/issues/1027 36 | # cache: yarn 37 | - run: corepack enable 38 | - run: yarn --immutable 39 | - run: yarn run build 40 | 41 | - name: Deploy 42 
| uses: cloudflare/wrangler-action@v3 43 | with: 44 | apiToken: ${{ secrets.CLOUDFLARE_PAGES_API_TOKEN }} 45 | accountId: ${{ secrets.CLOUDFLARE_PAGES_ACCOUNT_ID }} 46 | command: pages deploy ./docs/build --project-name=forest-docs 47 | gitHubToken: ${{ secrets.GITHUB_TOKEN }} 48 | -------------------------------------------------------------------------------- /.github/workflows/link-check.yml: -------------------------------------------------------------------------------- 1 | name: Link Checker 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: "0 0 * * *" 7 | push: 8 | branches: 9 | - main 10 | paths: 11 | - "**.md" 12 | - "**.mdx" 13 | - "**.html" 14 | - ".github/workflows/link-check.yml" 15 | merge_group: 16 | pull_request: 17 | branches: 18 | - main 19 | paths: 20 | - "**.md" 21 | - "**.mdx" 22 | - "**.html" 23 | - ".github/workflows/link-check.yml" 24 | 25 | jobs: 26 | link-check: 27 | name: Link Check 28 | runs-on: ubuntu-24.04-arm 29 | permissions: 30 | contents: read 31 | issues: write 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - name: Link Checker 36 | id: lychee 37 | uses: lycheeverse/lychee-action@v2 38 | with: 39 | args: --user-agent "curl/8.5.0" --no-progress --config ./.config/lychee.toml './**/*.md' './**/*.mdx' './**/*.html' 40 | 41 | - name: Create Issue From File 42 | if: env.lychee_exit_code != 0 && github.event_name == 'schedule' 43 | uses: peter-evans/create-issue-from-file@v5 44 | with: 45 | title: Link Checker Report 46 | content-filepath: ./lychee-report.md 47 | 48 | - name: Fail job on error 49 | if: env.lychee_exit_code != 0 && github.event_name != 'schedule' 50 | run: exit 1 51 | -------------------------------------------------------------------------------- /.github/workflows/rpc_test_repeat.yml: -------------------------------------------------------------------------------- 1 | name: RPC tests on repeat 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | image: 7 | description: 'Forest image to use' 8 | 
required: false 9 | default: 'ghcr.io/chainsafe/forest:edge-fat' 10 | type: string 11 | 12 | schedule: 13 | # Run every day at midnight 14 | - cron: 0 0 * * * 15 | 16 | env: 17 | SHELL_IMAGE: busybox 18 | SCRIPT_TIMEOUT_MINUTES: 30 19 | 20 | jobs: 21 | calibnet-rpc-checks: 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | # GH Actions do not support running jobs in a loop. 26 | # This is a workaround to run the same job 100 times. 27 | x: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] 28 | y: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] 29 | name: Calibnet RPC checks 30 | runs-on: ubuntu-24.04-arm 31 | steps: 32 | - uses: actions/checkout@v4 33 | - name: Run api compare tests 34 | shell: bash 35 | run: | 36 | IMAGE=${{ github.event.inputs.image }} 37 | if [ -z "$IMAGE" ]; then 38 | IMAGE="ghcr.io/chainsafe/forest:edge-fat" 39 | fi 40 | echo "FROM $IMAGE" > Dockerfile-RPC 41 | export FOREST_DOCKERFILE_OVERRIDE=Dockerfile-RPC 42 | ./scripts/tests/api_compare/setup.sh 43 | timeout-minutes: '${{ fromJSON(env.SCRIPT_TIMEOUT_MINUTES) }}' 44 | - name: Dump docker logs 45 | if: always() 46 | uses: jwalton/gh-docker-logs@v2 47 | -------------------------------------------------------------------------------- /.github/workflows/snapshot-parity.yml: -------------------------------------------------------------------------------- 1 | name: Snapshot parity test 2 | on: 3 | workflow_dispatch: 4 | schedule: 5 | - cron: "0 0 * * 0" # Runs at 00:00, only on Sunday 6 | jobs: 7 | snapshot-parity: 8 | name: Snapshot parity test 9 | runs-on: buildjet-4vcpu-ubuntu-2204 10 | steps: 11 | - name: Checkout Sources 12 | uses: actions/checkout@v4 13 | - name: Run snapshot parity test 14 | run: ./scripts/tests/snapshot_parity/setup.sh 15 | timeout-minutes: 60 16 | - name: Dump docker logs 17 | if: always() 18 | uses: jwalton/gh-docker-logs@v2 19 | - name: Set WORKFLOW_URL 20 | if: always() 21 | run: | 22 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}" 23 | echo 
${WORKFLOW_URL} 24 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV 25 | - uses: JasonEtco/create-an-issue@v2 26 | if: github.ref == 'refs/heads/main' && failure() 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | with: 30 | filename: .github/SNAPSHOT_PARITY_ISSUE_TEMPLATE.md 31 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | target 2 | -------------------------------------------------------------------------------- /.yarnrc.yml: -------------------------------------------------------------------------------- 1 | nodeLinker: node-modules 2 | -------------------------------------------------------------------------------- /FUNDING.json: -------------------------------------------------------------------------------- 1 | { 2 | "drips": { 3 | "filecoin": { 4 | "ownedBy": "0xb4713f39476841fAF0ea5a555d0b1d451e6B05A1" 5 | } 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 
12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 20 | -------------------------------------------------------------------------------- /build/bootstrap/butterflynet: -------------------------------------------------------------------------------- 1 | /dnsaddr/bootstrap.butterfly.fildev.network 2 | -------------------------------------------------------------------------------- /build/bootstrap/calibnet: -------------------------------------------------------------------------------- 1 | /dns/bootstrap.calibration.filecoin.chain.love/tcp/1237/p2p/12D3KooWQPYouEAsUQKzvFUA9sQ8tz4rfpqtTzh2eL6USd9bwg7x 2 | /dns/bootstrap-calibnet-0.chainsafe-fil.io/tcp/34000/p2p/12D3KooWABQ5gTDHPWyvhJM7jPhtNwNJruzTEo32Lo4gcS5ABAMm 3 | /dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000/p2p/12D3KooWS3ZRhMYL67b4bD5XQ6fcpTyVQXnDe8H89LvwrDqaSbiT 4 | /dns/bootstrap-calibnet-2.chainsafe-fil.io/tcp/34000/p2p/12D3KooWEiBN8jBX8EBoM3M47pVRLRWV812gDRUJhMxgyVkUoR48 5 | /dns/bootstrap-archive-calibnet-0.chainsafe-fil.io/tcp/1347/p2p/12D3KooWLcRpEfmUq1fC8vfcLnKc1s161C92rUewEze3ALqCd9yJ 6 | -------------------------------------------------------------------------------- /build/bootstrap/mainnet: -------------------------------------------------------------------------------- 1 | /dns/bootstrap.filecoin.chain.love/tcp/1235/p2p/12D3KooWBF8cpp65hp2u9LK5mh19x67ftAam84z9LsfaquTDSBpt 2 | /dns/bootstrap-venus.mainnet.filincubator.com/tcp/8888/p2p/QmQu8C6deXwKvJP2D8B6QGyhngc3ZiDnFzEHBDx8yeBXST 3 | 
/dns/bootstrap-mainnet-0.chainsafe-fil.io/tcp/34000/p2p/12D3KooWKKkCZbcigsWTEu1cgNetNbZJqeNtysRtFpq7DTqw3eqH 4 | /dns/bootstrap-mainnet-1.chainsafe-fil.io/tcp/34000/p2p/12D3KooWGnkd9GQKo3apkShQDaq1d6cKJJmsVe6KiQkacUk1T8oZ 5 | /dns/bootstrap-mainnet-2.chainsafe-fil.io/tcp/34000/p2p/12D3KooWHQRSDFv4FvAjtU32shQ7znz7oRbLBryXzZ9NMK2feyyH 6 | /dns/n1.mainnet.fil.devtty.eu/udp/443/quic-v1/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F 7 | /dns/n1.mainnet.fil.devtty.eu/tcp/443/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F 8 | /dns/n1.mainnet.fil.devtty.eu/udp/443/quic-v1/webtransport/certhash/uEiAWlgd8EqbNhYLv86OdRvXHMosaUWFFDbhgGZgCkcmKnQ/certhash/uEiAvtq6tvZOZf_sIuityDDTyAXDJPfXSRRDK2xy9UVPsqA/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F 9 | -------------------------------------------------------------------------------- /build/vendored-docs-redirect.index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | [advisories] 2 | ignore = [ 3 | # Unmaintained crates that fvm2 requires, and will not change for 4 | # compatability/consensus reasons - see 5 | # https://github.com/filecoin-project/ref-fvm/issues/1843 6 | "RUSTSEC-2022-0061", # parity-wasm is deprecated 7 | "RUSTSEC-2024-0436", # paste is unmaintained 8 | ] 9 | 10 | [licenses] 11 | allow = [ 12 | "Apache-2.0", 13 | "Apache-2.0 WITH LLVM-exception", 14 | "BSD-2-Clause", 15 | "BSD-3-Clause", 16 | "CC0-1.0", 17 | "ISC", 18 | "MIT", 19 | "Unicode-3.0", 20 | "Unlicense", 21 | "Zlib", 22 | ] 23 | 24 | exceptions = [ 25 | { allow = [ 26 | "CDLA-Permissive-2.0", 27 | ], crate = "webpki-roots" }, 28 | { allow = [ 29 | "CDLA-Permissive-2.0", 30 | ], crate = "webpki-root-certs" }, 31 | { allow = [ 32 | "MPL-2.0", 33 | ], crate = "option-ext" }, 
34 | { allow = [ 35 | "MPL-2.0", 36 | ], crate = "colored" }, 37 | { allow = [ 38 | "MPL-2.0", 39 | ], crate = "cbindgen" }, 40 | { allow = [ 41 | "MPL-2.0", 42 | ], crate = "attohttpc" }, 43 | ] 44 | 45 | [[licenses.clarify]] 46 | crate = "ring" 47 | expression = "MIT AND ISC AND OpenSSL" 48 | license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }] 49 | 50 | [bans] 51 | multiple-versions = "allow" 52 | -------------------------------------------------------------------------------- /docs/.bookignore: -------------------------------------------------------------------------------- 1 | README.md 2 | .github 3 | .spellcheck.yml 4 | LICENSE -------------------------------------------------------------------------------- /docs/.gitattributes: -------------------------------------------------------------------------------- 1 | docs/reference/cli.md linguist-generated=true 2 | -------------------------------------------------------------------------------- /docs/.spellcheck.yml: -------------------------------------------------------------------------------- 1 | matrix: 2 | - name: Markdown 3 | aspell: 4 | lang: en 5 | dictionary: 6 | wordlists: 7 | - .wordlist.txt 8 | encoding: utf-8 9 | pipeline: 10 | - pyspelling.filters.markdown: 11 | - pyspelling.filters.html: 12 | comments: false 13 | ignores: 14 | - code 15 | - pre 16 | sources: 17 | - "**/*.md" 18 | default_encoding: utf-8 19 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | SPELLCHECK_DICTIONARY=./dictionary.txt 2 | format-spellcheck-dictionary: 3 | @cat $(SPELLCHECK_DICTIONARY) | sort --ignore-case | uniq > $(SPELLCHECK_DICTIONARY).tmp 4 | @mv $(SPELLCHECK_DICTIONARY).tmp $(SPELLCHECK_DICTIONARY) 5 | 6 | format-spellcheck-dictionary-check: 7 | @cat $(SPELLCHECK_DICTIONARY) | sort --ignore-case | uniq > $(SPELLCHECK_DICTIONARY).tmp 8 | @diff $(SPELLCHECK_DICTIONARY) 
$(SPELLCHECK_DICTIONARY).tmp 9 | @rm $(SPELLCHECK_DICTIONARY).tmp 10 | -------------------------------------------------------------------------------- /docs/babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve("@docusaurus/core/lib/babel/preset")], 3 | }; 4 | -------------------------------------------------------------------------------- /docs/devSidebars.js: -------------------------------------------------------------------------------- 1 | const sidebars = { 2 | userSidebar: [{ type: "autogenerated", dirName: "." }], 3 | }; 4 | 5 | module.exports = sidebars; 6 | -------------------------------------------------------------------------------- /docs/dictionary.txt: -------------------------------------------------------------------------------- 1 | 2k 2 | APIs 3 | backend 4 | benchmarking 5 | blockstore 6 | Butterflynet 7 | Calibnet 8 | calibnet 9 | calibnet-related 10 | cardinality 11 | ChainSafe 12 | chainsafe 13 | ChainSafe's 14 | changelog 15 | CIDs 16 | CLI 17 | cli 18 | Cloudflare 19 | codebase 20 | config 21 | Datacap 22 | datacap 23 | devnet 24 | Devops 25 | Devs 26 | DHT 27 | DigitalOcean 28 | Drand 29 | Ethereum 30 | F3 31 | f3 32 | f3-sidecar 33 | FFI 34 | FIL 35 | fil 36 | Filecoin 37 | filecoin-project 38 | Filfox 39 | FilOz 40 | FIP 41 | FIPs 42 | FVM 43 | GC 44 | GiB 45 | Github 46 | Grafana 47 | hardcoded 48 | hotfix 49 | ie. 
50 | Implementers 51 | implementers 52 | io 53 | IPFS 54 | JSON 55 | JSON-RPC 56 | JWT 57 | JWTs 58 | keypair 59 | keystore 60 | Kubernetes 61 | kubernetes 62 | Linux 63 | Liveness 64 | liveness 65 | localhost 66 | localhost's 67 | M1 68 | M2 69 | macOS 70 | Mainnet 71 | mainnet 72 | multiaddress 73 | namespace 74 | NV22 75 | NV23 76 | NV24 77 | NVMe 78 | onwards 79 | Organisation 80 | P2P 81 | p2p 82 | performant 83 | pre-compiled 84 | preload 85 | preloaded 86 | Q4 87 | README 88 | RNG 89 | Roadmap 90 | roadmap 91 | RPC 92 | rustup 93 | S3-compatible 94 | semver 95 | serverless 96 | stateroots 97 | subcommands 98 | swappiness 99 | TabItem 100 | TBD 101 | Terraform 102 | testnet 103 | tipset 104 | tipsets 105 | V0 106 | V1 107 | VPS 108 | WIP 109 | -------------------------------------------------------------------------------- /docs/docs/developers/guides/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Guides" 3 | } 4 | -------------------------------------------------------------------------------- /docs/docs/developers/introduction.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Introduction 3 | hide_title: true 4 | sidebar_position: 1 5 | slug: / 6 | --- 7 | 8 | ## Developer Documentation 9 | 10 | :::warning 11 | 12 | This documentation is intended for contributors to the Forest codebase. If you are a user of Forest you might want 13 | the [user documentation](/). 14 | 15 | ::: 16 | 17 | Welcome to the Forest developer documentation! 
18 | -------------------------------------------------------------------------------- /docs/docs/users/getting_started/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Getting Started", 3 | "position": 2 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/getting_started/hardware-reqs.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Hardware Requirements 3 | sidebar_position: 1 4 | --- 5 | 6 | Forest is designed to be lightweight enough to run on consumer hardware. 7 | 8 | | | Minimum | Recommended | Notes | 9 | | ---------- | ------- | ----------- | --------------------------------------------------------- | 10 | | CPU | 4-core | 8-core | | 11 | | Memory | 16 GiB | 32 GiB | State migrations can require increased amounts of memory. | 12 | | Disk Space | 128 GiB | 256 GiB | NVMe recommended. Snapshots can require ~70+ GiB each. | 13 | -------------------------------------------------------------------------------- /docs/docs/users/getting_started/syncing.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Syncing A Node 3 | sidebar_position: 3 4 | --- 5 | 6 | :::info 7 | 8 | All nodes joining the network are recommended to sync from a snapshot. This is the default behavior of Forest. 9 | 10 | Syncing from genesis (tipset 0) is generally infeasible. 11 | 12 | ::: 13 | 14 | Once started, Forest will connect to the bootstrap peers and in parallel fetch the latest snapshot from [Forest's snapshot service](../knowledge_base/snapshot_service.md). Once the snapshot is downloaded, it will be loaded into the node, and then syncing will continue by utilizing its peers. 
15 | 16 | ### Mainnet 17 | 18 | ```shell 19 | forest 20 | ``` 21 | 22 | ### Calibnet 23 | 24 | ```shell 25 | forest --chain calibnet 26 | ``` 27 | 28 | ## Monitoring Sync Status 29 | 30 | In another shell: 31 | 32 | ```shell 33 | forest-cli sync status 34 | ``` 35 | -------------------------------------------------------------------------------- /docs/docs/users/guides/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Guides", 3 | "position": 3 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/guides/advanced/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Advanced", 3 | "position": 7 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/guides/advanced/generating_snapshots.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Generating Snapshots 3 | sidebar_position: 1 4 | --- 5 | 6 | # Snapshot exporting 📸 7 | 8 | ## Hardware requirements 9 | 10 | To export a mainnet snapshot, you need a setup with at least 16 GB of RAM. On a 11 | machine with rapid NVMe, the default export should take around 30 12 | minutes. 13 | 14 | The requirements for calibnet snapshots are lower, but it is still recommended 15 | to have at least 8 GB of RAM. The export should take less than a minute. 16 | 17 | ## Running the node 18 | 19 | Wait until the node is fully synced. You can use the command: 20 | 21 | ```shell 22 | forest-cli sync wait 23 | ``` 24 | 25 | ## Exporting the snapshot 26 | 27 | To export the snapshot with the defaults, run: 28 | 29 | ```shell 30 | forest-cli snapshot export 31 | ``` 32 | 33 | The snapshot will be exported with 2000 recent stateroots to the current directory. The snapshot will be 34 | compressed. 35 | 36 | For mainnet, you should expect a file of over 70 GB. 
For calibnet, you should 37 | expect a file of over 5 GB. Note that the snapshot size grows over time. 38 | 39 | ### CLI reference 40 | 41 | Details on the `forest-cli snapshot export` command and its subcommands can be found at the [CLI reference](../../reference/cli.md#forest-cli-snapshot). 42 | -------------------------------------------------------------------------------- /docs/docs/users/guides/monitoring/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Monitoring", 3 | "position": 6 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/guides/monitoring/best_practices.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Best Practices 3 | --- 4 | 5 | ### Node 6 | 7 | - **monitor disk space usage**, especially the database size - it can grow quickly, especially around network upgrades 8 | - **monitor the memory usage** - the node can use a lot of memory, especially during sync. Don't let it run too close to the limit 9 | - **monitor the chain sync status** - on average, the node should be able to sync two epochs per minute 10 | - **monitor the number of peers** - the more peers, the better, If a node has no peers, it cannot sync 11 | - **monitor the logs for errors and warnings** - they can indicate potential issues 12 | 13 | ### Monitoring 14 | 15 | - **monitor the monitoring system** - if the monitoring system goes down, you won't know if the node is down 16 | - **set up alerts for critical metrics** - disk space, memory usage, sync status, etc. 
17 | - **ensure the persistence of the monitoring system** - if the monitoring system loses data, you won't be able to diagnose issues 18 | - **don't expose monitoring endpoints to the internet** - they are not secured and can leak sensitive information 19 | - **don't set the log levels too high** - this can generate a lot of data and slow down the node 20 | -------------------------------------------------------------------------------- /docs/docs/users/guides/monitoring/logs.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Logs 3 | --- 4 | 5 | Logs are written to standard output by default. They can be written to rolling log files with the `--log-dir ` flag. The log level can be set with the `RUST_LOG` environment variable. The defaults are generally sufficient for most users but can be adjusted to provide more or less information. Different modules can have different log levels, and the log level can be set to `trace`, `debug`, `info`, `warn`, `error`, or `off`. 6 | 7 | ```bash 8 | RUST_LOG=info,forest_filecoin=debug forest --chain calibnet 9 | ``` 10 | 11 | Sample output: 12 | 13 | ```console 14 | 2024-08-28T12:49:59.830012Z INFO forest::daemon::main: Using default calibnet config 15 | 2024-08-28T12:49:59.834109Z INFO forest::daemon: Starting Forest daemon, version 0.19.2+git.74fd562acce 16 | 2024-08-28T12:49:59.834123Z DEBUG forest::daemon: Increased file descriptor limit from 1024 to 8192 17 | 2024-08-28T12:49:59.834164Z DEBUG forest::libp2p::keypair: Recovered libp2p keypair from /home/rumcajs/.local/share/forest/libp2p/keypair 18 | ``` 19 | 20 | :::tip 21 | Enabling `trace` or `debug` logging can generate gargantuan log files (gigabytes per minute). Make sure to adjust the log level to your needs. 22 | ::: 23 | 24 | Sending logs to Loki is also possible. Pass `--loki` to the Forest daemon to enable it. The logs are sent to Loki via the HTTP API. 
The Loki endpoint can be set with the `--loki-endpoint` flag. The default endpoint is `http://localhost:3100`. 25 | -------------------------------------------------------------------------------- /docs/docs/users/guides/running_with_curio.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Running Forest With Curio 3 | sidebar_position: 99 4 | --- 5 | 6 | :::note 7 | 8 | [Curio](https://curiostorage.org/) is the successor to Lotus-miner under active development by Curio Storage Inc. Compatibility with Forest is being worked on but has not yet been released. Check back in Q4 2024 for more updates. 9 | 10 | ::: 11 | 12 | Coming soon!™ 13 | -------------------------------------------------------------------------------- /docs/docs/users/knowledge_base/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Knowledge Base", 3 | "position": 5 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/reference/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Reference", 3 | "position": 4 4 | } 5 | -------------------------------------------------------------------------------- /docs/docs/users/reference/generate_cli_md.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ "$1" == "local" ]; then 4 | ENVIRONMENT="local" 5 | elif [ "$1" == "docker" ]; then 6 | ENVIRONMENT="docker" 7 | else 8 | echo "Usage: $0 " 9 | exit 1 10 | fi 11 | 12 | cat < 22 | 23 | This document lists every command line option and sub-command for Forest. 
24 | EOF 25 | 26 | if [ "$ENVIRONMENT" == "local" ]; then 27 | bash ./cli.sh 28 | else 29 | docker run --rm --entrypoint /bin/bash -v "$(pwd)":/forest ghcr.io/chainsafe/forest:edge-fat /forest/cli.sh 30 | fi 31 | -------------------------------------------------------------------------------- /docs/static/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/.nojekyll -------------------------------------------------------------------------------- /docs/static/img/chainsafe_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/chainsafe_logo.png -------------------------------------------------------------------------------- /docs/static/img/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/favicon.ico -------------------------------------------------------------------------------- /docs/static/img/filecoin_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/filecoin_logo.png -------------------------------------------------------------------------------- /docs/static/img/logo-with-text.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/logo-with-text.png -------------------------------------------------------------------------------- /docs/static/img/logo.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/logo.png -------------------------------------------------------------------------------- /docs/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | // This file is not used in compilation. It is here just for a nice editor experience. 3 | "extends": "@docusaurus/tsconfig", 4 | "compilerOptions": { 5 | "baseUrl": ".", 6 | "skipLibCheck": true 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /docs/userSidebars.js: -------------------------------------------------------------------------------- 1 | const sidebars = { 2 | userSidebar: [{ type: "autogenerated", dirName: "." }], 3 | }; 4 | 5 | module.exports = sidebars; 6 | -------------------------------------------------------------------------------- /documentation/.gitignore: -------------------------------------------------------------------------------- 1 | # Build artifacts 2 | /book 3 | -------------------------------------------------------------------------------- /documentation/book.toml: -------------------------------------------------------------------------------- 1 | [book] 2 | authors = [] 3 | language = "en" 4 | multilingual = false 5 | src = "src" 6 | 7 | [build] 8 | create-missing = false 9 | 10 | [output.html] 11 | 12 | [output.linkcheck] 13 | -------------------------------------------------------------------------------- /documentation/src/SUMMARY.md: -------------------------------------------------------------------------------- 1 | # Summary 2 | 3 | # User Guide 4 | 5 | - [Introduction](./introduction.md) 6 | - [Troubleshooting](./trouble_shooting.md) 7 | - [Offline Forest](./offline-forest.md) 8 | 9 | # Developer documentation 10 | 11 | - [Developer documentation](./developer_documentation/introduction.md) 12 | - [Database migrations](./developer_documentation/database_migrations.md) 13 | 
- [Local GH Actions](./developer_documentation/local_actions.md) 14 | - [Memory analysis](./developer_documentation/memory-analysis.md) 15 | - [Release checklist](./developer_documentation/release_checklist.md) 16 | - [State migration guide](./developer_documentation/state_migration_guide.md) 17 | - [Test plan](./developer_documentation/test_plan.md) 18 | - [Devnet Notes](./developer_documentation/devnet_notes.md) 19 | - [Archie and Fuzzy](./developer_documentation/archie_and_fuzzy.md) 20 | - [RPC API Compatibility](./developer_documentation/rpc_api_compatibility.md) 21 | - [ChainMuxer/TipsetProcessor state machine](./developer_documentation/chain_muxer_state_machine.md) 22 | -------------------------------------------------------------------------------- /documentation/src/developer_documentation/chain_muxer_state_machine.md: -------------------------------------------------------------------------------- 1 | Date: 2023-10-21 2 | 3 | `ChainMuxer` state transitions: 4 | 5 | ```mermaid 6 | flowchart TD 7 | A[Idle] 8 | B[Connect] 9 | C[Bootstrap] 10 | D[Follow] 11 | 12 | A -->|sync| B 13 | A -->|skip| D 14 | B -->|behind| C 15 | B -->|in-sync| D 16 | D -->|on-error| A 17 | C --> A 18 | ``` 19 | 20 | Once the `ChainMuxer` is in `follow` mode, it passes control to the 21 | `TipsetProcessor`. A typical start-up sequence looks like this: 22 | 23 | 1. `idle` state: Immediately switch to `connect` state. 24 | 2. `connect` state: Wait for 5 tipsets from peers. If we're within 1 epoch of 25 | the heaviest seen tipset, switch to `follow` state. Otherwise, switch to 26 | `bootstrap` state. 27 | 3. `bootstrap` state: Fetch tipsets between the heaviest seen tipset and the 28 | last validated tipset. Validate all of those tipsets and return to `idle` 29 | state. 30 | 4. `follow` state: Pass control to the `TipsetProcessor` state machine. 
31 | 32 | `TipsetProcessor` state transitions: 33 | 34 | ```mermaid 35 | flowchart TD 36 | A[Idle] 37 | B[FindRange] 38 | C[SyncRange] 39 | 40 | A -->|new tipset group| B 41 | B --> C 42 | C --> A 43 | C --> B 44 | ``` 45 | -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/bottom_up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/bottom_up.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/caller_callee.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/caller_callee.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/consumed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/consumed.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/flamegraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/flamegraph.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/sizes.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/sizes.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/heaptrack/summary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/summary.png -------------------------------------------------------------------------------- /documentation/src/developer_documentation/introduction.md: -------------------------------------------------------------------------------- 1 | # Developer documentation 2 | 3 | In this section you will find resources targeted for Forest developers. 4 | -------------------------------------------------------------------------------- /documentation/src/img/forest_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/img/forest_logo.png -------------------------------------------------------------------------------- /documentation/src/trouble_shooting.md: -------------------------------------------------------------------------------- 1 | # Trouble Shooting 2 | 3 | ## Common Issues 4 | 5 | #### Jemalloc issues on Apple Silicon macs 6 | 7 | Forest is compiled with `jemalloc` as a default allocator. If you are having 8 | problems running or compiling Forest, use this checklist: 9 | 10 | 1. Make sure you are using an arm64 version of homebrew; this could be a problem 11 | one inherits when migrating from an Intel Mac to Apple Silicon: 12 | [Stackoverflow example](https://stackoverflow.com/a/68443301). 13 | 2. 
Make sure your default host is set to `aarch64-apple-darwin` via 14 | `rustup set default-host aarch64-apple-darwin`. 15 | 3. This could result in various errors related to the fact that you still have 16 | some of the libraries symlinked to `/usr/local/lib` from an intel Homebrew 17 | installation. The easiest fix for this is: 18 | - Remove the libraries in question from `/usr/local/lib`. 19 | - Add `export LIBRARY_PATH=/opt/homebrew/lib` to your bash profile. 20 | - Source the new bash profile. 21 | -------------------------------------------------------------------------------- /f3-sidecar/.gitignore: -------------------------------------------------------------------------------- 1 | # The default f3 database folder 2 | /f3-data 3 | # The binary 4 | f3-sidecar 5 | -------------------------------------------------------------------------------- /f3-sidecar/manifest.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | _ "embed" 5 | "encoding/json" 6 | 7 | "github.com/filecoin-project/go-f3/gpbft" 8 | "github.com/filecoin-project/go-f3/manifest" 9 | ) 10 | 11 | var Network2PredefinedManifestMappings map[gpbft.NetworkName]*manifest.Manifest = make(map[gpbft.NetworkName]*manifest.Manifest) 12 | 13 | func init() { 14 | for _, bytes := range [][]byte{F3ManifestBytes2K, F3ManifestBytesButterfly, F3ManifestBytesCalibnet, F3ManifestBytesMainnet} { 15 | m := loadManifest(bytes) 16 | Network2PredefinedManifestMappings[m.NetworkName] = m 17 | } 18 | } 19 | 20 | //go:embed f3manifest_2k.json 21 | var F3ManifestBytes2K []byte 22 | 23 | //go:embed f3manifest_butterfly.json 24 | var F3ManifestBytesButterfly []byte 25 | 26 | //go:embed f3manifest_calibnet.json 27 | var F3ManifestBytesCalibnet []byte 28 | 29 | //go:embed f3manifest_mainnet.json 30 | var F3ManifestBytesMainnet []byte 31 | 32 | func loadManifest(bytes []byte) *manifest.Manifest { 33 | var m manifest.Manifest 34 | if err := json.Unmarshal(bytes, 
&m); err != nil { 35 | logger.Panicf("failed to unmarshal F3 manifest: %s", err) 36 | } 37 | if err := m.Validate(); err != nil { 38 | logger.Panicf("invalid F3 manifest: %s", err) 39 | } 40 | return &m 41 | } 42 | -------------------------------------------------------------------------------- /f3-sidecar/types.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "encoding/json" 5 | "fmt" 6 | "time" 7 | 8 | "github.com/filecoin-project/go-f3/gpbft" 9 | ) 10 | 11 | type TipSet struct { 12 | TsKey []byte `json:"key"` 13 | TsBeacon []byte `json:"beacon"` 14 | TsEpoch int64 `json:"epoch"` 15 | TsTimestamp int64 `json:"timestamp"` 16 | } 17 | 18 | func (ts TipSet) Key() gpbft.TipSetKey { 19 | return gpbft.TipSetKey(ts.TsKey) 20 | } 21 | 22 | func (ts TipSet) Beacon() []byte { 23 | return ts.TsBeacon 24 | } 25 | 26 | func (ts TipSet) Epoch() int64 { 27 | return ts.TsEpoch 28 | } 29 | 30 | func (ts TipSet) Timestamp() time.Time { 31 | return time.Unix(ts.TsTimestamp, 0) 32 | } 33 | 34 | func (ts TipSet) String() string { 35 | bytes, err := json.Marshal(&ts) 36 | if err != nil { 37 | return fmt.Sprintf("%s", err) 38 | } 39 | return string(bytes) 40 | } 41 | -------------------------------------------------------------------------------- /f3-sidecar/utils.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import "github.com/ipfs/go-cid" 4 | 5 | var CID_UNDEF_RUST = cid.MustParse("baeaaaaa") 6 | 7 | func isCidDefined(c cid.Cid) bool { 8 | return c.Defined() && c != CID_UNDEF_RUST 9 | } 10 | -------------------------------------------------------------------------------- /f3-sidecar/utils_test.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/ipfs/go-cid" 7 | "github.com/stretchr/testify/require" 8 | ) 9 | 10 | func TestIsCidDefined(t *testing.T) 
{ 11 | require.NotEqual(t, cid.Undef, CID_UNDEF_RUST) 12 | require.False(t, isCidDefined(cid.Undef)) 13 | require.False(t, isCidDefined(CID_UNDEF_RUST)) 14 | require.True(t, isCidDefined(cid.MustParse("bafy2bzaceac6jbaeolcsbh7rawcshcvh2cokvxrgsh4sxg5yu34i5xllbfpw4"))) 15 | } 16 | -------------------------------------------------------------------------------- /go.work: -------------------------------------------------------------------------------- 1 | go 1.23.9 2 | 3 | use ( 4 | ./f3-sidecar 5 | ./interop-tests/src/tests/go_app 6 | ) 7 | -------------------------------------------------------------------------------- /interop-tests/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "forest-interop-tests" 3 | version = "0.1.0" 4 | authors = ["ChainSafe Systems "] 5 | repository = "https://github.com/ChainSafe/forest" 6 | edition = "2021" 7 | license = "MIT OR Apache-2.0" 8 | description = "Interop tests for Forest." 9 | publish = false 10 | 11 | [dependencies] 12 | 13 | [dev-dependencies] 14 | anyhow = { workspace = true } 15 | cid = { workspace = true } 16 | flume = { workspace = true } 17 | forest = { package = "forest-filecoin", path = "../", default-features = false, features = [ 18 | "interop-tests-private", 19 | "no-f3-sidecar", 20 | ] } 21 | futures = { workspace = true } 22 | libp2p = { workspace = true, features = [ 23 | 'kad', 24 | 'identify', 25 | 'noise', 26 | 'yamux', 27 | 'tcp', 28 | 'tokio', 29 | 'macros', 30 | 'serde', 31 | 'rsa', 32 | 'ecdsa', 33 | 'ed25519', 34 | 'secp256k1', 35 | ] } 36 | libp2p-swarm-test = { workspace = true } 37 | multihash-codetable = { workspace = true } 38 | rust2go = { workspace = true } 39 | tokio = { workspace = true, features = ['full'] } 40 | 41 | [build-dependencies] 42 | rust2go = { workspace = true, features = ["build"] } 43 | -------------------------------------------------------------------------------- /interop-tests/README.md: 
This crate contains interop tests for Forest.
panic(err) 10 | } 11 | } 12 | 13 | // To avoid potential panics 14 | // See 15 | func setGoDebugEnv() { 16 | os.Setenv("GODEBUG", "invalidptr=0,cgocheck=0") 17 | } 18 | -------------------------------------------------------------------------------- /interop-tests/src/tests/go_ffi.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod binding { 5 | #![allow(warnings)] 6 | #![allow(clippy::indexing_slicing)] 7 | rust2go::r2g_include_binding!(); 8 | } 9 | 10 | #[rust2go::r2g] 11 | pub trait GoKadNode { 12 | fn run(); 13 | 14 | fn connect(multiaddr: &String); 15 | 16 | fn get_n_connected() -> usize; 17 | } 18 | 19 | #[rust2go::r2g] 20 | pub trait GoBitswapNode { 21 | fn run(); 22 | 23 | fn connect(multiaddr: &String); 24 | 25 | fn get_block(cid: &String) -> bool; 26 | } 27 | -------------------------------------------------------------------------------- /interop-tests/src/tests/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod go_ffi; 5 | 6 | mod bitswap_go_compat; 7 | mod kad_go_compat; 8 | -------------------------------------------------------------------------------- /monitoring/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # Docker compose file to start the metrics and monitoring stack for a local Forest node 2 | # 3 | # # Processes 4 | # - Prometheus server 5 | # - Loki server 6 | # - Grafana server 7 | 8 | version: "3.8" 9 | 10 | volumes: 11 | prometheus_data: {} 12 | 13 | services: 14 | prometheus: 15 | image: prom/prometheus 16 | command: 17 | - "--config.file=/etc/prometheus/prometheus.yml" 18 | - "--storage.tsdb.path=/prometheus" 19 | - "--web.console.libraries=/etc/prometheus/console_libraries" 20 | - 
"--web.console.templates=/etc/prometheus/consoles" 21 | - "--web.enable-lifecycle" 22 | volumes: 23 | - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml 24 | - prometheus_data:/prometheus/ 25 | restart: unless-stopped 26 | extra_hosts: 27 | - host.docker.internal:host-gateway 28 | 29 | loki: 30 | image: grafana/loki 31 | restart: unless-stopped 32 | ports: 33 | - "3100:3100" 34 | 35 | grafana: 36 | image: grafana/grafana 37 | depends_on: 38 | - prometheus 39 | - loki 40 | volumes: 41 | - ./grafana/provisioning/:/etc/grafana/provisioning 42 | - ./grafana/dashboards/:/etc/grafana/provisioning/dashboard-definitions 43 | restart: unless-stopped 44 | ports: 45 | - "3000:3000" 46 | -------------------------------------------------------------------------------- /monitoring/grafana/dashboards/README.md: -------------------------------------------------------------------------------- 1 | # Preloaded Dashboards for Forest 2 | 3 | ## Dashboards 4 | 5 | - `forest`: The forest dashboard keeps track of process, syncing, and execution 6 | metrics 7 | 8 | ## Updating 9 | 10 | To update any dashboard, make changes to the dashboard in the Grafana web 11 | application, export the dashboard, and replace the dashboard JSON definition in 12 | this directory. 
13 | -------------------------------------------------------------------------------- /monitoring/grafana/provisioning/dashboards/dashboard.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | providers: 4 | - name: 'Prometheus' 5 | orgId: 1 6 | folder: '' 7 | type: file 8 | disableDeletion: false 9 | editable: false 10 | options: 11 | path: /etc/grafana/provisioning/dashboard-definitions 12 | -------------------------------------------------------------------------------- /monitoring/prometheus/prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 5s 3 | 4 | scrape_configs: 5 | - job_name: 'forest' 6 | static_configs: 7 | - targets: ['host.docker.internal:6116'] 8 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "root", 3 | "private": true, 4 | "devDependencies": { 5 | "@docusaurus/tsconfig": "^3.7.0", 6 | "prettier": "^3.5.3" 7 | }, 8 | "scripts": { 9 | "prettier-version": "prettier --version", 10 | "md-fmt": "prettier --write '**/*.md'", 11 | "md-check": "prettier --check '**/*.md'" 12 | }, 13 | "packageManager": "yarn@4.7.0" 14 | } 15 | -------------------------------------------------------------------------------- /proto/bitswap_pb.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package bitswap_pb; 4 | 5 | message Message { 6 | 7 | message Wantlist { 8 | enum WantType { 9 | Block = 0; 10 | Have = 1; 11 | } 12 | 13 | message Entry { 14 | bytes block = 1; // the block cid (cidV0 in bitswap 1.0.0, cidV1 in bitswap 1.1.0) 15 | int32 priority = 2; // the priority (normalized). 
default to 1 16 | bool cancel = 3; // whether this revokes an entry 17 | WantType wantType = 4; // Note: defaults to enum 0, ie Block 18 | bool sendDontHave = 5; // Note: defaults to false 19 | } 20 | 21 | repeated Entry entries = 1; // a list of wantlist entries 22 | bool full = 2; // whether this is the full wantlist. default to false 23 | } 24 | 25 | message Block { 26 | bytes prefix = 1; // CID prefix (cid version, multicodec and multihash prefix (type + length) 27 | bytes data = 2; 28 | } 29 | 30 | enum BlockPresenceType { 31 | Have = 0; 32 | DontHave = 1; 33 | } 34 | message BlockPresence { 35 | bytes cid = 1; 36 | BlockPresenceType type = 2; 37 | } 38 | 39 | Wantlist wantlist = 1; 40 | repeated bytes blocks = 2; // used to send Blocks in bitswap 1.0.0 41 | repeated Block payload = 3; // used to send Blocks in bitswap 1.1.0 42 | repeated BlockPresence blockPresences = 4; 43 | int32 pendingBytes = 5; 44 | } 45 | -------------------------------------------------------------------------------- /rust-toolchain.toml: -------------------------------------------------------------------------------- 1 | [toolchain] 2 | channel = "1.87.0" 3 | components = ["clippy", "llvm-tools-preview", "rustfmt"] 4 | -------------------------------------------------------------------------------- /scripts/add_license.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Checks if the source code contains required license and adds it if necessary. 4 | # Returns 1 if there was a missing license, 0 otherwise. 5 | 6 | PAT_APA="^// Copyright 2019-2025 ChainSafe Systems// SPDX-License-Identifier: Apache-2.0, MIT$" 7 | 8 | ret=0 9 | for file in $(git grep --cached -Il '' -- '*.rs' ':!*src/utils/encoding/fallback_de_ipld_dagcbor.rs' ':!*src/external/**/*.rs'); do 10 | header=$(head -2 "$file" | tr -d '\n') 11 | if ! 
echo "$header" | grep -q "$PAT_APA"; then 12 | echo "$file was missing header" 13 | cat ./scripts/copyright.txt "$file" > temp 14 | mv temp "$file" 15 | ret=1 16 | fi 17 | done 18 | 19 | exit $ret 20 | -------------------------------------------------------------------------------- /scripts/copyright.txt: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | -------------------------------------------------------------------------------- /scripts/db_params_hyperfine.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | CHAIN=calibnet 4 | 5 | # https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_full_calibnet_2023-04-07_450000.car 6 | SNAPSHOT=filecoin_full_calibnet_2023-04-07_450000.car 7 | if [ ! -f $SNAPSHOT ] 8 | then 9 | aria2c -x 4 "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_full_calibnet_2023-04-07_450000.car" 10 | fi 11 | 12 | cargo build --release 13 | 14 | # For some reason, cleaning the database with --cleanup gives me wildly inconsistent results. 
15 | hyperfine \ 16 | --runs 5 \ 17 | --parameter-list CHUNK_SIZE 1000,5000,10000,20000,40000,200000,500000 \ 18 | --parameter-list BUFFER_CAPACITY 0,1,2,3 \ 19 | --export-markdown db_tune_params.md \ 20 | --command-name 'forest-import-{CHUNK_SIZE}-{BUFFER_CAPACITY}' \ 21 | "echo \"[client]\nchunk_size = {CHUNK_SIZE}\nbuffer_size = {BUFFER_CAPACITY}\" > /tmp/forest.conf; \ 22 | ./target/release/forest \ 23 | --chain ${CHAIN} --config /tmp/forest.conf --rpc false --no-gc --encrypt-keystore false --halt-after-import \ 24 | --import-snapshot ${SNAPSHOT}; \ 25 | ./target/release/forest-tool db destroy --chain ${CHAIN} --force" 26 | -------------------------------------------------------------------------------- /scripts/devnet-curio/.env: -------------------------------------------------------------------------------- 1 | LOTUS_IMAGE=ghcr.io/chainsafe/lotus-devnet:2024-12-06-2368695 2 | CURIO_IMAGE=ghcr.io/chainsafe/curio-devnet:2025-01-20-622bacd 3 | FOREST_DATA_DIR=/forest_data 4 | LOTUS_DATA_DIR=/lotus_data 5 | CURIO_REPO_PATH=/var/lib/curio 6 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters 7 | MINER_ACTOR_ADDRESS=t01000 8 | LOTUS_RPC_PORT=1234 9 | LOTUS_P2P_PORT=1235 10 | MINER_RPC_PORT=2345 11 | FOREST_RPC_PORT=3456 12 | FOREST_OFFLINE_RPC_PORT=3457 13 | F3_RPC_PORT=23456 14 | F3_FINALITY=10 15 | GENESIS_NETWORK_VERSION=25 16 | SHARK_HEIGHT=-10 17 | HYGGE_HEIGHT=-9 18 | LIGHTNING_HEIGHT=-8 19 | THUNDER_HEIGHT=-7 20 | WATERMELON_HEIGHT=-6 21 | DRAGON_HEIGHT=-5 22 | WAFFLE_HEIGHT=-4 23 | TUKTUK_HEIGHT=-3 24 | TEEP_HEIGHT=-2 25 | TARGET_HEIGHT=24 26 | -------------------------------------------------------------------------------- /scripts/devnet-curio/curio.env: -------------------------------------------------------------------------------- 1 | LOTUS_PATH=/lotus_data/lotus-local-net 2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net 3 | LOTUS_SKIP_GENESIS_CHECK=_yes_ 4 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http 5 | 
CURIO_REPO_PATH=/var/lib/curio 6 | CURIO_HARMONYDB_HOSTS=yugabyte 7 | -------------------------------------------------------------------------------- /scripts/devnet-curio/forest_config.toml.tpl: -------------------------------------------------------------------------------- 1 | [client] 2 | encrypt_keystore = false 3 | data_dir = "/forest_data" 4 | 5 | [network] 6 | kademlia = false 7 | target_peer_count = 1 8 | 9 | # Note that this has to come last. The actual TOML file will have 10 | # the chain name appended. 11 | [chain] 12 | type = "devnet" 13 | -------------------------------------------------------------------------------- /scripts/devnet-curio/lotus-miner.env: -------------------------------------------------------------------------------- 1 | LOTUS_PATH=/lotus_data/lotus-local-net 2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net 3 | LOTUS_SKIP_GENESIS_CHECK=_yes_ 4 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http 5 | -------------------------------------------------------------------------------- /scripts/devnet-curio/lotus.env: -------------------------------------------------------------------------------- 1 | LOTUS_PATH=/lotus_data/lotus-local-net 2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net 3 | LOTUS_SKIP_GENESIS_CHECK=_yes_ 4 | LOTUS_API_LISTENADDRESS=/dns/lotus/tcp/1234/http 5 | LOTUS_LIBP2P_LISTENADDRESSES=/ip4/0.0.0.0/tcp/1235 6 | -------------------------------------------------------------------------------- /scripts/devnet/.env: -------------------------------------------------------------------------------- 1 | LOTUS_IMAGE=ghcr.io/chainsafe/lotus-devnet:2025-04-04-824e369 2 | FOREST_DATA_DIR=/forest_data 3 | LOTUS_DATA_DIR=/lotus_data 4 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters 5 | MINER_ACTOR_ADDRESS=t01000 6 | LOTUS_RPC_PORT=1234 7 | LOTUS_P2P_PORT=1235 8 | MINER_RPC_PORT=2345 9 | FOREST_RPC_PORT=3456 10 | FOREST_OFFLINE_RPC_PORT=3457 11 | F3_RPC_PORT=23456 12 | F3_FINALITY=10 13 | 
GENESIS_NETWORK_VERSION=18 14 | SHARK_HEIGHT=-10 15 | HYGGE_HEIGHT=-9 16 | LIGHTNING_HEIGHT=3 17 | THUNDER_HEIGHT=6 18 | WATERMELON_HEIGHT=9 19 | DRAGON_HEIGHT=12 20 | WAFFLE_HEIGHT=18 21 | TUKTUK_HEIGHT=20 22 | TEEP_HEIGHT=22 23 | TOCK_HEIGHT=24 24 | TOCK_FIX_HEIGHT=26 25 | TARGET_HEIGHT=30 26 | -------------------------------------------------------------------------------- /scripts/devnet/forest_ci.dockerfile: -------------------------------------------------------------------------------- 1 | # The version has to match Github CI runner images 2 | FROM ubuntu:24.04 3 | 4 | ENV DEBIAN_FRONTEND=noninteractive 5 | 6 | RUN apt-get update && apt-get install --no-install-recommends -y \ 7 | ca-certificates \ 8 | && rm -rf /var/lib/apt/lists/* 9 | 10 | COPY forest* /usr/local/bin/ 11 | RUN chmod +x /usr/local/bin/forest* 12 | 13 | # Roughly verify that the binaries work. 14 | # This should ensure that all dynamically-linked libraries are present. 15 | RUN forest -V && forest-cli -V 16 | -------------------------------------------------------------------------------- /scripts/devnet/forest_config.toml.tpl: -------------------------------------------------------------------------------- 1 | [client] 2 | encrypt_keystore = false 3 | data_dir = "/forest_data" 4 | 5 | [network] 6 | kademlia = false 7 | target_peer_count = 1 8 | 9 | # Note that this has to come last. The actual TOML file will have 10 | # the chain name appended. 
11 | [chain] 12 | type = "devnet" 13 | -------------------------------------------------------------------------------- /scripts/devnet/lotus-miner.env: -------------------------------------------------------------------------------- 1 | LOTUS_PATH=/lotus_data/lotus-local-net 2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net 3 | LOTUS_SKIP_GENESIS_CHECK=_yes_ 4 | LOTUS_API_LISTENADDRESS=/dns/lotus/tcp/2345/http 5 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http 6 | -------------------------------------------------------------------------------- /scripts/devnet/lotus.env: -------------------------------------------------------------------------------- 1 | LOTUS_PATH=/lotus_data/lotus-local-net 2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net 3 | LOTUS_SKIP_GENESIS_CHECK=_yes_ 4 | LOTUS_API_LISTENADDRESS=/dns/lotus/tcp/1234/http 5 | LOTUS_LIBP2P_LISTENADDRESSES=/ip4/0.0.0.0/tcp/1235 6 | -------------------------------------------------------------------------------- /scripts/devnet/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script is used to set up the CI environment for the 3 | # local devnet tests. 4 | 5 | set -euxo pipefail 6 | 7 | # Path to the directory containing this script. 8 | PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) 9 | pushd "${PARENT_PATH}" 10 | source .env 11 | 12 | # This should not be needed in GH. It is useful for running locally. 13 | docker compose down --remove-orphans 14 | docker compose rm -f 15 | # Cleanup data volumes 16 | docker volume rm -f devnet_lotus-data 17 | docker volume rm -f devnet_forest-data 18 | 19 | # Run it in the background so we can perform checks on it. 20 | # Ideally, we could use `--wait` and `--wait-timeout` to wait for services 21 | # to be up. However, `compose` does not distinct between services and 22 | # init containers. 
See more: https://github.com/docker/compose/issues/10596 23 | docker compose up --build --force-recreate --detach 24 | 25 | # Wait for Forest to be ready. We can assume that it is ready when the 26 | # RPC server is up. This checks if Forest's RPC endpoint is up. 27 | function call_forest_chain_head { 28 | curl --silent -X POST -H "Content-Type: application/json" \ 29 | --data '{"jsonrpc":"2.0","id":2,"method":"Filecoin.ChainHead","param":"null"}' \ 30 | "http://127.0.0.1:${FOREST_RPC_PORT}/rpc/v1" 31 | } 32 | 33 | until call_forest_chain_head; do 34 | echo "Forest is unavailable - sleeping for 1s" 35 | sleep 1 36 | done 37 | 38 | popd 39 | -------------------------------------------------------------------------------- /scripts/linters/find_unused_deps.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'toml-rb' 4 | require 'set' 5 | 6 | exit_code = 0 7 | 8 | def get_pattern(crate_raw) 9 | crate = crate_raw.gsub(/-/, '_') 10 | Regexp.new("(\\buse\\s#{crate}\\b)|(\\b#{crate}::)") 11 | end 12 | 13 | # Special cases to suppress false positives. 14 | def excluded?(crates, crate) 15 | # `quickcheck` is required implicitly by `quickcheck_macros` 16 | crate == 'quickcheck' && crates.include?('quickcheck_macros') 17 | end 18 | 19 | Dir.glob('**/*.toml').each do |file| 20 | crate_dir = File.dirname(file) 21 | toml = TomlRB.load_file(file) 22 | crates = Set.new 23 | toml['dependencies']&.each_key do |crate_name| 24 | crates.add crate_name 25 | end 26 | toml['dev-dependencies']&.each_key do |crate_name| 27 | crates.add crate_name 28 | end 29 | if toml['workspace'] 30 | toml['workspace']['dependencies']&.each_key do |crate_name| 31 | crates.add crate_name 32 | end 33 | end 34 | 35 | # Load all the source code from the crate into an in-memory array 36 | # to improve performance. 
37 | source_code = Dir.glob("#{crate_dir}/**/*.rs").map { |rs| File.read(rs) } 38 | crates.each do |crate| 39 | pattern = get_pattern(crate) 40 | unless source_code.any? { |line| line.match?(pattern) } || excluded?(crates, crate) 41 | puts "Potentially unused: #{crate} in #{crate_dir}" 42 | exit_code = 1 43 | end 44 | end 45 | end 46 | 47 | exit exit_code 48 | -------------------------------------------------------------------------------- /scripts/s3/requirement.txt: -------------------------------------------------------------------------------- 1 | boto3==1.35.27 2 | -------------------------------------------------------------------------------- /scripts/s3/set_sccache_do_bucket_lifecycle.py: -------------------------------------------------------------------------------- 1 | # 2 | # This script sets the bucket lifecycle policy of the sccache digitalocean 3 | # space backend to automatically delete records after a period of time. 4 | # 5 | # to run this script, DO space key and secret need to be configured with 6 | # environment variables `DO_SPACE_KEY` and `DO_SPACE_SECRET` respectively. 
7 | # 8 | 9 | import boto3 10 | import json 11 | import os 12 | 13 | 14 | def main(): 15 | bucket = "forest-sccache-us-west" 16 | lifecycle_config = { 17 | "Rules": [ 18 | { 19 | "Expiration": { 20 | "Days": 30, 21 | }, 22 | "ID": "cache-retention", 23 | "Prefix": "", 24 | "Status": "Enabled", 25 | }, 26 | ] 27 | } 28 | s3 = boto3.client( 29 | "s3", 30 | region_name="sfo3", 31 | endpoint_url="https://sfo3.digitaloceanspaces.com", 32 | aws_access_key_id=os.getenv("DO_SPACE_KEY"), 33 | aws_secret_access_key=os.getenv("DO_SPACE_SECRET"), 34 | ) 35 | s3.put_bucket_lifecycle_configuration( 36 | Bucket=bucket, LifecycleConfiguration=lifecycle_config 37 | ) 38 | result = s3.get_bucket_lifecycle_configuration(Bucket=bucket) 39 | print(json.dumps(result)) 40 | 41 | 42 | if __name__ == "__main__": 43 | main() 44 | -------------------------------------------------------------------------------- /scripts/tests/api_compare/.env: -------------------------------------------------------------------------------- 1 | # Note: this should be a `fat` image so that it contains the pre-downloaded filecoin proof parameters 2 | FOREST_IMAGE=ghcr.io/chainsafe/forest:edge-fat 3 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet 4 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters 5 | LOTUS_RPC_PORT=1234 6 | FOREST_RPC_PORT=2345 7 | FOREST_OFFLINE_RPC_PORT=3456 8 | FOREST_HEALTHZ_RPC_PORT=2346 9 | CHAIN=calibnet 10 | 11 | # This is a pre-generated miner generated from Lotus 12 | # The process is too lengthy to create the miner on the fly (needs to send FIL to the miner, wait for confirmations, etc) 13 | # It's fine to use this miner for testing purposes, e.g., signing messages in tests. 
14 | MINER_ADDRESS=t0111551 # t2nfplhzpyeck5dcc4fokj5ar6nbs3mhbdmq6xu3q 15 | MINER_WORKER_ADDRESS=t3sw466j35hqjbch5x7tcr7ona6idsgzypoturfci2ajqsfrrwhp7ty3ythtd7x646adaidnvxpdr5b2ftcciq 16 | MINER_WORKER_KEY=7b2254797065223a22626c73222c22507269766174654b6579223a225a6c4c784f55666d666f44332b577a2f386175482f6b2f456f4b674443365365584256563447714c4c6d6b3d227d 17 | -------------------------------------------------------------------------------- /scripts/tests/api_compare/filter-list: -------------------------------------------------------------------------------- 1 | # This list contains potentially broken methods (or tests) that are ignored. 2 | # They should be considered bugged, and not used until the root cause is resolved. 3 | # Disable until next Lotus release with go-f3 0.8.0 4 | !Filecoin.F3GetManifest 5 | -------------------------------------------------------------------------------- /scripts/tests/api_compare/filter-list-offline: -------------------------------------------------------------------------------- 1 | # This list contains potentially broken methods (or tests) that are ignored. 2 | # They should be considered bugged, and not used until the root cause is resolved. 
3 | !Filecoin.EthSyncing 4 | !eth_syncing 5 | !Filecoin.NetAddrsListen 6 | !Filecoin.NetAgentVersion 7 | !Filecoin.NetAutoNatStatus 8 | !Filecoin.NetPeers 9 | !Filecoin.NetFindPeer 10 | !Filecoin.NetProtectAdd 11 | !Filecoin.NetProtectRemove 12 | !Filecoin.NetProtectList 13 | # Most F3 methods are not avaiable on offline Forest RPC server 14 | !Filecoin.F3GetManifest 15 | !Filecoin.F3GetLatestCertificate 16 | !Filecoin.F3ListParticipants 17 | !Filecoin.F3GetProgress 18 | !Filecoin.F3IsRunning 19 | !Filecoin.F3GetCertificate 20 | !Filecoin.F3GetOrRenewParticipationTicket 21 | !Filecoin.F3GetF3PowerTable 22 | # CustomCheckFailed in Forest: https://github.com/ChainSafe/forest/actions/runs/9593268587/job/26453560366 23 | !Filecoin.StateCall 24 | # These methods don't make sense in the context of an offline node 25 | !Filecoin.MinerCreateBlock 26 | # Offline server won't provide correct results for finality-related methods 27 | !Filecoin.EthGetBlockByNumber 28 | !eth_getBlockByNumber 29 | !Filecoin.ChainSetHead 30 | -------------------------------------------------------------------------------- /scripts/tests/api_compare/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script is used to set up clean environment for the 3 | # API compare checks. 4 | 5 | set -euxo pipefail 6 | 7 | # Path to the directory containing this script. 8 | PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) 9 | pushd "${PARENT_PATH}" 10 | source .env 11 | 12 | # This should not be needed in GH. It is useful for running locally. 13 | docker compose --profile include-offline-rpc down --remove-orphans 14 | docker compose rm -f 15 | # Cleanup data volumes 16 | # docker volume rm -f api_compare_node-data 17 | 18 | # Run it in the background so we can perform checks on it. 19 | # Ideally, we could use `--wait` and `--wait-timeout` to wait for services 20 | # to be up. 
However, `compose` does not distinct between services and 21 | # init containers. See more: https://github.com/docker/compose/issues/10596 22 | docker compose --profile include-offline-rpc up --build --force-recreate --detach --timestamps 23 | 24 | popd 25 | -------------------------------------------------------------------------------- /scripts/tests/bootstrapper/.env: -------------------------------------------------------------------------------- 1 | # Note: this should be a `fat` image so that it contains the pre-downloaded filecoin proof parameters 2 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet 3 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters 4 | LOTUS_RPC_PORT=1234 5 | FOREST_RPC_PORT=2345 6 | FOREST_P2P_PORT=12345 7 | # Pre-generated keypair for the forest node. This is required to easily connect to the forest node from the lotus node. 8 | FOREST_PEER_KEYPAIR=7PCBrDPUebd7Pj+DqhbzNuKBWmldP9r2K5eEnbYelUoK4xd+ng8c6C9gDa/q31/U5b6FIlNnHDQLQ4WSop1y6w== 9 | # The PeerID is derived from the `FOREST_PEER_KEYPAIR`. 10 | FOREST_BOOTSTRAPPER_ADDRESS=/dns/forest-bootstrapper/tcp/12345/p2p/12D3KooWAYs5zbzniHaL9RnnH2RKdNvibuj3BCS4b3bHtYvC81yL 11 | CHAIN=calibnet 12 | -------------------------------------------------------------------------------- /scripts/tests/bootstrapper/README.md: -------------------------------------------------------------------------------- 1 | # Forest as a bootstrapper test 2 | 3 | The setup here creates a single Forest bootstrap peer with a well-known peer id 4 | and p2p listening port. Then, a secondary Forest or Lotus are created and 5 | connected to that peer. The assertion succeeds if the secondary peer is able to 6 | sync the chain from the bootstrap peer and have multiple peers in their 7 | peerstores. 8 | 9 | Note that Lotus and Forest are checked separately, to limit the resources 10 | required for the test. 
11 | 12 | This is illustrated in the following flowchart: 13 | 14 | ```mermaid 15 | flowchart TD 16 | A[Init] -->|Download proofs and snapshot| B(Start the Forest bootstrapper) 17 | B --> C(Start Forest/Lotus peer) 18 | C -->|Wait for sync| E(Assert peer store populated) 19 | E --> F(Finish) 20 | ``` 21 | 22 | ## Usage 23 | 24 | ```bash 25 | ./test_bootstrapper.sh forest 26 | ./test_bootstrapper.sh lotus 27 | ``` 28 | 29 | ## Teardown 30 | 31 | ```bash 32 | docker compose -f ./docker-compose-forest.yml down -v --rmi all 33 | docker compose -f ./docker-compose-lotus.yml down -v --rmi all 34 | ``` 35 | -------------------------------------------------------------------------------- /scripts/tests/bootstrapper/test_bootstrapper.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script is used to set up clean environment for the bootstrapper tests. 3 | 4 | set -euxo pipefail 5 | 6 | # Accepts one arg : Forest or Lotus 7 | if [ $# -ne 1 ]; then 8 | echo "Usage: $0 " 9 | exit 1 10 | fi 11 | 12 | if [ "$1" == "forest" ]; then 13 | COMPOSE_FILE="docker-compose-forest.yml" 14 | elif [ "$1" == "lotus" ]; then 15 | COMPOSE_FILE="docker-compose-lotus.yml" 16 | else 17 | echo "Usage: $0 " 18 | exit 1 19 | fi 20 | 21 | # Path to the directory containing this script. 22 | PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) 23 | pushd "${PARENT_PATH}" 24 | source .env 25 | 26 | # This should not be needed in GH. It is useful for running locally. 27 | docker compose -f $COMPOSE_FILE down --remove-orphans 28 | docker compose -f $COMPOSE_FILE rm -f 29 | 30 | # Run it in the background so we can perform checks on it. 31 | # Ideally, we could use `--wait` and `--wait-timeout` to wait for services 32 | # to be up. However, `compose` does not distinct between services and 33 | # init containers. 
See more: https://github.com/docker/compose/issues/10596 34 | docker compose -f $COMPOSE_FILE up --build --force-recreate --detach --timestamps 35 | 36 | popd 37 | -------------------------------------------------------------------------------- /scripts/tests/butterflynet_check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euxo pipefail 3 | 4 | # This script tests Forest is able to catch up the butterflynet. 5 | 6 | source "$(dirname "$0")/harness.sh" 7 | 8 | function shutdown { 9 | kill -KILL $FOREST_NODE_PID 10 | } 11 | 12 | trap shutdown EXIT 13 | 14 | $FOREST_PATH --chain butterflynet --encrypt-keystore false & 15 | FOREST_NODE_PID=$! 16 | 17 | forest_wait_api 18 | 19 | forest_wait_for_sync 20 | -------------------------------------------------------------------------------- /scripts/tests/calibnet_export_check.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # This script is checking the correctness of 3 | # the snapshot export feature. 4 | # It requires both the `forest` and `forest-cli` binaries to be in the PATH. 
5 | 6 | set -eu 7 | 8 | source "$(dirname "$0")/harness.sh" 9 | 10 | forest_init "$@" 11 | 12 | echo "Cleaning up the initial snapshot" 13 | rm --force --verbose ./*.{car,car.zst,sha256sum} 14 | 15 | echo "Exporting zstd compressed snapshot" 16 | $FOREST_CLI_PATH snapshot export 17 | 18 | echo "Testing snapshot validity" 19 | zstd --test ./*.car.zst 20 | 21 | echo "Verifying snapshot checksum" 22 | sha256sum --check ./*.sha256sum 23 | 24 | echo "Validating CAR files" 25 | zstd --decompress ./*.car.zst 26 | for f in *.car; do 27 | echo "Validating CAR file $f" 28 | $FOREST_TOOL_PATH snapshot validate "$f" 29 | done 30 | 31 | echo "Exporting zstd compressed snapshot at genesis" 32 | $FOREST_CLI_PATH snapshot export --tipset 0 33 | 34 | echo "Testing genesis snapshot validity" 35 | zstd --test forest_snapshot_calibnet_2022-11-01_height_0.forest.car.zst 36 | -------------------------------------------------------------------------------- /scripts/tests/calibnet_migration_regression_tests.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # To test that migrations still work, we import a snapshot 100 epochs after the 6 | # migration point and then we validate the last 200 tipsets. This triggers the 7 | # migration logic without connecting to the real Filecoin network. 
8 | 9 | FOREST_PATH="forest" 10 | MIGRATION_TEST="$FOREST_PATH --chain calibnet --encrypt-keystore false --halt-after-import --height=-200 --no-gc --import-snapshot" 11 | 12 | # NV17 - Shark, uncomment when we support the nv17 migration 13 | echo NV17 - Shark 14 | $MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_16900.car.zst" 15 | 16 | echo NV18 - Hygge 17 | $MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_322454.car.zst" 18 | 19 | echo NV19 - Lightning 20 | $MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_489194.car.zst" 21 | 22 | echo NV20 - Thunder # (no migration should happen in practice, it's a shadow upgrade). We test it anyway. 23 | $MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_492314.car.zst" 24 | -------------------------------------------------------------------------------- /scripts/tests/calibnet_no_discovery_check.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euxo pipefail 3 | 4 | # This script tests forest behaviours when discovery(mdns and kademlia) is disabled 5 | 6 | source "$(dirname "$0")/harness.sh" 7 | 8 | function shutdown { 9 | kill -KILL $FOREST_NODE_PID 10 | } 11 | 12 | trap shutdown EXIT 13 | 14 | $FOREST_PATH --chain calibnet --encrypt-keystore false --mdns false --kademlia false --auto-download-snapshot --exit-after-init 15 | $FOREST_PATH --chain calibnet --encrypt-keystore false --mdns false --kademlia false --auto-download-snapshot --log-dir "$LOG_DIRECTORY" & 16 | FOREST_NODE_PID=$! 
17 | 18 | forest_wait_api 19 | 20 | # Verify that one of the seed nodes has been connected to 21 | until $FOREST_CLI_PATH net peers | grep "calib"; do 22 | sleep 1s; 23 | done 24 | 25 | # Verify F3 is getting certificates from the network 26 | until [[ $($FOREST_CLI_PATH f3 certs get --output json | jq '.GPBFTInstance') -gt 100 ]]; do 27 | sleep 1s; 28 | done 29 | 30 | echo "Test subcommands: f3 status" 31 | $FOREST_CLI_PATH f3 status 32 | echo "Test subcommands: f3 manifest" 33 | $FOREST_CLI_PATH f3 manifest 34 | echo "Test subcommands: f3 certs get" 35 | $FOREST_CLI_PATH f3 certs list 36 | echo "Test subcommands: f3 certs list" 37 | $FOREST_CLI_PATH f3 certs get 38 | -------------------------------------------------------------------------------- /scripts/tests/calibnet_stateless_mode_check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -euxo pipefail 3 | 4 | # This script tests the stateless mode of a forest node 5 | 6 | source "$(dirname "$0")/harness.sh" 7 | 8 | forest_init_stateless 9 | 10 | # Example format: /ip4/127.0.0.1/tcp/41937/p2p/12D3KooWAB9z7vZ1x1v9t4BViVkX1Hy1ScoRnWV2GgGy5ec6pfUZ 11 | STATELESS_NODE_ADDRESS=$($FOREST_CLI_PATH net listen | tail -n 1) 12 | echo "Stateless node address: $STATELESS_NODE_ADDRESS" 13 | # Example format: 12D3KooWAB9z7vZ1x1v9t4BViVkX1Hy1ScoRnWV2GgGy5ec6pfUZ 14 | STATELESS_NODE_PEER_ID=$(echo "$STATELESS_NODE_ADDRESS" | cut --delimiter="/" --fields=7 --zero-terminated) 15 | echo "Stateless node peer id: $STATELESS_NODE_PEER_ID" 16 | 17 | # Run a normal forest node that only connects to the stateless node 18 | CONFIG_PATH="./forest_config.toml" 19 | cat <<- EOF > $CONFIG_PATH 20 | [network] 21 | listening_multiaddrs = ["/ip4/127.0.0.1/tcp/0"] 22 | bootstrap_peers = ["$STATELESS_NODE_ADDRESS"] 23 | mdns = false 24 | kademlia = false 25 | EOF 26 | 27 | # Disable discovery to not connect to more nodes 28 | $FOREST_PATH --chain calibnet --encrypt-keystore false 
--auto-download-snapshot --config "$CONFIG_PATH" --rpc false --metrics-address 127.0.0.1:6117 --healthcheck-address 127.0.0.1:2347 & 29 | FOREST_NODE_PID=$! 30 | # Verify that the stateless node can respond to chain exchange requests 31 | until curl http://127.0.0.1:6117/metrics | grep "chain_exchange_response_in"; do 32 | sleep 1s; 33 | done 34 | kill -KILL $FOREST_NODE_PID 35 | -------------------------------------------------------------------------------- /scripts/tests/snapshot_parity/.env: -------------------------------------------------------------------------------- 1 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet 2 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters 3 | LOTUS_RPC_PORT=1234 4 | FOREST_RPC_PORT=2345 5 | CHAIN=calibnet 6 | EXPORT_EPOCHS=900 7 | -------------------------------------------------------------------------------- /scripts/tests/snapshot_parity/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script is used to set up clean environment for the 3 | # API compare checks. 4 | 5 | set -euxo pipefail 6 | 7 | # Path to the directory containing this script. 8 | PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P ) 9 | pushd "${PARENT_PATH}" 10 | source .env 11 | 12 | # This should not be needed in GH. It is useful for running locally. 13 | docker compose down --remove-orphans 14 | docker compose rm -f 15 | # Cleanup data volumes 16 | # docker volume rm -f snapshot_parity_node-data 17 | 18 | # Run it in the background so we can perform checks on it. 19 | # Ideally, we could use `--wait` and `--wait-timeout` to wait for services 20 | # to be up. However, `compose` does not distinct between services and 21 | # init containers. 
See more: https://github.com/docker/compose/issues/10596 22 | docker compose up --build --force-recreate --detach --timestamps 23 | 24 | popd 25 | -------------------------------------------------------------------------------- /src/beacon/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod beacon_entries; 5 | mod drand; 6 | pub mod signatures; 7 | pub use beacon_entries::*; 8 | pub use drand::*; 9 | 10 | #[cfg(test)] 11 | pub mod mock_beacon; 12 | #[cfg(test)] 13 | mod tests { 14 | mod drand; 15 | } 16 | -------------------------------------------------------------------------------- /src/beacon/signatures/signature_impls.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | impl From for SignatureOnG1 { 7 | fn from(val: G1Projective) -> Self { 8 | SignatureOnG1(val.into()) 9 | } 10 | } 11 | impl From for G1Projective { 12 | fn from(val: SignatureOnG1) -> Self { 13 | val.0.into() 14 | } 15 | } 16 | 17 | impl From for SignatureOnG1 { 18 | fn from(val: G1Affine) -> Self { 19 | SignatureOnG1(val) 20 | } 21 | } 22 | 23 | impl From for G1Affine { 24 | fn from(val: SignatureOnG1) -> Self { 25 | val.0 26 | } 27 | } 28 | 29 | fn g1_from_slice(raw: &[u8]) -> Result { 30 | const SIZE: usize = G1Affine::compressed_size(); 31 | 32 | if raw.len() != SIZE { 33 | return Err(Error::SizeMismatch); 34 | } 35 | 36 | let mut res = [0u8; SIZE]; 37 | res.copy_from_slice(raw); 38 | 39 | Option::from(G1Affine::from_compressed(&res)).ok_or(Error::GroupDecode) 40 | } 41 | 42 | impl SignatureOnG1 { 43 | pub fn from_bytes(raw: &[u8]) -> Result { 44 | let g1 = g1_from_slice(raw)?; 45 | Ok(g1.into()) 46 | } 47 | 48 | pub fn as_bytes(&self) -> [u8; G1Affine::compressed_size()] { 49 | 
self.0.to_compressed() 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/bin/forest-cli.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | fn main() -> anyhow::Result<()> { 5 | forest::forest_main(std::env::args_os()) 6 | } 7 | -------------------------------------------------------------------------------- /src/bin/forest-tool.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | fn main() -> anyhow::Result<()> { 5 | forest::forest_tool_main(std::env::args_os()) 6 | } 7 | -------------------------------------------------------------------------------- /src/bin/forest-wallet.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | fn main() -> anyhow::Result<()> { 5 | forest::forest_wallet_main(std::env::args_os()) 6 | } 7 | -------------------------------------------------------------------------------- /src/bin/forest.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | fn main() -> anyhow::Result<()> { 5 | forest::forestd_main(std::env::args_os()) 6 | } 7 | -------------------------------------------------------------------------------- /src/blocks/gossip_block.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use cid::Cid; 5 | use serde_tuple::{self, Deserialize_tuple, Serialize_tuple}; 6 | 7 | use crate::blocks::CachingBlockHeader; 8 | 9 | /// Block 
message used as serialized `gossipsub` messages for blocks topic. 10 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary, Default))] 11 | #[derive(Clone, Debug, PartialEq, Serialize_tuple, Deserialize_tuple)] 12 | pub struct GossipBlock { 13 | pub header: CachingBlockHeader, 14 | pub bls_messages: Vec, 15 | pub secpk_messages: Vec, 16 | } 17 | -------------------------------------------------------------------------------- /src/blocks/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use thiserror::Error; 5 | 6 | mod block; 7 | #[cfg(test)] 8 | mod chain4u; 9 | mod election_proof; 10 | mod gossip_block; 11 | mod header; 12 | mod ticket; 13 | #[cfg(not(doc))] 14 | mod tipset; 15 | #[cfg(doc)] 16 | pub mod tipset; 17 | mod vrf_proof; 18 | 19 | pub use block::{BLOCK_MESSAGE_LIMIT, Block, TxMeta}; 20 | pub use election_proof::ElectionProof; 21 | pub use gossip_block::GossipBlock; 22 | pub use header::{CachingBlockHeader, RawBlockHeader}; 23 | pub use ticket::Ticket; 24 | pub use tipset::{CreateTipsetError, FullTipset, Tipset, TipsetKey}; 25 | pub use vrf_proof::VRFProof; 26 | 27 | /// Blockchain blocks error 28 | #[derive(Debug, PartialEq, Eq, Error)] 29 | pub enum Error { 30 | /// Invalid signature 31 | #[error("Invalid signature: {0}")] 32 | InvalidSignature(String), 33 | /// Error in validating arbitrary data 34 | #[error("Error validating data: {0}")] 35 | Validation(String), 36 | } 37 | 38 | #[cfg(test)] 39 | pub(crate) use chain4u::{Chain4U, HeaderBuilder, chain4u}; 40 | 41 | #[cfg(any(test, doc))] 42 | mod tests { 43 | 44 | mod serialization_vectors; 45 | mod ticket_test; 46 | } 47 | -------------------------------------------------------------------------------- /src/blocks/tests/calibnet/HEAD: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/blocks/tests/calibnet/HEAD -------------------------------------------------------------------------------- /src/blocks/tests/serialization-vectors/README.md: -------------------------------------------------------------------------------- 1 | # The vectors are copied from the archived [serialization-vectors](https://github.com/filecoin-project/serialization-vectors) repository. 2 | -------------------------------------------------------------------------------- /src/blocks/tests/ticket_test.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::blocks::*; 5 | use crate::test_utils::construct_ticket; 6 | use crate::utils::encoding::from_slice_with_fallback; 7 | use fvm_ipld_encoding::to_vec; 8 | 9 | // From Lotus 10 | const TICKET: [u8; 99] = [ 11 | 0x81, 0x58, 0x60, 0x96, 0x64, 0x49, 0x2f, 0x30, 0xe9, 0xb9, 0x50, 0x3b, 0x71, 0x41, 0x0b, 0x1d, 12 | 0x38, 0x2e, 0x2b, 0xd4, 0x85, 0x7f, 0xe2, 0x15, 0x39, 0xac, 0x92, 0x1b, 0xcb, 0x7f, 0xd0, 0x86, 13 | 0xd5, 0x78, 0x71, 0xe6, 0xdd, 0x5c, 0x31, 0xcd, 0x23, 0x61, 0x8b, 0x52, 0x52, 0xb6, 0x2c, 0x7b, 14 | 0x44, 0x4c, 0x3a, 0x02, 0x9b, 0xba, 0xad, 0xc2, 0x50, 0x57, 0x56, 0x81, 0x06, 0x47, 0x77, 0xf6, 15 | 0x04, 0x06, 0xc4, 0xff, 0x00, 0x6f, 0x38, 0xfc, 0x61, 0x71, 0xfe, 0x45, 0xd4, 0x83, 0xe5, 0x15, 16 | 0x79, 0xd0, 0xe2, 0x47, 0x8b, 0x7e, 0x5f, 0xde, 0x2c, 0x51, 0xd2, 0xe8, 0x64, 0x63, 0xaf, 0x86, 17 | 0xd3, 0xcb, 0xd5, 18 | ]; 19 | 20 | #[test] 21 | fn encode_ticket() { 22 | let ticket = construct_ticket(); 23 | // Encode Ticket 24 | let encoded_ticket = to_vec(&ticket).unwrap(); 25 | assert_eq!(&TICKET[..], &encoded_ticket[..]); 26 | } 27 | 28 | #[test] 29 | fn decode_ticket() { 30 | let ticket = construct_ticket(); 31 | // Decode Ticket 32 | let decoded_ticket: Ticket = 
from_slice_with_fallback(&TICKET).unwrap(); 33 | assert_eq!(ticket, decoded_ticket); 34 | } 35 | -------------------------------------------------------------------------------- /src/blocks/ticket.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::blocks::VRFProof; 5 | use serde_tuple::{self, Deserialize_tuple, Serialize_tuple}; 6 | 7 | /// A Ticket is a marker of a tick of the blockchain's clock. It is the source 8 | /// of randomness for proofs of storage and leader election. It is generated 9 | /// by the miner of a block using a `VRF` and a `VDF`. 10 | #[derive( 11 | Clone, Debug, PartialEq, Eq, Default, Serialize_tuple, Deserialize_tuple, Hash, PartialOrd, Ord, 12 | )] 13 | pub struct Ticket { 14 | /// A proof output by running a `VRF` on the `VDFResult` of the parent 15 | /// ticket 16 | pub vrfproof: VRFProof, 17 | } 18 | 19 | impl Ticket { 20 | /// Ticket constructor 21 | pub fn new(vrfproof: VRFProof) -> Self { 22 | Self { vrfproof } 23 | } 24 | } 25 | 26 | #[cfg(test)] 27 | impl quickcheck::Arbitrary for Ticket { 28 | fn arbitrary(g: &mut quickcheck::Gen) -> Self { 29 | let fmt_str = format!("===={}=====", u64::arbitrary(g)); 30 | let vrfproof = VRFProof::new(fmt_str.into_bytes()); 31 | Self { vrfproof } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/blocks/vrf_proof.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::utils::encoding::{blake2b_256, serde_byte_array}; 5 | use serde::{Deserialize, Serialize}; 6 | 7 | /// The output from running a VRF proof. 
8 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))] 9 | #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Default, Serialize, Deserialize, Hash)] 10 | pub struct VRFProof(#[serde(with = "serde_byte_array")] pub Vec); 11 | 12 | impl VRFProof { 13 | /// Creates a `VRFProof` from a raw vector. 14 | pub fn new(output: Vec) -> Self { 15 | Self(output) 16 | } 17 | 18 | /// Returns reference to underlying proof bytes. 19 | pub fn as_bytes(&self) -> &[u8] { 20 | &self.0 21 | } 22 | 23 | /// Compute the `BLAKE2b256` digest of the proof. 24 | pub fn digest(&self) -> [u8; 32] { 25 | blake2b_256(&self.0) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/chain/store/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod base_fee; 5 | mod chain_store; 6 | mod errors; 7 | pub mod index; 8 | mod tipset_tracker; 9 | 10 | pub use self::{base_fee::*, chain_store::*, errors::*}; 11 | -------------------------------------------------------------------------------- /src/chain/weight.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub type Weight = num::BigInt; 5 | -------------------------------------------------------------------------------- /src/chain_sync/bad_block_cache.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::num::NonZeroUsize; 5 | 6 | use cid::Cid; 7 | use lru::LruCache; 8 | use nonzero_ext::nonzero; 9 | use parking_lot::Mutex; 10 | 11 | /// Thread-safe cache for tracking bad blocks. 
12 | /// This cache is checked before validating a block, to ensure no duplicate 13 | /// work. 14 | #[derive(Debug)] 15 | pub struct BadBlockCache { 16 | cache: Mutex>, 17 | } 18 | 19 | impl Default for BadBlockCache { 20 | fn default() -> Self { 21 | Self::new(nonzero!(1usize << 15)) 22 | } 23 | } 24 | 25 | impl BadBlockCache { 26 | pub fn new(cap: NonZeroUsize) -> Self { 27 | Self { 28 | cache: Mutex::new(LruCache::new(cap)), 29 | } 30 | } 31 | 32 | /// Puts a bad block `Cid` in the cache with a given reason. 33 | pub fn put(&self, c: Cid, reason: String) -> Option { 34 | self.cache.lock().put(c, reason) 35 | } 36 | 37 | /// Returns `Some` with the reason if the block CID is in bad block cache. 38 | /// This function does not update the head position of the `Cid` key. 39 | pub fn peek(&self, c: &Cid) -> Option { 40 | self.cache.lock().peek(c).cloned() 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/chain_sync/chain_muxer.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use serde::{Deserialize, Serialize}; 5 | 6 | const DEFAULT_RECENT_STATE_ROOTS: i64 = 2000; 7 | 8 | /// Structure that defines syncing configuration options 9 | #[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)] 10 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))] 11 | pub struct SyncConfig { 12 | /// Number of recent state roots to keep in the database after `sync` 13 | /// and to include in the exported snapshot. 
14 | pub recent_state_roots: i64, 15 | } 16 | 17 | impl Default for SyncConfig { 18 | fn default() -> Self { 19 | Self { 20 | recent_state_roots: DEFAULT_RECENT_STATE_ROOTS, 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/chain_sync/consensus.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use futures::{StreamExt, stream::FuturesUnordered}; 5 | use nunny::Vec as NonEmpty; 6 | 7 | /// Helper function to collect errors from async validations. 8 | pub async fn collect_errs( 9 | mut handles: FuturesUnordered>>, 10 | ) -> Result<(), NonEmpty> { 11 | let mut errors = Vec::new(); 12 | 13 | while let Some(result) = handles.next().await { 14 | if let Ok(Err(e)) = result { 15 | errors.push(e); 16 | } 17 | } 18 | 19 | match errors.try_into() { 20 | Ok(it) => Err(it), 21 | Err(_) => Ok(()), 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/chain_sync/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod bad_block_cache; 5 | mod chain_follower; 6 | mod chain_muxer; 7 | pub mod consensus; 8 | pub mod metrics; 9 | pub mod network_context; 10 | mod sync_status; 11 | mod tipset_syncer; 12 | mod validation; 13 | 14 | pub use self::{ 15 | bad_block_cache::BadBlockCache, 16 | chain_follower::ChainFollower, 17 | chain_muxer::SyncConfig, 18 | consensus::collect_errs, 19 | sync_status::{ForkSyncInfo, ForkSyncStage, NodeSyncStatus, SyncStatusReport}, 20 | validation::{TipsetValidationError, TipsetValidator}, 21 | }; 22 | -------------------------------------------------------------------------------- /src/cli/mod.rs: -------------------------------------------------------------------------------- 1 
| // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | pub mod humantoken; 4 | pub mod main; 5 | pub mod subcommands; 6 | -------------------------------------------------------------------------------- /src/cli/subcommands/chain_cmd/prune.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::rpc::{self, RpcMethodExt, chain::ChainPruneSnapshot}; 5 | use clap::Subcommand; 6 | use std::time::Duration; 7 | 8 | /// Prune chain database 9 | #[derive(Debug, Subcommand)] 10 | pub enum ChainPruneCommands { 11 | /// Run snapshot GC 12 | Snap { 13 | /// Do not block until GC is completed 14 | #[arg(long)] 15 | no_wait: bool, 16 | }, 17 | } 18 | 19 | impl ChainPruneCommands { 20 | pub async fn run(self, client: rpc::Client) -> anyhow::Result<()> { 21 | match self { 22 | Self::Snap { no_wait } => { 23 | client 24 | .call(ChainPruneSnapshot::request((!no_wait,))?.with_timeout(Duration::MAX)) 25 | .await?; 26 | } 27 | } 28 | 29 | Ok(()) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/cli/subcommands/config_cmd.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::io::Write; 5 | 6 | use anyhow::Context as _; 7 | use clap::Subcommand; 8 | 9 | use crate::cli::subcommands::Config; 10 | 11 | #[derive(Debug, Subcommand)] 12 | pub enum ConfigCommands { 13 | /// Dump default configuration to standard output 14 | Dump, 15 | } 16 | 17 | impl ConfigCommands { 18 | pub fn run(self, sink: &mut W) -> anyhow::Result<()> { 19 | match self { 20 | Self::Dump => writeln!( 21 | sink, 22 | "{}", 23 | toml::to_string(&Config::default()) 24 | .context("Could not convert configuration to TOML format")? 
25 | ) 26 | .context("Failed to write the configuration"), 27 | } 28 | } 29 | } 30 | 31 | #[cfg(test)] 32 | mod tests { 33 | use super::*; 34 | 35 | #[tokio::test] 36 | async fn given_default_configuration_should_print_valid_toml() { 37 | let expected_config = Config::default(); 38 | let mut sink = std::io::BufWriter::new(Vec::new()); 39 | 40 | ConfigCommands::Dump.run(&mut sink).unwrap(); 41 | 42 | let actual_config: Config = toml::from_str(std::str::from_utf8(sink.buffer()).unwrap()) 43 | .expect("Invalid configuration!"); 44 | 45 | assert_eq!(expected_config, actual_config); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/cli/subcommands/f3_cmd/certificate.tpl: -------------------------------------------------------------------------------- 1 | Instance: {{ GPBFTInstance }} 2 | Power Table: 3 | Next: {{ power_table_cid }} 4 | Delta: {{ power_table_delta_string }} 5 | Finalized Chain: 6 | Length: {{ ECChain | length }} 7 | Epochs: {{ epochs }} 8 | Chain: 9 | {% for line in chain_lines %} 10 | {{- line }} 11 | {% endfor %} 12 | -------------------------------------------------------------------------------- /src/cli/subcommands/f3_cmd/progress.tpl: -------------------------------------------------------------------------------- 1 | Progress: 2 | Instance: {{ ID }} 3 | Round: {{ Round }} 4 | Phase: {{ phase_string }} 5 | -------------------------------------------------------------------------------- /src/cli/subcommands/shutdown_cmd.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::cli::subcommands::prompt_confirm; 5 | use crate::rpc::{self, prelude::*}; 6 | 7 | #[derive(Debug, clap::Args)] 8 | pub struct ShutdownCommand { 9 | /// Assume "yes" as answer to shutdown prompt 10 | #[arg(long)] 11 | force: bool, 12 | } 13 | 14 | impl ShutdownCommand { 15 | pub async fn 
run(self, client: rpc::Client) -> anyhow::Result<()> { 16 | println!("Shutting down Forest node"); 17 | if !self.force && !prompt_confirm() { 18 | println!("Aborted."); 19 | return Ok(()); 20 | } 21 | Shutdown::call(&client, ()).await?; 22 | Ok(()) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/db/migration/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod db_migration; 5 | mod migration_map; 6 | mod v0_22_1; 7 | mod v0_26_0; 8 | mod void_migration; 9 | 10 | pub use db_migration::DbMigration; 11 | -------------------------------------------------------------------------------- /src/db/parity_db_config.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use serde::{Deserialize, Serialize}; 5 | 6 | /// `ParityDb` configuration exposed in Forest. 
7 | #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Default)] 8 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))] 9 | #[serde(default)] 10 | pub struct ParityDbConfig { 11 | pub enable_statistics: bool, 12 | } 13 | -------------------------------------------------------------------------------- /src/db/tests/db_utils/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub(in crate::db) mod parity; 5 | -------------------------------------------------------------------------------- /src/db/tests/db_utils/parity.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::ops::Deref; 5 | 6 | use crate::db::{parity_db::ParityDb, parity_db_config::ParityDbConfig}; 7 | 8 | /// Temporary, self-cleaning ParityDB 9 | pub struct TempParityDB { 10 | pub db: Option, 11 | _dir: tempfile::TempDir, // kept for cleaning up during Drop 12 | } 13 | 14 | impl TempParityDB { 15 | /// Creates a new DB in a temporary path that gets wiped out when the 16 | /// variable gets out of scope. 
pub fn new() -> TempParityDB {
        let dir = tempfile::Builder::new()
            .tempdir()
            .expect("Failed to create temporary path for db.");
        let path = dir.path().join("paritydb");
        let config = ParityDbConfig::default();

        TempParityDB {
            db: Some(ParityDb::open(path, &config).unwrap()),
            // Keep the handle alive so the directory is removed on `Drop`.
            _dir: dir,
        }
    }
}

impl Deref for TempParityDB {
    type Target = ParityDb;

    fn deref(&self) -> &Self::Target {
        self.db.as_ref().unwrap()
    }
}

impl AsRef<ParityDb> for TempParityDB {
    fn as_ref(&self) -> &ParityDb {
        self.db.as_ref().unwrap()
    }
}
--------------------------------------------------------------------------------
/src/db/tests/mem_test.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::subtests;

use crate::db::MemoryDB;

#[test]
fn mem_db_write() {
    let db = MemoryDB::default();
    subtests::write_bin(&db);
}

#[test]
fn mem_db_read() {
    let db = MemoryDB::default();
    subtests::read_bin(&db);
}

#[test]
fn mem_db_exists() {
    let db = MemoryDB::default();
    subtests::exists(&db);
}

#[test]
fn mem_db_does_not_exist() {
    let db = MemoryDB::default();
    subtests::does_not_exist(&db);
}

#[test]
fn mem_write_read_obj() {
    let db = MemoryDB::default();
    subtests::write_read_obj(&db);
}
--------------------------------------------------------------------------------
/src/db/tests/parity_test.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::{db_utils::parity::TempParityDB, subtests};

#[test]
fn db_write() {
    let db =
TempParityDB::new(); 9 | subtests::write_bin(&*db); 10 | } 11 | 12 | #[test] 13 | fn db_read() { 14 | let db = TempParityDB::new(); 15 | subtests::read_bin(&*db); 16 | } 17 | 18 | #[test] 19 | fn db_exists() { 20 | let db = TempParityDB::new(); 21 | subtests::exists(&*db); 22 | } 23 | 24 | #[test] 25 | fn db_does_not_exist() { 26 | let db = TempParityDB::new(); 27 | subtests::does_not_exist(&*db); 28 | } 29 | 30 | #[test] 31 | fn db_write_read_obj() { 32 | let db = TempParityDB::new(); 33 | subtests::write_read_obj(&*db); 34 | } 35 | -------------------------------------------------------------------------------- /src/f3/go_ffi.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod binding { 5 | #![allow(warnings)] 6 | #![allow(clippy::indexing_slicing)] 7 | rust2go::r2g_include_binding!(); 8 | } 9 | 10 | #[rust2go::r2g] 11 | pub trait GoF3Node { 12 | fn run( 13 | rpc_endpoint: String, 14 | jwt: String, 15 | f3_rpc_endpoint: String, 16 | initial_power_table: String, 17 | bootstrap_epoch: i64, 18 | finality: i64, 19 | f3_root: String, 20 | ) -> bool; 21 | } 22 | -------------------------------------------------------------------------------- /src/genesis/export40.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/genesis/export40.car -------------------------------------------------------------------------------- /src/interpreter/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::blocks; 5 | use thiserror::Error; 6 | 7 | /// Interpreter error. 
8 | #[derive(Debug, Error)] 9 | pub enum Error { 10 | #[error("failed to read state from the database: {0}")] 11 | Lookup(#[from] anyhow::Error), 12 | 13 | #[error(transparent)] 14 | Signature(#[from] blocks::Error), 15 | } 16 | -------------------------------------------------------------------------------- /src/interpreter/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod errors; 5 | mod fvm2; 6 | pub mod fvm3; 7 | mod fvm4; 8 | mod vm; 9 | 10 | use crate::shim::actors::AccountActorStateLoad as _; 11 | use crate::shim::actors::account; 12 | use crate::shim::{ 13 | address::{Address, Protocol}, 14 | state_tree::StateTree, 15 | }; 16 | use fvm_ipld_blockstore::Blockstore; 17 | 18 | pub use self::vm::*; 19 | 20 | /// returns the public key type of address (`BLS`/`SECP256K1`) of an account 21 | /// actor identified by `addr`. 22 | pub fn resolve_to_key_addr( 23 | st: &StateTree, 24 | store: &BS, 25 | addr: &Address, 26 | ) -> Result 27 | where 28 | BS: Blockstore, 29 | S: Blockstore, 30 | { 31 | if addr.protocol() == Protocol::BLS 32 | || addr.protocol() == Protocol::Secp256k1 33 | || addr.protocol() == Protocol::Delegated 34 | { 35 | return Ok(*addr); 36 | } 37 | 38 | let act = st 39 | .get_actor(addr)? 
40 | .ok_or_else(|| anyhow::anyhow!("Failed to retrieve actor: {}", addr))?; 41 | 42 | // If there _is_ an f4 address, return it as "key" address 43 | if let Some(address) = act.delegated_address { 44 | return Ok(address.into()); 45 | } 46 | 47 | let acc_st = account::State::load(store, act.code, act.state)?; 48 | 49 | Ok(acc_st.pubkey_address().into()) 50 | } 51 | -------------------------------------------------------------------------------- /src/ipld/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod selector; 5 | pub mod util; 6 | 7 | pub use ipld_core::ipld::Ipld; 8 | pub use util::*; 9 | 10 | #[cfg(test)] 11 | mod tests { 12 | mod cbor_test; 13 | mod selector_explore; 14 | mod selector_gen_tests; 15 | } 16 | -------------------------------------------------------------------------------- /src/ipld/selector/empty_map.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use serde::{Deserialize, Deserializer, Serialize, Serializer}; 5 | 6 | // This is only used as a utility because go impl serializes no data as an empty 7 | // map 8 | 9 | #[derive(Serialize, Deserialize)] 10 | struct EmptyMap {} 11 | 12 | pub fn serialize(serializer: S) -> Result 13 | where 14 | S: Serializer, 15 | { 16 | EmptyMap {}.serialize(serializer) 17 | } 18 | 19 | pub fn deserialize<'de, D>(deserializer: D) -> Result<(), D::Error> 20 | where 21 | D: Deserializer<'de>, 22 | { 23 | let EmptyMap {} = Deserialize::deserialize(deserializer)?; 24 | Ok(()) 25 | } 26 | -------------------------------------------------------------------------------- /src/key_management/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // 
SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::io; 5 | 6 | use thiserror::Error; 7 | 8 | #[derive(Debug, Error)] 9 | pub enum Error { 10 | /// info that corresponds to key does not exist 11 | #[error("Key info not found")] 12 | KeyInfo, 13 | /// Key already exists in key store 14 | #[error("Key already exists")] 15 | KeyExists, 16 | #[error("Key does not exist")] 17 | KeyNotExists, 18 | #[error("Key not found")] 19 | NoKey, 20 | #[error(transparent)] 21 | IO(#[from] io::Error), 22 | #[error("{0}")] 23 | Other(String), 24 | #[error("Could not convert from KeyInfo to Key")] 25 | KeyInfoConversion, 26 | } 27 | -------------------------------------------------------------------------------- /src/key_management/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod errors; 5 | mod keystore; 6 | mod wallet; 7 | mod wallet_helpers; 8 | 9 | pub use errors::*; 10 | pub use keystore::*; 11 | pub use wallet::*; 12 | pub use wallet_helpers::*; 13 | #[cfg(test)] 14 | mod tests {} 15 | -------------------------------------------------------------------------------- /src/key_management/tests/keystore_encrypted_old/keystore: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/key_management/tests/keystore_encrypted_old/keystore -------------------------------------------------------------------------------- /src/libp2p/chain_exchange/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod behaviour; 5 | mod message; 6 | mod provider; 7 | pub use behaviour::*; 8 | 9 | pub use self::{message::*, provider::*}; 10 | use super::rpc::CborRequestResponse; 11 | 12 | /// Libp2p 
protocol name for `ChainExchange`. 13 | pub const CHAIN_EXCHANGE_PROTOCOL_NAME: &str = "/fil/chain/xchg/0.0.1"; 14 | 15 | /// `ChainExchange` protocol codec to be used within the RPC service. 16 | pub type ChainExchangeCodec = 17 | CborRequestResponse<&'static str, ChainExchangeRequest, ChainExchangeResponse>; 18 | -------------------------------------------------------------------------------- /src/libp2p/hello/codec.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::libp2p::rpc::CborRequestResponse; 6 | 7 | /// Hello protocol codec to be used within the RPC service. 8 | pub type HelloCodec = CborRequestResponse<&'static str, HelloRequest, HelloResponse>; 9 | -------------------------------------------------------------------------------- /src/libp2p/hello/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod message; 5 | pub use self::message::*; 6 | mod behaviour; 7 | pub use behaviour::*; 8 | mod codec; 9 | use codec::*; 10 | 11 | /// Libp2p Hello protocol name. 
12 | pub const HELLO_PROTOCOL_NAME: &str = "/fil/hello/1.0.0"; 13 | -------------------------------------------------------------------------------- /src/libp2p/metrics.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use once_cell::sync::Lazy; 5 | use prometheus_client::metrics::{counter::Counter, gauge::Gauge}; 6 | 7 | pub static PEER_FAILURE_TOTAL: Lazy = Lazy::new(|| { 8 | let metric = Counter::default(); 9 | crate::metrics::default_registry().register( 10 | "peer_failure_total", 11 | "Total number of failed peer requests", 12 | metric.clone(), 13 | ); 14 | metric 15 | }); 16 | 17 | pub static FULL_PEERS: Lazy = Lazy::new(|| { 18 | let metric = Gauge::default(); 19 | crate::metrics::default_registry().register( 20 | "full_peers", 21 | "Number of healthy peers recognized by the node", 22 | metric.clone(), 23 | ); 24 | metric 25 | }); 26 | 27 | pub static BAD_PEERS: Lazy = Lazy::new(|| { 28 | let metric = Gauge::default(); 29 | crate::metrics::default_registry().register( 30 | "bad_peers", 31 | "Number of bad peers recognized by the node", 32 | metric.clone(), 33 | ); 34 | metric 35 | }); 36 | -------------------------------------------------------------------------------- /src/libp2p/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod behaviour; 5 | pub mod chain_exchange; 6 | mod config; 7 | pub mod discovery; 8 | mod gossip_params; 9 | pub mod hello; 10 | pub mod keypair; 11 | pub mod metrics; 12 | mod peer_manager; 13 | pub mod ping; 14 | pub mod rpc; 15 | mod service; 16 | 17 | // Re-export some libp2p types 18 | pub use cid::multihash::Multihash; 19 | pub use libp2p::{ 20 | identity::{Keypair, ParseError, PeerId, ed25519}, 21 | multiaddr::{Multiaddr, Protocol}, 22 | }; 23 | 24 | 
pub(in crate::libp2p) use self::behaviour::*; 25 | pub use self::{config::*, peer_manager::*, service::*}; 26 | #[cfg(test)] 27 | mod tests { 28 | mod decode_test; 29 | } 30 | -------------------------------------------------------------------------------- /src/libp2p_bitswap/internals/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub(in crate::libp2p_bitswap) mod codec; 5 | pub(in crate::libp2p_bitswap) mod event_handlers; 6 | pub(in crate::libp2p_bitswap) mod prefix; 7 | -------------------------------------------------------------------------------- /src/lotus_json/actor_states/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | use super::*; 4 | mod account_state; 5 | mod cron_state; 6 | mod entry; 7 | mod evm_state; 8 | mod market_state; 9 | mod miner_state; 10 | mod system_state; 11 | mod vesting_funds; 12 | -------------------------------------------------------------------------------- /src/lotus_json/address.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::shim::address::Address; 6 | 7 | #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)] 8 | #[schemars(rename = "Address")] 9 | pub struct AddressLotusJson( 10 | #[schemars(with = "String")] 11 | #[serde(with = "crate::lotus_json::stringify")] 12 | Address, 13 | ); 14 | 15 | impl HasLotusJson for Address { 16 | type LotusJson = AddressLotusJson; 17 | 18 | #[cfg(test)] 19 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 20 | vec![(json!("f00"), Address::default())] 21 | } 22 | 23 | fn into_lotus_json(self) -> Self::LotusJson { 24 | 
AddressLotusJson(self)
    }

    fn from_lotus_json(AddressLotusJson(address): Self::LotusJson) -> Self {
        address
    }
}
--------------------------------------------------------------------------------
/src/lotus_json/beacon_entry.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use crate::beacon::BeaconEntry;

use super::*;

/// Lotus-compatible JSON shape for a drand beacon entry.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "PascalCase")]
#[schemars(rename = "BeaconEntry")]
pub struct BeaconEntryLotusJson {
    round: u64,
    #[schemars(with = "LotusJson<Vec<u8>>")]
    #[serde(with = "crate::lotus_json")]
    data: Vec<u8>,
}

impl HasLotusJson for BeaconEntry {
    type LotusJson = BeaconEntryLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        vec![(json!({"Round": 0, "Data": null}), BeaconEntry::default())]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        let (round, data) = self.into_parts();
        Self::LotusJson { round, data }
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        let Self::LotusJson { round, data } = lotus_json;
        Self::new(round, data)
    }
}
--------------------------------------------------------------------------------
/src/lotus_json/big_int.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use super::*;

use num::BigInt;

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
#[schemars(rename = "BigInt")]
pub struct BigIntLotusJson(
    #[schemars(with = "String")]
    #[serde(with = "crate::lotus_json::stringify")]
    BigInt,
);

impl HasLotusJson for BigInt
{ 17 | type LotusJson = BigIntLotusJson; 18 | 19 | #[cfg(test)] 20 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 21 | vec![(json!("1"), BigInt::from(1))] 22 | } 23 | 24 | fn into_lotus_json(self) -> Self::LotusJson { 25 | BigIntLotusJson(self) 26 | } 27 | 28 | fn from_lotus_json(BigIntLotusJson(big_int): Self::LotusJson) -> Self { 29 | big_int 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/lotus_json/bit_field.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | use fil_actors_shared::fvm_ipld_bitfield::{BitField, json::BitFieldJson}; 7 | 8 | #[derive(Debug, PartialEq, Serialize, Deserialize, JsonSchema)] 9 | #[schemars(rename = "BitField")] 10 | pub struct BitFieldLotusJson(#[schemars(with = "Option>")] pub BitFieldJson); 11 | 12 | impl Clone for BitFieldLotusJson { 13 | fn clone(&self) -> Self { 14 | Self(BitFieldJson(self.0.0.clone())) 15 | } 16 | } 17 | 18 | impl HasLotusJson for BitField { 19 | type LotusJson = BitFieldLotusJson; 20 | #[cfg(test)] 21 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 22 | vec![ 23 | (json!([0]), Self::new()), 24 | (json!([1, 1]), { 25 | let mut it = Self::new(); 26 | it.set(1); 27 | it 28 | }), 29 | ] 30 | } 31 | fn into_lotus_json(self) -> Self::LotusJson { 32 | BitFieldLotusJson(BitFieldJson(self)) 33 | } 34 | fn from_lotus_json(BitFieldLotusJson(BitFieldJson(it)): Self::LotusJson) -> Self { 35 | it 36 | } 37 | } 38 | 39 | #[test] 40 | fn snapshots() { 41 | assert_all_snapshots::(); 42 | } 43 | -------------------------------------------------------------------------------- /src/lotus_json/bytecode_hash.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use 
fil_actor_evm_state::v16::BytecodeHash; 6 | 7 | #[derive(Debug, Serialize, Deserialize, JsonSchema)] 8 | #[serde(rename_all = "PascalCase")] 9 | #[schemars(rename = "BytecodeHash")] 10 | pub struct BytecodeHashLotusJson([u8; 32]); 11 | 12 | impl HasLotusJson for BytecodeHash { 13 | type LotusJson = BytecodeHashLotusJson; 14 | 15 | #[cfg(test)] 16 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 17 | vec![] 18 | } 19 | 20 | fn into_lotus_json(self) -> Self::LotusJson { 21 | BytecodeHashLotusJson(self.into()) 22 | } 23 | 24 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 25 | Self::from(lotus_json.0) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/lotus_json/cid.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)] 7 | #[schemars(rename = "Cid")] 8 | pub struct CidLotusJson { 9 | #[schemars(with = "String")] 10 | #[serde(rename = "/", with = "crate::lotus_json::stringify")] 11 | slash: ::cid::Cid, 12 | } 13 | 14 | impl HasLotusJson for ::cid::Cid { 15 | type LotusJson = CidLotusJson; 16 | 17 | #[cfg(test)] 18 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 19 | vec![(json!({"/": "baeaaaaa"}), ::cid::Cid::default())] 20 | } 21 | 22 | fn into_lotus_json(self) -> Self::LotusJson { 23 | Self::LotusJson { slash: self } 24 | } 25 | 26 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 27 | let Self::LotusJson { slash } = lotus_json; 28 | slash 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/lotus_json/duration.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 
use std::time::Duration; 6 | 7 | impl HasLotusJson for Duration { 8 | type LotusJson = u64; 9 | 10 | #[cfg(test)] 11 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 12 | vec![(json!(15000000000_u64), Duration::from_secs(15))] 13 | } 14 | 15 | fn into_lotus_json(self) -> Self::LotusJson { 16 | self.as_nanos() as _ 17 | } 18 | 19 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 20 | Self::from_nanos(lotus_json) 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/lotus_json/election_proof.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::blocks::{ElectionProof, VRFProof}; 5 | 6 | use super::*; 7 | 8 | #[derive(Serialize, Deserialize, JsonSchema)] 9 | #[serde(rename_all = "PascalCase")] 10 | #[schemars(rename = "ElectionProof")] 11 | pub struct ElectionProofLotusJson { 12 | #[schemars(with = "LotusJson")] 13 | #[serde(with = "crate::lotus_json")] 14 | v_r_f_proof: VRFProof, 15 | win_count: i64, 16 | } 17 | 18 | impl HasLotusJson for ElectionProof { 19 | type LotusJson = ElectionProofLotusJson; 20 | 21 | #[cfg(test)] 22 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 23 | vec![( 24 | json!({ 25 | "WinCount": 0, 26 | "VRFProof": null 27 | }), 28 | ElectionProof::default(), 29 | )] 30 | } 31 | 32 | fn into_lotus_json(self) -> Self::LotusJson { 33 | let Self { 34 | win_count, 35 | vrfproof, 36 | } = self; 37 | Self::LotusJson { 38 | v_r_f_proof: vrfproof, 39 | win_count, 40 | } 41 | } 42 | 43 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 44 | let Self::LotusJson { 45 | v_r_f_proof, 46 | win_count, 47 | } = lotus_json; 48 | Self { 49 | win_count, 50 | vrfproof: v_r_f_proof, 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/lotus_json/hash_map.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use ahash::HashMap as AHashMap; 6 | use std::hash::Hash; 7 | 8 | impl HasLotusJson for AHashMap 9 | where 10 | K: Serialize + DeserializeOwned + Eq + Hash, 11 | V: HasLotusJson, 12 | { 13 | type LotusJson = AHashMap::LotusJson>; 14 | 15 | #[cfg(test)] 16 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 17 | unimplemented!() 18 | } 19 | 20 | fn into_lotus_json(self) -> Self::LotusJson { 21 | self.into_iter() 22 | .map(|(k, v)| (k, v.into_lotus_json())) 23 | .collect() 24 | } 25 | 26 | fn from_lotus_json(value: Self::LotusJson) -> Self { 27 | value 28 | .into_iter() 29 | .map(|(k, v)| (k, V::from_lotus_json(v))) 30 | .collect() 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/lotus_json/nonempty.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | impl HasLotusJson for nunny::Vec 7 | where 8 | T: HasLotusJson, 9 | { 10 | type LotusJson = nunny::Vec<::LotusJson>; 11 | 12 | #[cfg(test)] 13 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 14 | unimplemented!("only NonEmpty is tested, below") 15 | } 16 | 17 | fn into_lotus_json(self) -> Self::LotusJson { 18 | self.into_iter_ne() 19 | .map(HasLotusJson::into_lotus_json) 20 | .collect_vec() 21 | } 22 | 23 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 24 | lotus_json 25 | .into_iter_ne() 26 | .map(HasLotusJson::from_lotus_json) 27 | .collect_vec() 28 | } 29 | } 30 | 31 | #[cfg(test)] 32 | mod tests { 33 | use super::*; 34 | use ::cid::Cid; 35 | use nunny::vec as nonempty; 36 | use quickcheck_macros::quickcheck; 37 | 38 | #[test] 39 | fn shapshots() { 40 | assert_one_snapshot(json!([{"/": 
"baeaaaaa"}]), nonempty![::cid::Cid::default()]); 41 | } 42 | 43 | #[quickcheck] 44 | fn assert_unchanged(it: nunny::Vec) { 45 | assert_unchanged_via_json(it) 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/lotus_json/opt.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | // TODO(forest): https://github.com/ChainSafe/forest/issues/4032 7 | // Remove this - users should use `Option>` instead 8 | // of LotusJson> 9 | impl HasLotusJson for Option 10 | where 11 | T: HasLotusJson, 12 | { 13 | type LotusJson = Option; 14 | 15 | #[cfg(test)] 16 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 17 | unimplemented!("only Option is tested, below") 18 | } 19 | 20 | fn into_lotus_json(self) -> Self::LotusJson { 21 | self.map(T::into_lotus_json) 22 | } 23 | 24 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 25 | lotus_json.map(T::from_lotus_json) 26 | } 27 | } 28 | 29 | #[test] 30 | fn shapshots() { 31 | assert_one_snapshot(json!({"/": "baeaaaaa"}), Some(::cid::Cid::default())); 32 | assert_one_snapshot(json!(null), None::<::cid::Cid>); 33 | } 34 | 35 | #[cfg(test)] 36 | quickcheck! { 37 | fn quickcheck(val: Option<::cid::Cid>) -> () { 38 | assert_unchanged_via_json(val) 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/lotus_json/raw_bytes.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::{vec_u8::VecU8LotusJson, *}; 5 | use fvm_ipld_encoding::RawBytes; 6 | 7 | #[test] 8 | fn snapshots() { 9 | assert_all_snapshots::(); 10 | } 11 | 12 | #[cfg(test)] 13 | quickcheck! 
{ 14 | fn quickcheck(val: Vec) -> () { 15 | assert_unchanged_via_json(RawBytes::new(val)) 16 | } 17 | } 18 | 19 | impl HasLotusJson for RawBytes { 20 | type LotusJson = VecU8LotusJson; 21 | 22 | #[cfg(test)] 23 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 24 | vec![( 25 | json!("aGVsbG8gd29ybGQh"), 26 | RawBytes::new(Vec::from_iter(*b"hello world!")), 27 | )] 28 | } 29 | 30 | fn into_lotus_json(self) -> Self::LotusJson { 31 | Vec::from(self).into_lotus_json() 32 | } 33 | 34 | fn from_lotus_json(value: Self::LotusJson) -> Self { 35 | Self::from(Vec::from_lotus_json(value)) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/lotus_json/registered_po_st_proof.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::shim::sector::RegisteredPoStProof; 5 | use fvm_shared4::sector::RegisteredPoStProof as RegisteredPoStProofV4; 6 | 7 | use super::*; 8 | 9 | impl HasLotusJson for RegisteredPoStProof { 10 | type LotusJson = i64; 11 | 12 | #[cfg(test)] 13 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 14 | vec![( 15 | json!(0), 16 | RegisteredPoStProof::from(RegisteredPoStProofV4::StackedDRGWinning2KiBV1), 17 | )] 18 | } 19 | 20 | fn into_lotus_json(self) -> Self::LotusJson { 21 | i64::from(RegisteredPoStProofV4::from(self)) 22 | } 23 | 24 | fn from_lotus_json(i: Self::LotusJson) -> Self { 25 | Self::from(RegisteredPoStProofV4::from(i)) 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/lotus_json/registered_seal_proof.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::shim::sector::RegisteredSealProof; 6 | use fvm_shared4::sector::RegisteredSealProof as 
RegisteredSealProofV4; 7 | 8 | impl HasLotusJson for RegisteredSealProof { 9 | type LotusJson = i64; 10 | 11 | #[cfg(test)] 12 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 13 | vec![( 14 | json!(0), 15 | Self::from(RegisteredSealProofV4::StackedDRG2KiBV1), 16 | )] 17 | } 18 | 19 | fn into_lotus_json(self) -> Self::LotusJson { 20 | i64::from(RegisteredSealProofV4::from(self)) 21 | } 22 | 23 | fn from_lotus_json(i: Self::LotusJson) -> Self { 24 | Self::from(RegisteredSealProofV4::from(i)) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/lotus_json/sector_size.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::shim::sector::SectorSize; 6 | 7 | #[derive(Serialize, Deserialize, JsonSchema)] 8 | #[schemars(rename = "SectorSize")] 9 | // This should probably be a JSON Schema `enum` 10 | pub struct SectorSizeLotusJson(#[schemars(with = "u64")] SectorSize); 11 | 12 | impl HasLotusJson for SectorSize { 13 | type LotusJson = SectorSizeLotusJson; 14 | 15 | #[cfg(test)] 16 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 17 | vec![(json!(2048), Self::_2KiB)] 18 | } 19 | 20 | fn into_lotus_json(self) -> Self::LotusJson { 21 | SectorSizeLotusJson(self) 22 | } 23 | 24 | fn from_lotus_json(SectorSizeLotusJson(inner): Self::LotusJson) -> Self { 25 | inner 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/lotus_json/ticket.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::blocks::{Ticket, VRFProof}; 5 | 6 | use super::*; 7 | 8 | #[derive(Serialize, Deserialize, JsonSchema)] 9 | #[serde(rename_all = "PascalCase")] 10 | #[schemars(rename = "Ticket")] 11 | pub 
struct TicketLotusJson { 12 | #[schemars(with = "LotusJson")] 13 | #[serde(with = "crate::lotus_json")] 14 | v_r_f_proof: VRFProof, 15 | } 16 | 17 | impl HasLotusJson for Ticket { 18 | type LotusJson = TicketLotusJson; 19 | 20 | #[cfg(test)] 21 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 22 | vec![( 23 | json!({"VRFProof": "aGVsbG8gd29ybGQh"}), 24 | Ticket { 25 | vrfproof: crate::blocks::VRFProof(Vec::from_iter(*b"hello world!")), 26 | }, 27 | )] 28 | } 29 | 30 | fn into_lotus_json(self) -> Self::LotusJson { 31 | let Self { vrfproof } = self; 32 | Self::LotusJson { 33 | v_r_f_proof: vrfproof, 34 | } 35 | } 36 | 37 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 38 | let Self::LotusJson { v_r_f_proof } = lotus_json; 39 | Self { 40 | vrfproof: v_r_f_proof, 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/lotus_json/tipset_keys.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::blocks::TipsetKey; 6 | use ::cid::Cid; 7 | 8 | impl HasLotusJson for TipsetKey { 9 | type LotusJson = nunny::Vec<::LotusJson>; 10 | 11 | #[cfg(test)] 12 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 13 | vec![( 14 | json!([{"/": "baeaaaaa"}]), 15 | ::nunny::vec![::cid::Cid::default()].into(), 16 | )] 17 | } 18 | 19 | fn into_lotus_json(self) -> Self::LotusJson { 20 | self.into_cids() 21 | .into_iter_ne() 22 | .map(Cid::into_lotus_json) 23 | .collect_vec() 24 | } 25 | 26 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 27 | lotus_json 28 | .into_iter_ne() 29 | .map(Cid::from_lotus_json) 30 | .collect_vec() 31 | .into() 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/lotus_json/token_amount.rs: -------------------------------------------------------------------------------- 1 | 
// Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::shim::econ::TokenAmount; 6 | use num::BigInt; 7 | 8 | #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)] 9 | #[serde(transparent)] // name the field for clarity 10 | #[schemars(rename = "TokenAmount")] 11 | pub struct TokenAmountLotusJson { 12 | #[schemars(with = "LotusJson")] 13 | #[serde(with = "crate::lotus_json")] 14 | attos: BigInt, 15 | } 16 | 17 | impl HasLotusJson for TokenAmount { 18 | type LotusJson = TokenAmountLotusJson; 19 | 20 | #[cfg(test)] 21 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 22 | vec![(json!("1"), TokenAmount::from_atto(1))] 23 | } 24 | 25 | fn into_lotus_json(self) -> Self::LotusJson { 26 | Self::LotusJson { 27 | attos: self.atto().clone(), 28 | } 29 | } 30 | 31 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 32 | let Self::LotusJson { attos } = lotus_json; 33 | Self::from_atto(attos) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/lotus_json/verifreg_claim.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::lotus_json::HasLotusJson; 5 | use crate::rpc::types::ClaimLotusJson; 6 | use crate::shim::actors::verifreg::Claim; 7 | 8 | impl HasLotusJson for Claim { 9 | type LotusJson = ClaimLotusJson; 10 | #[cfg(test)] 11 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 12 | vec![] 13 | } 14 | fn into_lotus_json(self) -> Self::LotusJson { 15 | ClaimLotusJson { 16 | size: self.size, 17 | sector: self.sector, 18 | data: self.data, 19 | client: self.client, 20 | provider: self.provider, 21 | term_max: self.term_max, 22 | term_min: self.term_min, 23 | term_start: self.term_start, 24 | } 25 | } 26 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 27 | Claim { 28 | size: 
lotus_json.size, 29 | sector: lotus_json.sector, 30 | data: lotus_json.data, 31 | client: lotus_json.client, 32 | provider: lotus_json.provider, 33 | term_max: lotus_json.term_max, 34 | term_min: lotus_json.term_min, 35 | term_start: lotus_json.term_start, 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/lotus_json/vrf_proof.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::blocks::VRFProof; 6 | 7 | impl HasLotusJson for VRFProof { 8 | type LotusJson = as HasLotusJson>::LotusJson; 9 | 10 | #[cfg(test)] 11 | fn snapshots() -> Vec<(serde_json::Value, Self)> { 12 | vec![( 13 | json!("aGVsbG8gd29ybGQh"), 14 | VRFProof(Vec::from_iter(*b"hello world!")), 15 | )] 16 | } 17 | 18 | fn into_lotus_json(self) -> Self::LotusJson { 19 | let Self(vec) = self; 20 | vec.into_lotus_json() 21 | } 22 | 23 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self { 24 | Self(HasLotusJson::from_lotus_json(lotus_json)) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/message/tests/builder_test.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::message::SignedMessage; 5 | use crate::shim::{ 6 | address::Address, 7 | crypto::Signature, 8 | message::{Message, Message_v3}, 9 | }; 10 | use rand::RngCore; 11 | 12 | #[test] 13 | fn generate_signed_message() { 14 | let msg: Message = Message_v3 { 15 | to: Address::new_id(1).into(), 16 | from: Address::new_id(2).into(), 17 | ..Message_v3::default() 18 | } 19 | .into(); 20 | 21 | let mut dummy_sig = vec![0]; 22 | crate::utils::rand::forest_rng().fill_bytes(&mut dummy_sig); 23 | let signed_msg = 24 | 
SignedMessage::new_unchecked(msg.clone(), Signature::new_secp256k1(dummy_sig.clone())); 25 | 26 | // Assert message and signature are expected 27 | assert_eq!(signed_msg.message(), &msg); 28 | assert_eq!(signed_msg.signature(), &Signature::new_secp256k1(dummy_sig)); 29 | } 30 | -------------------------------------------------------------------------------- /src/message_pool/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | mod block_prob; 4 | mod config; 5 | mod errors; 6 | mod msg_chain; 7 | mod msgpool; 8 | 9 | pub use self::{ 10 | config::*, 11 | errors::*, 12 | msgpool::{msg_pool::MessagePool, provider::MpoolRpcProvider, *}, 13 | }; 14 | 15 | pub use block_prob::block_probabilities; 16 | -------------------------------------------------------------------------------- /src/message_pool/msgpool/metrics.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use once_cell::sync::Lazy; 5 | use prometheus_client::metrics::gauge::Gauge; 6 | 7 | pub static MPOOL_MESSAGE_TOTAL: Lazy = Lazy::new(|| { 8 | let metric = Gauge::default(); 9 | crate::metrics::default_registry().register( 10 | "mpool_message_total", 11 | "Total number of messages in the message pool", 12 | metric.clone(), 13 | ); 14 | metric 15 | }); 16 | -------------------------------------------------------------------------------- /src/networks/calibnet/genesis.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/networks/calibnet/genesis.car -------------------------------------------------------------------------------- /src/networks/mainnet/genesis.car: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/networks/mainnet/genesis.car -------------------------------------------------------------------------------- /src/rpc/methods/beacon.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::rpc::{ApiPaths, Ctx, Permission, RpcMethod, ServerError}; 5 | use crate::{beacon::BeaconEntry, shim::clock::ChainEpoch}; 6 | use anyhow::Result; 7 | use enumflags2::{BitFlags, make_bitflags}; 8 | use fvm_ipld_blockstore::Blockstore; 9 | 10 | /// `BeaconGetEntry` returns the beacon entry for the given Filecoin epoch. If 11 | /// the entry has not yet been produced, the call will block until the entry 12 | /// becomes available 13 | pub enum BeaconGetEntry {} 14 | impl RpcMethod<1> for BeaconGetEntry { 15 | const NAME: &'static str = "Filecoin.BeaconGetEntry"; 16 | const PARAM_NAMES: [&'static str; 1] = ["first"]; 17 | const API_PATHS: BitFlags = make_bitflags!(ApiPaths::V0); // Not supported in V1 18 | const PERMISSION: Permission = Permission::Read; 19 | 20 | type Params = (ChainEpoch,); 21 | type Ok = BeaconEntry; 22 | 23 | async fn handle( 24 | ctx: Ctx, 25 | (first,): Self::Params, 26 | ) -> Result { 27 | let (_, beacon) = ctx.beacon().beacon_for_epoch(first)?; 28 | let rr = 29 | beacon.max_beacon_round_for_epoch(ctx.state_manager.get_network_version(first), first); 30 | let e = beacon.entry(rr).await?; 31 | Ok(e) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/rpc/methods/chain/types.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | #[derive(Serialize, 
Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, Default)] 7 | #[serde(rename_all = "PascalCase")] 8 | pub struct ObjStat { 9 | pub size: usize, 10 | pub links: usize, 11 | } 12 | lotus_json_with_self!(ObjStat); 13 | -------------------------------------------------------------------------------- /src/rpc/snapshots/.gitattributes: -------------------------------------------------------------------------------- 1 | # To keep certain files from displaying in diffs by default, 2 | # or counting toward the repository language, 3 | # you can mark them with the linguist-generated attribute in a .gitattributes file. 4 | # - github docs 5 | *.snap linguist-generated=true 6 | -------------------------------------------------------------------------------- /src/rpc/snapshots/.gitignore: -------------------------------------------------------------------------------- 1 | # Draft snapshots 2 | /*.snap.new 3 | -------------------------------------------------------------------------------- /src/rpc/snapshots/README.md: -------------------------------------------------------------------------------- 1 | Snapshot testing saves a copy of an expected output inside a repo, and asserts 2 | that the System Under Test always produces a matching copy. 3 | 4 | [`insta`](https://docs.rs/insta), the snapshot testing library we use stores 5 | snapshots in this folder. 6 | 7 | This allows us to regression-test and review changes to our autogenerated 8 | OpenRPC definitions. 9 | 10 | If you're making changes to OpenRPC files, review is easier with 11 | 12 | ```console 13 | $ cargo install cargo-insta 14 | ... 15 | $ cargo insta review 16 | ``` 17 | 18 | See also . 
19 | -------------------------------------------------------------------------------- /src/rpc/types/address_impl.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | 6 | const EMPTY_ADDRESS_VALUE: &str = ""; 7 | 8 | impl Serialize for AddressOrEmpty { 9 | fn serialize(&self, s: S) -> Result 10 | where 11 | S: Serializer, 12 | { 13 | let address_bytes = match self.0 { 14 | Some(addr) => addr.to_string(), 15 | None => EMPTY_ADDRESS_VALUE.to_string(), 16 | }; 17 | 18 | s.collect_str(&address_bytes) 19 | } 20 | } 21 | 22 | impl<'de> Deserialize<'de> for AddressOrEmpty { 23 | fn deserialize(deserializer: D) -> Result 24 | where 25 | D: Deserializer<'de>, 26 | { 27 | let address_str = String::deserialize(deserializer)?; 28 | if address_str.eq(EMPTY_ADDRESS_VALUE) { 29 | return Ok(Self(None)); 30 | } 31 | 32 | Address::from_str(&address_str) 33 | .map_err(de::Error::custom) 34 | .map(|addr| Self(Some(addr))) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/rpc/types/tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::lotus_json::HasLotusJson as _; 6 | use quickcheck_macros::quickcheck; 7 | 8 | #[quickcheck] 9 | fn test_api_tipset_key(cids: Vec) { 10 | test_api_tipset_key_inner(cids) 11 | } 12 | 13 | #[test] 14 | fn test_api_tipset_key_empty() { 15 | test_api_tipset_key_inner(vec![]) 16 | } 17 | 18 | #[test] 19 | fn test_api_tipset_key_deserialization_empty_vec() { 20 | let api_ts_lotus_json: LotusJson = serde_json::from_str("[]").unwrap(); 21 | assert!(api_ts_lotus_json.into_inner().0.is_none()); 22 | } 23 | 24 | #[test] 25 | fn test_api_tipset_key_deserialization_null() { 26 | let api_ts_lotus_json: 
LotusJson = serde_json::from_str("null").unwrap(); 27 | assert!(api_ts_lotus_json.into_inner().0.is_none()); 28 | } 29 | 30 | fn test_api_tipset_key_inner(cids: Vec) { 31 | let lotus_json_str = cids.clone().into_lotus_json_string_pretty().unwrap(); 32 | let api_ts_lotus_json: LotusJson = serde_json::from_str(&lotus_json_str).unwrap(); 33 | let api_ts = api_ts_lotus_json.into_inner(); 34 | let cids_from_api_ts = api_ts 35 | .0 36 | .map(|ts| ts.into_cids().into_iter().collect::>()) 37 | .unwrap_or_default(); 38 | assert_eq!(cids_from_api_ts, cids); 39 | } 40 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/cron/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fvm_shared2::address::Address; 5 | use serde::Serialize; 6 | 7 | /// Cron actor address. 8 | pub const ADDRESS: Address = Address::new_id(3); 9 | 10 | /// Cron actor method. 11 | pub type Method = fil_actor_cron_state::v8::Method; 12 | 13 | /// Cron actor state. 
14 | #[derive(Serialize, Debug)] 15 | #[serde(untagged)] 16 | pub enum State { 17 | V8(fil_actor_cron_state::v8::State), 18 | V9(fil_actor_cron_state::v9::State), 19 | V10(fil_actor_cron_state::v10::State), 20 | V11(fil_actor_cron_state::v11::State), 21 | V12(fil_actor_cron_state::v12::State), 22 | V13(fil_actor_cron_state::v13::State), 23 | V14(fil_actor_cron_state::v14::State), 24 | V15(fil_actor_cron_state::v15::State), 25 | V16(fil_actor_cron_state::v16::State), 26 | } 27 | 28 | #[derive(Clone, Serialize, Debug)] 29 | #[serde(untagged)] 30 | pub enum Entry { 31 | V8(fil_actor_cron_state::v8::Entry), 32 | V9(fil_actor_cron_state::v9::Entry), 33 | V10(fil_actor_cron_state::v10::Entry), 34 | V11(fil_actor_cron_state::v11::Entry), 35 | V12(fil_actor_cron_state::v12::Entry), 36 | V13(fil_actor_cron_state::v13::Entry), 37 | V14(fil_actor_cron_state::v14::Entry), 38 | V15(fil_actor_cron_state::v15::Entry), 39 | V16(fil_actor_cron_state::v16::Entry), 40 | } 41 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/eam.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub type CreateExternalReturn = fil_actor_eam_state::v16::CreateExternalReturn; 5 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/market/ext/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod balance_table; 5 | mod state; 6 | 7 | use crate::shim::actors::{market, verifreg::AllocationID}; 8 | use crate::shim::address::Address; 9 | use crate::shim::deal::DealID; 10 | use crate::shim::econ::TokenAmount; 11 | use ahash::HashMap; 12 | use fvm_ipld_blockstore::Blockstore; 13 | 14 | pub trait MarketStateExt { 15 | fn 
get_allocations_for_pending_deals( 16 | &self, 17 | store: &impl Blockstore, 18 | ) -> anyhow::Result>; 19 | 20 | fn get_allocation_id_for_pending_deal( 21 | &self, 22 | store: &impl Blockstore, 23 | deal_id: &DealID, 24 | ) -> anyhow::Result; 25 | } 26 | 27 | pub trait BalanceTableExt { 28 | fn for_each(&self, f: F) -> anyhow::Result<()> 29 | where 30 | F: FnMut(&Address, &TokenAmount) -> anyhow::Result<()>; 31 | } 32 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/miner/ext/deadline.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use num::Zero; 6 | 7 | impl DeadlineExt for Deadline { 8 | fn daily_fee(&self) -> TokenAmount { 9 | use Deadline::*; 10 | match self { 11 | V8(_) => Zero::zero(), 12 | V9(_) => Zero::zero(), 13 | V10(_) => Zero::zero(), 14 | V11(_) => Zero::zero(), 15 | V12(_) => Zero::zero(), 16 | V13(_) => Zero::zero(), 17 | V14(_) => Zero::zero(), 18 | V15(_) => Zero::zero(), 19 | V16(d) => (&d.daily_fee).into(), 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/miner/ext/partition.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::*; 5 | use crate::shim::actors::miner::Partition; 6 | 7 | impl PartitionExt for Partition<'_> { 8 | fn terminated(&self) -> &BitField { 9 | match self { 10 | Partition::V8(dl) => &dl.terminated, 11 | Partition::V9(dl) => &dl.terminated, 12 | Partition::V10(dl) => &dl.terminated, 13 | Partition::V11(dl) => &dl.terminated, 14 | Partition::V12(dl) => &dl.terminated, 15 | Partition::V13(dl) => &dl.terminated, 16 | Partition::V14(dl) => &dl.terminated, 17 | Partition::V15(dl) => 
&dl.terminated, 18 | Partition::V16(dl) => &dl.terminated, 19 | } 20 | } 21 | 22 | fn expirations_epochs(&self) -> Cid { 23 | match self { 24 | Partition::V8(dl) => dl.expirations_epochs, 25 | Partition::V9(dl) => dl.expirations_epochs, 26 | Partition::V10(dl) => dl.expirations_epochs, 27 | Partition::V11(dl) => dl.expirations_epochs, 28 | Partition::V12(dl) => dl.expirations_epochs, 29 | Partition::V13(dl) => dl.expirations_epochs, 30 | Partition::V14(dl) => dl.expirations_epochs, 31 | Partition::V15(dl) => dl.expirations_epochs, 32 | Partition::V16(dl) => dl.expirations_epochs, 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod account; 5 | pub mod cron; 6 | pub mod datacap; 7 | pub mod eam; 8 | pub mod evm; 9 | pub mod init; 10 | pub mod market; 11 | pub mod miner; 12 | pub mod multisig; 13 | pub mod power; 14 | pub mod reward; 15 | pub mod system; 16 | pub mod verifreg; 17 | 18 | pub use fil_actor_reward_state::v8::AwardBlockRewardParams; 19 | pub use fvm_shared2::{clock::EPOCH_DURATION_SECONDS, smooth::FilterEstimate}; 20 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/multisig/ext/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod state; 5 | 6 | use crate::rpc::types::MsigVesting; 7 | use crate::shim::actors::multisig::State; 8 | 9 | pub trait MultisigExt { 10 | fn get_vesting_schedule(&self) -> anyhow::Result; 11 | } 12 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/power/ext.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::shim::actors::power::State; 5 | use crate::shim::clock::ChainEpoch; 6 | 7 | pub trait PowerStateExt { 8 | /// `FIP0081` activation epoch. Should be same as `TukTuk` epoch. 9 | fn ramp_start_epoch(&self) -> ChainEpoch; 10 | /// `FIP0081` activation ramp. One year on mainnet, 3 days on calibnet, 11 | /// defaults to 200 epochs on devnet. Only applicable to `v15` (aka `TukTuk`) 12 | /// actors. 13 | fn ramp_duration_epochs(&self) -> u64; 14 | } 15 | 16 | impl PowerStateExt for State { 17 | fn ramp_start_epoch(&self) -> ChainEpoch { 18 | match self { 19 | State::V15(st) => st.ramp_start_epoch, 20 | State::V16(st) => st.ramp_start_epoch, 21 | _ => 0, 22 | } 23 | } 24 | 25 | fn ramp_duration_epochs(&self) -> u64 { 26 | match self { 27 | State::V15(st) => st.ramp_duration_epochs, 28 | State::V16(st) => st.ramp_duration_epochs, 29 | _ => 0, 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/system/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fvm_shared2::address::Address; 5 | use serde::Serialize; 6 | 7 | /// System actor address. 8 | pub const ADDRESS: Address = Address::new_id(0); 9 | 10 | /// System actor method. 11 | pub type Method = fil_actor_system_state::v8::Method; 12 | 13 | /// System actor state. 
14 | #[derive(Serialize, Debug)] 15 | #[serde(untagged)] 16 | pub enum State { 17 | V8(fil_actor_system_state::v8::State), 18 | V9(fil_actor_system_state::v9::State), 19 | V10(fil_actor_system_state::v10::State), 20 | V11(fil_actor_system_state::v11::State), 21 | V12(fil_actor_system_state::v12::State), 22 | V13(fil_actor_system_state::v13::State), 23 | V14(fil_actor_system_state::v14::State), 24 | V15(fil_actor_system_state::v15::State), 25 | V16(fil_actor_system_state::v16::State), 26 | } 27 | -------------------------------------------------------------------------------- /src/shim/actors/builtin/verifreg/ext/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod state; 5 | 6 | use crate::shim::actors::verifreg::{Allocation, AllocationID, Claim, State}; 7 | use crate::shim::address::Address; 8 | use ahash::HashMap; 9 | use fil_actor_verifreg_state::v13::ClaimID; 10 | use fvm_ipld_blockstore::Blockstore; 11 | 12 | pub trait VerifiedRegistryStateExt { 13 | fn get_allocations( 14 | &self, 15 | store: &BS, 16 | address: &Address, 17 | ) -> anyhow::Result>; 18 | 19 | fn get_all_allocations( 20 | &self, 21 | store: &BS, 22 | ) -> anyhow::Result>; 23 | 24 | fn get_claims( 25 | &self, 26 | store: &BS, 27 | provider_id_address: &Address, 28 | ) -> anyhow::Result>; 29 | 30 | fn get_all_claims(&self, store: &BS) 31 | -> anyhow::Result>; 32 | 33 | fn root_key(&self) -> Address; 34 | } 35 | -------------------------------------------------------------------------------- /src/shim/actors/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | #![allow(unused)] 5 | mod builtin; 6 | pub mod convert; 7 | mod macros; 8 | 9 | pub use self::builtin::*; 10 | pub use 
fil_actors_shared::v9::builtin::singletons::{BURNT_FUNDS_ACTOR_ADDR, CHAOS_ACTOR_ADDR}; 11 | pub use fil_actors_shared::v13::runtime::Policy; 12 | 13 | pub mod common; 14 | pub use common::*; 15 | pub mod state_load; 16 | pub use state_load::*; 17 | mod version; 18 | pub use version::*; 19 | -------------------------------------------------------------------------------- /src/shim/bigint.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::ops::{Deref, DerefMut}; 5 | 6 | use super::fvm_shared_latest::bigint::bigint_ser; 7 | use serde::{Deserialize, Serialize}; 8 | 9 | #[derive(Default, Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] 10 | #[serde(transparent)] 11 | pub struct BigInt(#[serde(with = "bigint_ser")] num_bigint::BigInt); 12 | 13 | impl Deref for BigInt { 14 | type Target = num_bigint::BigInt; 15 | fn deref(&self) -> &Self::Target { 16 | &self.0 17 | } 18 | } 19 | 20 | impl DerefMut for BigInt { 21 | fn deref_mut(&mut self) -> &mut Self::Target { 22 | &mut self.0 23 | } 24 | } 25 | 26 | impl From for BigInt { 27 | fn from(other: num_bigint::BigInt) -> Self { 28 | BigInt(other) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/shim/clock.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub use fvm_shared3::ALLOWABLE_CLOCK_DRIFT; 5 | pub use fvm_shared3::BLOCKS_PER_EPOCH; 6 | pub use fvm_shared3::clock::EPOCH_DURATION_SECONDS; 7 | 8 | pub const SECONDS_IN_DAY: i64 = 86400; 9 | pub const EPOCHS_IN_DAY: i64 = SECONDS_IN_DAY / EPOCH_DURATION_SECONDS; 10 | 11 | pub type ChainEpoch = i64; 12 | -------------------------------------------------------------------------------- /src/shim/deal.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub use super::fvm_shared_latest::deal::DealID; 5 | -------------------------------------------------------------------------------- /src/shim/machine/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod manifest; 5 | pub use manifest::{BuiltinActor, BuiltinActorManifest}; 6 | 7 | use fvm2::machine::MultiEngine as MultiEngine_v2; 8 | use fvm3::engine::MultiEngine as MultiEngine_v3; 9 | use fvm4::engine::MultiEngine as MultiEngine_v4; 10 | use once_cell::sync::Lazy; 11 | use std::sync::Arc; 12 | 13 | pub static GLOBAL_MULTI_ENGINE: Lazy> = Lazy::new(Default::default); 14 | 15 | pub struct MultiEngine { 16 | pub v2: MultiEngine_v2, 17 | pub v3: MultiEngine_v3, 18 | pub v4: MultiEngine_v4, 19 | } 20 | 21 | impl Default for MultiEngine { 22 | fn default() -> MultiEngine { 23 | MultiEngine::new(std::thread::available_parallelism().map(|x| x.get() as u32)) 24 | } 25 | } 26 | 27 | impl MultiEngine { 28 | pub fn new(concurrency: Result) -> MultiEngine { 29 | let concurrency = concurrency.ok(); 30 | MultiEngine { 31 | v2: Default::default(), 32 | v3: concurrency.map_or_else(Default::default, MultiEngine_v3::new), 33 | v4: concurrency.map_or_else(Default::default, MultiEngine_v4::new), 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/shim/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod actors; 5 | pub mod address; 6 | pub mod bigint; 7 | pub mod clock; 8 | pub mod crypto; 9 | pub mod deal; 10 | pub mod econ; 11 | pub mod error; 12 | pub mod 
executor; 13 | pub mod externs; 14 | pub mod gas; 15 | pub mod kernel; 16 | pub mod machine; 17 | pub mod message; 18 | pub mod piece; 19 | pub mod randomness; 20 | pub mod sector; 21 | pub mod state_tree; 22 | pub mod state_tree_v0; 23 | pub mod trace; 24 | pub mod version; 25 | 26 | pub mod fvm_shared_latest { 27 | // If `#[doc(inline)]`, we steal these docs from an external crate. 28 | // But they contain dead links, which means our dead link checker (lychee) 29 | // will complain. 30 | #[doc(no_inline)] 31 | pub use fvm_shared4::*; 32 | } 33 | pub mod fvm_latest { 34 | pub use fvm4::*; 35 | } 36 | -------------------------------------------------------------------------------- /src/state_manager/errors.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::fmt::{Debug, Display}; 5 | 6 | use thiserror::Error; 7 | use tokio::task::JoinError; 8 | 9 | /// State manager error 10 | #[derive(Debug, PartialEq, Error)] 11 | pub enum Error { 12 | /// Error originating from state 13 | #[error("{0}")] 14 | State(String), 15 | /// Other state manager error 16 | #[error("{0}")] 17 | Other(String), 18 | } 19 | 20 | impl Error { 21 | pub fn state(e: impl Display) -> Self { 22 | Self::State(e.to_string()) 23 | } 24 | 25 | pub fn other(e: impl Display) -> Self { 26 | Self::Other(e.to_string()) 27 | } 28 | } 29 | 30 | impl From for Error { 31 | fn from(e: String) -> Self { 32 | Error::Other(e) 33 | } 34 | } 35 | 36 | impl From for Error { 37 | fn from(e: anyhow::Error) -> Self { 38 | Error::other(e) 39 | } 40 | } 41 | 42 | impl From for Error { 43 | fn from(e: JoinError) -> Self { 44 | Error::Other(format!("failed joining on tokio task: {e}")) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/state_migration/common/macros/mod.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod system; 5 | mod verifier; 6 | -------------------------------------------------------------------------------- /src/state_migration/common/verifier.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::sync::Arc; 5 | 6 | use crate::cid_collections::CidHashMap; 7 | use crate::shim::state_tree::StateTree; 8 | 9 | use super::Migrator; 10 | 11 | /// The implementation should verify that the migration specification is 12 | /// correct. This is to prevent accidental migration errors. 13 | pub(in crate::state_migration) trait ActorMigrationVerifier { 14 | fn verify_migration( 15 | &self, 16 | store: &BS, 17 | migrations: &CidHashMap>, 18 | actors_in: &StateTree, 19 | ) -> anyhow::Result<()>; 20 | } 21 | 22 | /// Type implementing the `ActorMigrationVerifier` trait. 23 | pub(in crate::state_migration) type MigrationVerifier = 24 | Arc + Send + Sync>; 25 | -------------------------------------------------------------------------------- /src/state_migration/nv17/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV17` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod datacap; 9 | mod migration; 10 | mod miner; 11 | mod util; 12 | mod verifreg_market; 13 | 14 | /// Run migration for `NV17`. This should be the only exported method in this 15 | /// module. 
16 | pub use migration::run_migration; 17 | 18 | use crate::{define_system_states, impl_system, impl_verifier}; 19 | 20 | define_system_states!( 21 | fil_actor_system_state::v8::State, 22 | fil_actor_system_state::v9::State 23 | ); 24 | 25 | impl_system!(); 26 | impl_verifier!(); 27 | -------------------------------------------------------------------------------- /src/state_migration/nv18/eam.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::shim::{ 5 | address::Address, 6 | machine::{BuiltinActor, BuiltinActorManifest}, 7 | state_tree::{ActorState, StateTree}, 8 | }; 9 | use crate::utils::db::CborStoreExt as _; 10 | use fvm_ipld_blockstore::Blockstore; 11 | 12 | use crate::state_migration::common::PostMigrator; 13 | 14 | use super::SystemStateNew; 15 | 16 | pub struct EamPostMigrator; 17 | 18 | impl PostMigrator for EamPostMigrator { 19 | /// Creates the Ethereum Account Manager actor in the state tree. 20 | fn post_migrate_state(&self, store: &BS, actors_out: &mut StateTree) -> anyhow::Result<()> { 21 | let sys_actor = actors_out.get_required_actor(&Address::SYSTEM_ACTOR)?; 22 | let sys_state: SystemStateNew = store.get_cbor_required(&sys_actor.state)?; 23 | 24 | let manifest = BuiltinActorManifest::load_v1_actor_list(store, &sys_state.builtin_actors)?; 25 | 26 | let eam_actor = ActorState::new_empty(manifest.get(BuiltinActor::EAM)?, None); 27 | actors_out.set_actor(&Address::ETHEREUM_ACCOUNT_MANAGER_ACTOR, eam_actor)?; 28 | Ok(()) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/state_migration/nv18/init.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! 
This module contains the migration logic for the `NV18` upgrade for the Init 5 | //! actor. 6 | 7 | use std::sync::Arc; 8 | 9 | use crate::state_migration::common::{ 10 | ActorMigration, ActorMigrationInput, ActorMigrationOutput, TypeMigration, TypeMigrator, 11 | }; 12 | use crate::utils::db::CborStoreExt; 13 | use cid::Cid; 14 | use fil_actor_init_state::{v9::State as InitStateOld, v10::State as InitStateNew}; 15 | use fvm_ipld_blockstore::Blockstore; 16 | 17 | pub struct InitMigrator(Cid); 18 | 19 | pub(in crate::state_migration) fn init_migrator( 20 | cid: Cid, 21 | ) -> Arc + Send + Sync> { 22 | Arc::new(InitMigrator(cid)) 23 | } 24 | 25 | impl ActorMigration for InitMigrator { 26 | fn migrate_state( 27 | &self, 28 | store: &BS, 29 | input: ActorMigrationInput, 30 | ) -> anyhow::Result> { 31 | let in_state: InitStateOld = store.get_cbor_required(&input.head)?; 32 | let out_state: InitStateNew = TypeMigrator::migrate_type(in_state, &store)?; 33 | let new_head = store.put_cbor_default(&out_state)?; 34 | Ok(Some(ActorMigrationOutput { 35 | new_code_cid: self.0, 36 | new_head, 37 | })) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /src/state_migration/nv18/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV18` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod eam; 9 | mod eth_account; 10 | mod init; 11 | mod migration; 12 | 13 | /// Run migration for `NV18`. This should be the only exported method in this 14 | /// module. 
15 | pub use migration::run_migration; 16 | 17 | use crate::{define_system_states, impl_system, impl_verifier}; 18 | 19 | define_system_states!( 20 | fil_actor_system_state::v9::State, 21 | fil_actor_system_state::v10::State 22 | ); 23 | 24 | impl_system!(); 25 | impl_verifier!(); 26 | -------------------------------------------------------------------------------- /src/state_migration/nv19/miner.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV19` upgrade for the 5 | //! Miner actor. 6 | 7 | use crate::state_migration::common::{ 8 | ActorMigration, ActorMigrationInput, ActorMigrationOutput, TypeMigration, TypeMigrator, 9 | }; 10 | use crate::utils::db::CborStoreExt as _; 11 | use cid::Cid; 12 | use fil_actor_miner_state::{v10::State as MinerStateOld, v11::State as MinerStateNew}; 13 | use fvm_ipld_blockstore::Blockstore; 14 | use std::sync::Arc; 15 | 16 | pub struct MinerMigrator(Cid); 17 | 18 | pub(in crate::state_migration) fn miner_migrator( 19 | cid: Cid, 20 | ) -> Arc + Send + Sync> { 21 | Arc::new(MinerMigrator(cid)) 22 | } 23 | 24 | impl ActorMigration for MinerMigrator { 25 | fn migrate_state( 26 | &self, 27 | store: &BS, 28 | input: ActorMigrationInput, 29 | ) -> anyhow::Result> { 30 | let in_state: MinerStateOld = store.get_cbor_required(&input.head)?; 31 | let out_state: MinerStateNew = TypeMigrator::migrate_type(in_state, &store)?; 32 | let new_head = store.put_cbor_default(&out_state)?; 33 | Ok(Some(ActorMigrationOutput { 34 | new_code_cid: self.0, 35 | new_head, 36 | })) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/state_migration/nv19/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: 
Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV19` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod migration; 9 | mod miner; 10 | mod power; 11 | 12 | /// Run migration for `NV19`. This should be the only exported method in this 13 | /// module. 14 | pub use migration::run_migration; 15 | 16 | use crate::{define_system_states, impl_system, impl_verifier}; 17 | 18 | define_system_states!( 19 | fil_actor_system_state::v10::State, 20 | fil_actor_system_state::v11::State 21 | ); 22 | 23 | impl_system!(); 24 | impl_verifier!(); 25 | -------------------------------------------------------------------------------- /src/state_migration/nv21/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV21` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod migration; 9 | mod miner; 10 | 11 | /// Run migration for `NV21`. This should be the only exported method in this 12 | /// module. 13 | pub use migration::run_migration; 14 | 15 | use crate::{define_system_states, impl_system, impl_verifier}; 16 | 17 | define_system_states!( 18 | fil_actor_system_state::v11::State, 19 | fil_actor_system_state::v12::State 20 | ); 21 | 22 | impl_system!(); 23 | impl_verifier!(); 24 | -------------------------------------------------------------------------------- /src/state_migration/nv21fix/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV21` calibration network fix. 5 | 6 | mod migration; 7 | 8 | /// Run migration for `NV21` calibration network fix. 
This should be the only exported method in this 9 | /// module. 10 | pub use migration::run_migration; 11 | 12 | use crate::{define_system_states, impl_system, impl_verifier}; 13 | 14 | define_system_states!( 15 | fil_actor_system_state::v12::State, 16 | fil_actor_system_state::v12::State 17 | ); 18 | 19 | impl_system!(); 20 | impl_verifier!(); 21 | -------------------------------------------------------------------------------- /src/state_migration/nv21fix2/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV21` calibration network fix. 5 | 6 | mod migration; 7 | 8 | /// Run migration for `NV21` calibration network fix. This should be the only exported method in this 9 | /// module. 10 | pub use migration::run_migration; 11 | 12 | use crate::{define_system_states, impl_system, impl_verifier}; 13 | 14 | define_system_states!( 15 | fil_actor_system_state::v12::State, 16 | fil_actor_system_state::v12::State 17 | ); 18 | 19 | impl_system!(); 20 | impl_verifier!(); 21 | -------------------------------------------------------------------------------- /src/state_migration/nv22/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV22` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod market; 9 | mod migration; 10 | mod miner; 11 | 12 | /// Run migration for `NV22`. This should be the only exported method in this 13 | /// module. 
14 | pub use migration::run_migration; 15 | 16 | use crate::{define_system_states, impl_system, impl_verifier}; 17 | 18 | define_system_states!( 19 | fil_actor_system_state::v12::State, 20 | fil_actor_system_state::v13::State 21 | ); 22 | 23 | impl_system!(); 24 | impl_verifier!(); 25 | -------------------------------------------------------------------------------- /src/state_migration/nv22fix/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the fix logic for the `NV22` calibration network fix. 5 | //! The corresponding Go implementation can be found here: 6 | //! . 7 | mod migration; 8 | 9 | /// Run migration for `NV22fix`. This should be the only exported method in this 10 | /// module. 11 | pub use migration::run_migration; 12 | 13 | use crate::{define_system_states, impl_system, impl_verifier}; 14 | 15 | define_system_states!( 16 | fil_actor_system_state::v13::State, 17 | fil_actor_system_state::v13::State 18 | ); 19 | 20 | impl_system!(); 21 | impl_verifier!(); 22 | -------------------------------------------------------------------------------- /src/state_migration/nv23/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV23` upgrade. 5 | //! The corresponding Go implementation can be found here: 6 | //! 7 | 8 | mod migration; 9 | mod mining_reserve; 10 | 11 | /// Run migration for `NV23`. This should be the only exported method in this 12 | /// module. 
13 | pub use migration::run_migration; 14 | 15 | use crate::{define_system_states, impl_system, impl_verifier}; 16 | 17 | define_system_states!( 18 | fil_actor_system_state::v13::State, 19 | fil_actor_system_state::v14::State 20 | ); 21 | 22 | impl_system!(); 23 | impl_verifier!(); 24 | -------------------------------------------------------------------------------- /src/state_migration/nv24/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV24` upgrade. 5 | mod migration; 6 | mod power; 7 | 8 | /// Run migration for `NV24`. This should be the only exported method in this 9 | /// module. 10 | pub use migration::run_migration; 11 | 12 | use crate::{define_system_states, impl_system, impl_verifier}; 13 | 14 | define_system_states!( 15 | fil_actor_system_state::v14::State, 16 | fil_actor_system_state::v15::State 17 | ); 18 | 19 | impl_system!(); 20 | impl_verifier!(); 21 | -------------------------------------------------------------------------------- /src/state_migration/nv25/evm.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the logic for adding transient storage (EIP-1153). 5 | //! actor. See the [FIP-0097](https://github.com/filecoin-project/FIPs/blob/b258e36e5e085afd48525cb6442f2301553df528/FIPS/fip-0097.md) for more details. 
6 | 7 | use crate::state_migration::common::{ 8 | ActorMigration, ActorMigrationInput, ActorMigrationOutput, TypeMigration, TypeMigrator, 9 | }; 10 | use crate::utils::db::CborStoreExt as _; 11 | use cid::Cid; 12 | use fil_actor_evm_state::v15::State as EvmStateOld; 13 | use fil_actor_evm_state::v16::State as EvmStateNew; 14 | use fvm_ipld_blockstore::Blockstore; 15 | 16 | pub struct EvmMigrator { 17 | pub new_code_cid: Cid, 18 | } 19 | 20 | impl ActorMigration for EvmMigrator { 21 | fn migrate_state( 22 | &self, 23 | store: &BS, 24 | input: ActorMigrationInput, 25 | ) -> anyhow::Result> { 26 | let in_state: EvmStateOld = store.get_cbor_required(&input.head)?; 27 | let out_state: EvmStateNew = TypeMigrator::migrate_type(in_state, store)?; 28 | let new_head = store.put_cbor_default(&out_state)?; 29 | Ok(Some(ActorMigrationOutput { 30 | new_code_cid: self.new_code_cid, 31 | new_head, 32 | })) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/state_migration/nv25/miner.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::state_migration::common::{ 5 | ActorMigration, ActorMigrationInput, ActorMigrationOutput, TypeMigration, TypeMigrator, 6 | }; 7 | use crate::utils::db::CborStoreExt as _; 8 | use cid::Cid; 9 | use fil_actor_miner_state::v15::State as MinerStateOld; 10 | use fil_actor_miner_state::v16::State as MinerStateNew; 11 | use fvm_ipld_blockstore::Blockstore; 12 | 13 | pub struct MinerMigrator { 14 | pub new_code_cid: Cid, 15 | } 16 | 17 | impl ActorMigration for MinerMigrator { 18 | fn migrate_state( 19 | &self, 20 | store: &BS, 21 | input: ActorMigrationInput, 22 | ) -> anyhow::Result> { 23 | let in_state: MinerStateOld = store.get_cbor_required(&input.head)?; 24 | let out_state: MinerStateNew = TypeMigrator::migrate_type(in_state, store)?; 25 | let new_head = 
store.put_cbor_default(&out_state)?; 26 | Ok(Some(ActorMigrationOutput { 27 | new_code_cid: self.new_code_cid, 28 | new_head, 29 | })) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/state_migration/nv25/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV25` upgrade. 5 | mod evm; 6 | mod migration; 7 | mod miner; 8 | 9 | /// Run migration for `NV25`. This should be the only exported method in this 10 | /// module. 11 | pub use migration::run_migration; 12 | 13 | use crate::{define_system_states, impl_system, impl_verifier}; 14 | 15 | define_system_states!( 16 | fil_actor_system_state::v15::State, 17 | fil_actor_system_state::v16::State 18 | ); 19 | 20 | impl_system!(); 21 | impl_verifier!(); 22 | -------------------------------------------------------------------------------- /src/state_migration/nv26fix/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | //! This module contains the migration logic for the `NV26 fix` that happened on calibration 5 | //! network after the `NV26` upgrade (but before it landed on mainnet). Read more details on the 6 | //! issue [here](https://github.com/filecoin-project/community/discussions/74#discussioncomment-12720764). 7 | mod migration; 8 | 9 | /// Run migration for `NV26fix`. This should be the only exported method in this 10 | /// module. 
11 | pub use migration::run_migration; 12 | 13 | use crate::{define_system_states, impl_system, impl_verifier}; 14 | 15 | define_system_states!( 16 | fil_actor_system_state::v16::State, 17 | fil_actor_system_state::v16::State 18 | ); 19 | 20 | impl_system!(); 21 | impl_verifier!(); 22 | -------------------------------------------------------------------------------- /src/state_migration/tests/data/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore 3 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/evm/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod state_v15_to_v16; 5 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/evm/state_v15_to_v16.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::state_migration::common::{TypeMigration, TypeMigrator}; 5 | use anyhow::Context as _; 6 | use fil_actor_evm_state::{ 7 | v15::State as EvmStateV15, 8 | v16::{State as EvmStateV16, Tombstone as TombstoneV16}, 9 | }; 10 | use fvm_ipld_blockstore::Blockstore; 11 | 12 | impl TypeMigration for TypeMigrator { 13 | fn migrate_type(in_state: EvmStateV15, _: &impl Blockstore) -> anyhow::Result { 14 | let out_state = EvmStateV16 { 15 | bytecode: in_state.bytecode, 16 | bytecode_hash: in_state 17 | .bytecode_hash 18 | .as_slice() 19 | .try_into() 20 | .context("bytecode hash conversion failed")?, 21 | contract_state: in_state.contract_state, 22 | nonce: in_state.nonce, 23 | tombstone: in_state.tombstone.map(|t| TombstoneV16 { 24 | origin: t.origin, 25 | nonce: t.nonce, 26 | }), 27 | transient_data: None, 
28 | }; 29 | Ok(out_state) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/init/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod state_v9_to_v10; 5 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/init/state_v9_to_v10.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::shim::{ 5 | address::{Address, PAYLOAD_HASH_LEN}, 6 | state_tree::ActorID, 7 | }; 8 | use fil_actor_init_state::{v9::State as InitStateV9, v10::State as InitStateV10}; 9 | use fil_actors_shared::v10::{Map, make_map_with_root}; 10 | use fvm_ipld_blockstore::Blockstore; 11 | 12 | use crate::state_migration::common::{TypeMigration, TypeMigrator}; 13 | 14 | impl TypeMigration for TypeMigrator { 15 | fn migrate_type(from: InitStateV9, store: &impl Blockstore) -> anyhow::Result { 16 | let mut in_addr_map: Map<_, ActorID> = 17 | make_map_with_root(&from.address_map, &store).map_err(|e| anyhow::anyhow!("{e}"))?; 18 | 19 | let actor_id = from.next_id; 20 | let eth_zero_addr = Address::new_delegated( 21 | Address::ETHEREUM_ACCOUNT_MANAGER_ACTOR.id()?, 22 | &[0; PAYLOAD_HASH_LEN], 23 | )?; 24 | in_addr_map.set(eth_zero_addr.to_bytes().into(), actor_id)?; 25 | 26 | let out_state = InitStateV10 { 27 | address_map: in_addr_map.flush()?, 28 | next_id: from.next_id + 1, 29 | network_name: from.network_name, 30 | }; 31 | 32 | Ok(out_state) 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/market/mod.rs: -------------------------------------------------------------------------------- 1 | // 
Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod state_v8_to_v9; 5 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/market/state_v8_to_v9.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use super::super::super::common::{TypeMigration, TypeMigrator}; 5 | use fil_actor_market_state::{v8::State as MarketStateV8, v9::State as MarketStateV9}; 6 | use fvm_ipld_blockstore::Blockstore; 7 | 8 | impl TypeMigration for TypeMigrator { 9 | fn migrate_type(from: MarketStateV8, _: &impl Blockstore) -> anyhow::Result { 10 | // https://github.com/filecoin-project/go-state-types/blob/master/builtin/v9/migration/market.go#L69 11 | let out_state = MarketStateV9 { 12 | proposals: from.proposals, 13 | pending_proposals: from.pending_proposals, 14 | escrow_table: from.escrow_table, 15 | locked_table: from.locked_table, 16 | next_id: from.next_id, 17 | deal_ops_by_epoch: from.deal_ops_by_epoch, 18 | last_cron: from.last_cron, 19 | total_client_locked_collateral: from.total_client_locked_collateral, 20 | total_provider_locked_collateral: from.total_provider_locked_collateral, 21 | total_client_storage_fee: from.total_client_storage_fee, 22 | 23 | // Changed 24 | states: from.states, 25 | pending_deal_allocation_ids: from.states, 26 | }; 27 | 28 | Ok(out_state) 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/deadlines_v15_to_v16.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::state_migration::common::{TypeMigration, TypeMigrator}; 5 | use crate::utils::db::CborStoreExt; 6 | use 
fil_actor_miner_state::{ 7 | v15::Deadline as DeadlineV15, v15::Deadlines as DeadlinesV15, v16::Deadline as DeadlineV16, 8 | v16::Deadlines as DeadlinesV16, 9 | }; 10 | use fvm_ipld_blockstore::Blockstore; 11 | 12 | impl TypeMigration for TypeMigrator { 13 | fn migrate_type(from: DeadlinesV15, store: &impl Blockstore) -> anyhow::Result { 14 | let mut to = DeadlinesV16 { 15 | due: Vec::with_capacity(from.due.len()), 16 | }; 17 | for old_deadline_cid in from.due { 18 | let old_deadline: DeadlineV15 = store.get_cbor_required(&old_deadline_cid)?; 19 | let new_deadline: DeadlineV16 = TypeMigrator::migrate_type(old_deadline, store)?; 20 | let new_deadline_cid = store.put_cbor_default(&new_deadline)?; 21 | to.due.push(new_deadline_cid); 22 | } 23 | Ok(to) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod deadline_v15_to_v16; 5 | mod deadlines_v15_to_v16; 6 | mod info_v8_to_v9; 7 | mod power_pair_v11_to_v12; 8 | mod power_pair_v8_to_v9; 9 | mod sector_onchain_info_v11_to_v12; 10 | mod sector_onchain_info_v8_to_v9; 11 | mod sector_precommit_info_v8_to_v9; 12 | mod sector_precommit_onchain_info_v8_to_v9; 13 | mod state_v10_to_v11; 14 | mod state_v15_to_v16; 15 | mod state_v8_to_v9; 16 | mod vesting_funds_v15_to_v16; 17 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/power_pair_v11_to_v12.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fil_actor_miner_state::{v11::PowerPair as PowerPairV11, v12::PowerPair as PowerPairV12}; 5 | use fvm_ipld_blockstore::Blockstore; 6 | 7 | use 
super::super::super::common::{TypeMigration, TypeMigrator}; 8 | 9 | impl TypeMigration for TypeMigrator { 10 | fn migrate_type(from: PowerPairV11, _: &impl Blockstore) -> anyhow::Result { 11 | let out = PowerPairV12 { 12 | raw: from.raw, 13 | qa: from.qa, 14 | }; 15 | 16 | Ok(out) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/power_pair_v8_to_v9.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fil_actor_miner_state::{v8::PowerPair as PowerPairV8, v9::PowerPair as PowerPairV9}; 5 | use fvm_ipld_blockstore::Blockstore; 6 | 7 | use super::super::super::common::{TypeMigration, TypeMigrator}; 8 | 9 | impl TypeMigration for TypeMigrator { 10 | fn migrate_type(from: PowerPairV8, _: &impl Blockstore) -> anyhow::Result { 11 | let out = PowerPairV9 { 12 | raw: from.raw, 13 | qa: from.qa, 14 | }; 15 | 16 | Ok(out) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/sector_precommit_info_v8_to_v9.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fil_actor_miner_state::{ 5 | v8::SectorPreCommitInfo as SectorPreCommitInfoV8, 6 | v9::{CompactCommD as CompactCommDV9, SectorPreCommitInfo as SectorPreCommitInfoV9}, 7 | }; 8 | use fvm_ipld_blockstore::Blockstore; 9 | 10 | use super::super::super::common::{TypeMigration, TypeMigrator}; 11 | 12 | impl TypeMigration for TypeMigrator { 13 | fn migrate_type( 14 | from: SectorPreCommitInfoV8, 15 | _: &impl Blockstore, 16 | ) -> anyhow::Result { 17 | let out_info = SectorPreCommitInfoV9 { 18 | seal_proof: from.seal_proof, 19 | sector_number: from.sector_number, 20 | sealed_cid: 
from.sealed_cid, 21 | seal_rand_epoch: from.seal_rand_epoch, 22 | deal_ids: from.deal_ids, 23 | expiration: from.expiration, 24 | unsealed_cid: CompactCommDV9::default(), 25 | }; 26 | 27 | Ok(out_info) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/sector_precommit_onchain_info_v8_to_v9.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use fil_actor_miner_state::{ 5 | v8::SectorPreCommitOnChainInfo as SectorPreCommitOnChainInfoV8, 6 | v9::SectorPreCommitOnChainInfo as SectorPreCommitOnChainInfoV9, 7 | }; 8 | use fvm_ipld_blockstore::Blockstore; 9 | 10 | use super::super::super::common::{TypeMigration, TypeMigrator}; 11 | 12 | impl TypeMigration for TypeMigrator { 13 | fn migrate_type( 14 | from: SectorPreCommitOnChainInfoV8, 15 | store: &impl Blockstore, 16 | ) -> anyhow::Result { 17 | let out_info = SectorPreCommitOnChainInfoV9 { 18 | pre_commit_deposit: from.pre_commit_deposit, 19 | pre_commit_epoch: from.pre_commit_epoch, 20 | info: TypeMigrator::migrate_type(from.info, store)?, 21 | }; 22 | 23 | Ok(out_info) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/miner/vesting_funds_v15_to_v16.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::state_migration::common::{TypeMigration, TypeMigrator}; 5 | use fil_actor_miner_state::{ 6 | v15::VestingFunds as VestingFundsV15, 7 | v16::{VestingFund as VestingFundV16, VestingFunds as VestingFundsV16}, 8 | }; 9 | use fvm_ipld_blockstore::Blockstore; 10 | 11 | impl TypeMigration for TypeMigrator { 12 | fn migrate_type( 13 | from: VestingFundsV15, 14 | store: &impl 
Blockstore, 15 | ) -> anyhow::Result { 16 | let mut to = VestingFundsV16::new(); 17 | let funds = from.funds.into_iter().map(|f| VestingFundV16 { 18 | epoch: f.epoch, 19 | amount: f.amount, 20 | }); 21 | to.save(store, funds)?; 22 | Ok(to) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/state_migration/type_migrations/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod evm; 5 | mod init; 6 | mod market; 7 | mod miner; 8 | -------------------------------------------------------------------------------- /src/tool/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | pub mod main; 4 | mod offline_server; 5 | pub mod subcommands; 6 | -------------------------------------------------------------------------------- /src/tool/offline_server/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub(crate) mod server; 5 | pub use server::start_offline_server; 6 | -------------------------------------------------------------------------------- /src/tool/subcommands/api_cmd/contracts/compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # This script compiles all the Solidity files in the current directory and 3 | # generates the corresponding files with the compiled bytecode in hexadecimal 4 | # format. 5 | 6 | set -euo pipefail 7 | 8 | find . 
-maxdepth 1 -type f -name "*.sol" -print0 | while IFS= read -r -d '' file; do 9 | base_name="${file%.sol}" 10 | solc --bin "$file" | tail -n 1 | tr -d '\n' > "$base_name.hex" 11 | done 12 | -------------------------------------------------------------------------------- /src/tool/subcommands/api_cmd/contracts/invoke_cthulhu.hex: -------------------------------------------------------------------------------- 1 | 60806040525f5f5f6101000a81548160ff0219169083151502179055503480156026575f5ffd5b5061010e806100345f395ff3fe6080604052348015600e575f5ffd5b50600436106030575f3560e01c80635bf737e41460345780636572e3c514603c575b5f5ffd5b603a6056565b005b60426071565b604051604d9190609a565b60405180910390f35b60015f5f6101000a81548160ff021916908315150217905550565b5f5f9054906101000a900460ff1681565b5f8115159050919050565b6094816082565b82525050565b5f60208201905060ab5f830184608d565b9291505056fea264697066735822122005df6924d37f738c31eba559d6df942fcd07bc1d8a2c1300c0e09516b536df6f64736f6c637829302e382e32382d646576656c6f702e323032342e31302e31302b636f6d6d69742e3738393336313461005a -------------------------------------------------------------------------------- /src/tool/subcommands/api_cmd/contracts/invoke_cthulhu.sol: -------------------------------------------------------------------------------- 1 | // SPDX-License-Identifier: MIT 2 | pragma solidity >=0.8.17; 3 | 4 | contract InvokeCthulhu { 5 | bool public cthulhu_is_here = false; 6 | function incoming_doom() public { 7 | cthulhu_is_here = true; 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/tool/subcommands/state_migration_cmd.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::networks::{generate_actor_bundle, get_actor_bundles_metadata}; 5 | use std::path::PathBuf; 6 | 7 | #[derive(Debug, clap::Subcommand)] 8 | pub enum StateMigrationCommands { 9 
| /// Generate a merged actor bundle from the hard-coded sources in forest 10 | ActorBundle { 11 | #[arg(default_value = "actor_bundles.car.zst")] 12 | output: PathBuf, 13 | }, 14 | /// Generate actors metadata from required bundles list 15 | GenerateActorsMetadata, 16 | } 17 | 18 | impl StateMigrationCommands { 19 | pub async fn run(self) -> anyhow::Result<()> { 20 | match self { 21 | Self::ActorBundle { output } => { 22 | generate_actor_bundle(&output).await?; 23 | println!("Wrote the actors bundle to {}", output.display()); 24 | Ok(()) 25 | } 26 | Self::GenerateActorsMetadata => { 27 | let metadata = get_actor_bundles_metadata().await?; 28 | let metadata_json = serde_json::to_string_pretty(&metadata)?; 29 | println!("{}", metadata_json); 30 | Ok(()) 31 | } 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/utils/cid/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use crate::utils::multihash::prelude::*; 5 | use cid::Cid; 6 | use fvm_ipld_encoding::Error; 7 | 8 | /// Extension methods for constructing `dag-cbor` [Cid] 9 | pub trait CidCborExt { 10 | /// Default CID builder for Filecoin 11 | /// 12 | /// - The default codec is [`fvm_ipld_encoding::DAG_CBOR`] 13 | /// - The default hash function is 256 bit BLAKE2b 14 | /// 15 | /// This matches [`abi.CidBuilder`](https://github.com/filecoin-project/go-state-types/blob/master/abi/cid.go#L49) in go 16 | fn from_cbor_blake2b256(obj: &S) -> Result { 17 | let bytes = fvm_ipld_encoding::to_vec(obj)?; 18 | Ok(Cid::new_v1( 19 | fvm_ipld_encoding::DAG_CBOR, 20 | MultihashCode::Blake2b256.digest(&bytes), 21 | )) 22 | } 23 | } 24 | 25 | impl CidCborExt for Cid {} 26 | -------------------------------------------------------------------------------- /src/utils/flume/mod.rs: 
-------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub trait FlumeSenderExt { 5 | fn send_or_warn(&self, msg: T); 6 | } 7 | 8 | impl FlumeSenderExt for flume::Sender { 9 | fn send_or_warn(&self, msg: T) { 10 | if let Err(e) = self.send(msg) { 11 | tracing::warn!("{e}"); 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/utils/misc/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use is_terminal::IsTerminal; 5 | 6 | mod adaptive_value_provider; 7 | pub use adaptive_value_provider::*; 8 | mod logo; 9 | pub use logo::*; 10 | pub mod env; 11 | 12 | #[derive(Debug, Clone, PartialEq, Eq, strum::EnumString)] 13 | #[strum(serialize_all = "kebab-case")] 14 | pub enum LoggingColor { 15 | Always, 16 | Auto, 17 | Never, 18 | } 19 | 20 | impl LoggingColor { 21 | pub fn coloring_enabled(&self) -> bool { 22 | match self { 23 | LoggingColor::Auto => std::io::stdout().is_terminal(), 24 | LoggingColor::Always => true, 25 | LoggingColor::Never => false, 26 | } 27 | } 28 | } 29 | 30 | impl Default for LoggingColor { 31 | fn default() -> Self { 32 | Self::Auto 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/utils/monitoring/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod mem_tracker; 5 | pub use mem_tracker::*; 6 | -------------------------------------------------------------------------------- /src/utils/p2p/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // 
SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use libp2p::Multiaddr; 5 | 6 | pub trait MultiaddrExt: Sized { 7 | fn without_p2p(self) -> Self; 8 | } 9 | 10 | impl MultiaddrExt for Multiaddr { 11 | fn without_p2p(mut self) -> Self { 12 | if let Some(multiaddr::Protocol::P2p(_)) = self.iter().last() { 13 | self.pop(); 14 | self 15 | } else { 16 | self 17 | } 18 | } 19 | } 20 | 21 | #[cfg(test)] 22 | mod tests { 23 | use super::*; 24 | use std::str::FromStr as _; 25 | 26 | #[test] 27 | fn test_without_p2p_positive() { 28 | let ma = Multiaddr::from_str("/dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000/p2p/12D3KooWS3ZRhMYL67b4bD5XQ6fcpTyVQXnDe8H89LvwrDqaSbiT").unwrap(); 29 | assert_eq!( 30 | ma.without_p2p().to_string().as_str(), 31 | "/dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000" 32 | ); 33 | } 34 | 35 | #[test] 36 | fn test_without_p2p_negative() { 37 | let ma = 38 | Multiaddr::from_str("/dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000").unwrap(); 39 | assert_eq!( 40 | ma.without_p2p().to_string().as_str(), 41 | "/dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000" 42 | ); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /src/utils/proofs_api/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | mod parameters; 5 | mod paramfetch; 6 | 7 | pub use parameters::maybe_set_proofs_parameter_cache_dir_env; 8 | pub use paramfetch::{SectorSizeOpt, ensure_proof_params_downloaded, get_params_default}; 9 | -------------------------------------------------------------------------------- /src/utils/stream.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | use futures::{Stream, StreamExt}; 4 | 5 | /// Decouple stream generation and stream consumption 
into separate threads, 6 | /// keeping not-yet-consumed elements in a bounded queue. This is similar to 7 | /// [`stream::buffered`](https://docs.rs/futures/latest/futures/stream/trait.StreamExt.html#method.buffered) 8 | /// and 9 | /// [`sink::buffer`](https://docs.rs/futures/latest/futures/sink/trait.SinkExt.html#method.buffer). 10 | /// The key difference is that [`par_buffer`] is parallel rather than concurrent 11 | /// and will make use of multiple cores when both the stream and the stream 12 | /// consumer are CPU-bound. Because a new thread is spawned, the stream has to 13 | /// be [`Sync`], [`Send`] and `'static`. 14 | pub fn par_buffer( 15 | cap: usize, 16 | stream: impl Stream + Send + Sync + 'static, 17 | ) -> impl Stream { 18 | let (send, recv) = flume::bounded(cap); 19 | tokio::task::spawn(stream.map(Ok).forward(send.into_sink())); 20 | recv.into_stream() 21 | } 22 | -------------------------------------------------------------------------------- /src/wallet/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | use std::ffi::OsString; 5 | 6 | use super::subcommands::Cli; 7 | use crate::networks::NetworkChain; 8 | use crate::rpc::{self, prelude::*}; 9 | use crate::shim::address::{CurrentNetwork, Network}; 10 | use clap::Parser; 11 | use std::str::FromStr; 12 | 13 | pub fn main(args: impl IntoIterator) -> anyhow::Result<()> 14 | where 15 | ArgT: Into + Clone, 16 | { 17 | // Capture Cli inputs 18 | let Cli { 19 | opts, 20 | remote_wallet, 21 | encrypt, 22 | cmd, 23 | } = Cli::parse_from(args); 24 | 25 | let client = rpc::Client::default_or_from_env(opts.token.as_deref())?; 26 | 27 | tokio::runtime::Builder::new_multi_thread() 28 | .enable_all() 29 | .build()? 
30 | .block_on(async { 31 | let name = StateNetworkName::call(&client, ()).await?; 32 | let chain = NetworkChain::from_str(&name)?; 33 | if chain.is_testnet() { 34 | CurrentNetwork::set_global(Network::Testnet); 35 | } 36 | // Run command 37 | cmd.run(client, remote_wallet, encrypt).await 38 | }) 39 | } 40 | -------------------------------------------------------------------------------- /src/wallet/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | pub mod main; 4 | pub mod subcommands; 5 | -------------------------------------------------------------------------------- /src/wallet/subcommands/mod.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod wallet_cmd; 5 | 6 | use crate::cli_shared::cli::{CliRpcOpts, HELP_MESSAGE}; 7 | use crate::utils::version::FOREST_VERSION_STRING; 8 | use clap::Parser; 9 | 10 | /// Command-line options for the `forest-wallet` binary 11 | #[derive(Parser)] 12 | #[command(name = env!("CARGO_PKG_NAME"), bin_name = "forest-wallet", author = env!("CARGO_PKG_AUTHORS"), version = FOREST_VERSION_STRING.as_str(), about = env!("CARGO_PKG_DESCRIPTION") 13 | )] 14 | #[command(help_template(HELP_MESSAGE))] 15 | pub struct Cli { 16 | #[clap(flatten)] 17 | pub opts: CliRpcOpts, 18 | 19 | /// Use remote wallet associated with the Filecoin node. 20 | /// Warning! You should ensure that your connection is encrypted and secure, 21 | /// as the communication between the wallet and the node is **not** encrypted. 
22 | #[arg(long)] 23 | pub remote_wallet: bool, 24 | 25 | /// Encrypt local wallet 26 | #[arg(long)] 27 | pub encrypt: bool, 28 | 29 | #[command(subcommand)] 30 | pub cmd: wallet_cmd::WalletCommands, 31 | } 32 | -------------------------------------------------------------------------------- /taplo.toml: -------------------------------------------------------------------------------- 1 | exclude = ["**/tests/**/vendor/*.toml"] 2 | 3 | [formatting] 4 | column_width = 120 5 | array_auto_expand = true 6 | allowed_blank_lines = 1 7 | 8 | [[rule]] 9 | keys = ["dependencies", "dev-dependencies", "build-dependencies", "toolchain", "workspace.dependencies"] 10 | 11 | [rule.formatting] 12 | reorder_keys = true 13 | -------------------------------------------------------------------------------- /test-snapshots/carv2.car.zst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/test-snapshots/carv2.car.zst -------------------------------------------------------------------------------- /test-snapshots/chain4.car: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/test-snapshots/chain4.car -------------------------------------------------------------------------------- /test-snapshots/chain4.car.zst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/test-snapshots/chain4.car.zst -------------------------------------------------------------------------------- /test-snapshots/chain4.forest.car.zst: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/test-snapshots/chain4.forest.car.zst 
-------------------------------------------------------------------------------- /tests/db_migration_tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod common; 5 | 6 | use crate::common::{CommonArgs, CommonEnv, create_tmp_config, daemon}; 7 | 8 | #[test] 9 | fn future_db_should_not_fail_daemon() { 10 | let (config_file, data_dir) = create_tmp_config(); 11 | 12 | // Create a future, versioned database in the data directory. 13 | // This should be ignored by the daemon. 14 | // In the end, we should have two databases in the data directory: 15 | // - The future database which should not be deleted, 16 | // - The new, fresh database which should be used by the daemon. 17 | 18 | let bad_db_path = data_dir.path().join("calibnet").join("666.42.13"); 19 | std::fs::create_dir_all(&bad_db_path).unwrap(); 20 | daemon() 21 | .common_env() 22 | .common_args() 23 | .arg("--config") 24 | .arg(config_file) 25 | .arg("--encrypt-keystore") 26 | .arg("false") 27 | .assert() 28 | .success(); 29 | 30 | assert!(bad_db_path.exists()); 31 | 32 | let forest_version = std::env::var("CARGO_PKG_VERSION").unwrap(); 33 | assert!( 34 | data_dir 35 | .path() 36 | .join("calibnet") 37 | .join(forest_version) 38 | .exists() 39 | ); 40 | } 41 | -------------------------------------------------------------------------------- /tests/import_snapshot_tests.rs: -------------------------------------------------------------------------------- 1 | // Copyright 2019-2025 ChainSafe Systems 2 | // SPDX-License-Identifier: Apache-2.0, MIT 3 | 4 | pub mod common; 5 | 6 | use crate::common::{CommonEnv, create_tmp_config, daemon}; 7 | 8 | #[test] 9 | fn importing_bad_snapshot_should_fail() { 10 | let (config_file, data_dir) = create_tmp_config(); 11 | let temp_file = data_dir.path().join("bad-snapshot.car"); 12 | std::fs::write(&temp_file, "bad-snapshot").unwrap(); 
13 | daemon() 14 | .common_env() 15 | .arg("--rpc") 16 | .arg("false") 17 | .arg("--no-metrics") 18 | .arg("--no-healthcheck") 19 | .arg("--config") 20 | .arg(config_file) 21 | .arg("--encrypt-keystore") 22 | .arg("false") 23 | .arg("--skip-load-actors") 24 | .arg("--import-snapshot") 25 | .arg(temp_file) 26 | .assert() 27 | .failure(); 28 | } 29 | -------------------------------------------------------------------------------- /yarn.lock: -------------------------------------------------------------------------------- 1 | # This file is generated by running "yarn install" inside your project. 2 | # Manual changes might be lost - proceed with caution! 3 | 4 | __metadata: 5 | version: 8 6 | cacheKey: 10c0 7 | 8 | "@docusaurus/tsconfig@npm:^3.7.0": 9 | version: 3.7.0 10 | resolution: "@docusaurus/tsconfig@npm:3.7.0" 11 | checksum: 10c0/22a076fa3cf6da25a76f87fbe5b37c09997f5a8729fdc1a69c0c7955dff9f9850f16dc1de8c6d5096d258a95c428fb8839b252b9dbaa648acb7de8a0e5889dea 12 | languageName: node 13 | linkType: hard 14 | 15 | "prettier@npm:^3.5.3": 16 | version: 3.5.3 17 | resolution: "prettier@npm:3.5.3" 18 | bin: 19 | prettier: bin/prettier.cjs 20 | checksum: 10c0/3880cb90b9dc0635819ab52ff571518c35bd7f15a6e80a2054c05dbc8a3aa6e74f135519e91197de63705bcb38388ded7e7230e2178432a1468005406238b877 21 | languageName: node 22 | linkType: hard 23 | 24 | "root@workspace:.": 25 | version: 0.0.0-use.local 26 | resolution: "root@workspace:." 27 | dependencies: 28 | "@docusaurus/tsconfig": "npm:^3.7.0" 29 | prettier: "npm:^3.5.3" 30 | languageName: unknown 31 | linkType: soft 32 | --------------------------------------------------------------------------------