├── .cargo
└── config.toml
├── .clippy.toml
├── .config
├── en_US.dic
├── forest.dic
├── lychee.toml
├── nextest.toml
├── spellcheck.md
└── spellcheck.toml
├── .dockerignore
├── .eslintrc.yml
├── .gitattributes
├── .github
├── .prettierrc
├── CARGO_ADVISORIES_ISSUE_TEMPLATE.md
├── CHECKPOINT_ISSUE_TEMPLATE.md
├── CODEOWNERS
├── DOCKER_ISSUE_TEMPLATE.md
├── ISSUE_TEMPLATE
│ ├── 1-bug_report.md
│ ├── 2-user_request.md
│ ├── 3-epic.md
│ ├── 4-task.md
│ ├── 5-other.md
│ └── config.yml
├── PULL_REQUEST_TEMPLATE.md
├── RPC_PARITY_ISSUE_TEMPLATE.md
├── SNAPSHOT_PARITY_ISSUE_TEMPLATE.md
├── dependabot.yml
└── workflows
│ ├── butterflynet.yml
│ ├── cargo-advisories.yml
│ ├── checkpoints.yml
│ ├── curio-devnet-publish.yml
│ ├── docker-latest-tag.yml
│ ├── docker.yml
│ ├── dockerfile-check.yml
│ ├── docs-auto-update.yml
│ ├── docs-check.yml
│ ├── docs-deploy.yml
│ ├── docs-required-override.yml
│ ├── forest.yml
│ ├── link-check.yml
│ ├── lotus-api-bump.yml
│ ├── lotus-devnet-publish.yml
│ ├── release.yml
│ ├── release_dispatch.yml
│ ├── rpc-parity.yml
│ ├── rpc_test_repeat.yml
│ ├── rust-lint.yml
│ ├── scripts-lint.yml
│ ├── snapshot-parity.yml
│ └── unit-tests.yml
├── .gitignore
├── .prettierignore
├── .yarnrc.yml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── Dockerfile-ci
├── FUNDING.json
├── LICENSE-APACHE
├── LICENSE-MIT
├── Makefile
├── README.md
├── benches
├── car-index.rs
└── example-benchmark.rs
├── build.rs
├── build
├── bootstrap
│ ├── butterflynet
│ ├── calibnet
│ └── mainnet
├── known_blocks.yaml
├── manifest.json
└── vendored-docs-redirect.index.html
├── deny.toml
├── docs
├── .bookignore
├── .gitattributes
├── .gitignore
├── .spellcheck.yml
├── Makefile
├── README.md
├── babel.config.js
├── devSidebars.js
├── dictionary.txt
├── docs
│ ├── developers
│ │ ├── guides
│ │ │ ├── _category_.json
│ │ │ ├── network_upgrades.md
│ │ │ └── rpc_test_snapshot.md
│ │ └── introduction.md
│ └── users
│ │ ├── filecoin_services.md
│ │ ├── getting_started
│ │ ├── _category_.json
│ │ ├── hardware-reqs.md
│ │ ├── install.md
│ │ └── syncing.md
│ │ ├── guides
│ │ ├── _category_.json
│ │ ├── advanced
│ │ │ ├── _category_.json
│ │ │ ├── backups.md
│ │ │ └── generating_snapshots.md
│ │ ├── gc.md
│ │ ├── interacting_with_wallets.md
│ │ ├── methods_filtering.md
│ │ ├── monitoring
│ │ │ ├── _category_.json
│ │ │ ├── best_practices.md
│ │ │ ├── health_checks.md
│ │ │ ├── logs.md
│ │ │ └── metrics.md
│ │ ├── running_bootstrap_node.md
│ │ ├── running_with_curio.md
│ │ └── running_with_gateway.md
│ │ ├── introduction.md
│ │ ├── knowledge_base
│ │ ├── _category_.json
│ │ ├── docker_tips.md
│ │ ├── jwt_handling.md
│ │ ├── network_upgrades_state_migrations.md
│ │ └── snapshot_service.md
│ │ ├── openrpc.json
│ │ └── reference
│ │ ├── _category_.json
│ │ ├── cli.md
│ │ ├── cli.sh
│ │ ├── env_variables.md
│ │ ├── generate_cli_md.sh
│ │ ├── json_rpc.md
│ │ └── metrics.md
├── docusaurus.config.js
├── package.json
├── src
│ └── css
│ │ └── index.css
├── static
│ ├── .nojekyll
│ └── img
│ │ ├── chainsafe_logo.png
│ │ ├── favicon.ico
│ │ ├── filecoin_logo.png
│ │ ├── logo-with-text.png
│ │ └── logo.png
├── tsconfig.json
├── userSidebars.js
└── yarn.lock
├── documentation
├── .gitignore
├── book.toml
└── src
│ ├── SUMMARY.md
│ ├── developer_documentation
│ ├── archie_and_fuzzy.md
│ ├── chain_index_spike.md
│ ├── chain_muxer_state_machine.md
│ ├── database_migrations.md
│ ├── devnet_notes.md
│ ├── heaptrack
│ │ ├── bottom_up.png
│ │ ├── caller_callee.png
│ │ ├── consumed.png
│ │ ├── flamegraph.png
│ │ ├── sizes.png
│ │ └── summary.png
│ ├── introduction.md
│ ├── local_actions.md
│ ├── memory-analysis.md
│ ├── release_checklist.md
│ ├── rpc_api_compatibility.md
│ ├── state_migration_guide.md
│ └── test_plan.md
│ ├── img
│ └── forest_logo.png
│ ├── introduction.md
│ ├── offline-forest.md
│ └── trouble_shooting.md
├── f3-sidecar
├── .gitignore
├── README.md
├── api.go
├── ec.go
├── ec_test.go
├── f3manifest_2k.json
├── f3manifest_butterfly.json
├── f3manifest_calibnet.json
├── f3manifest_mainnet.json
├── ffi_gen.go
├── ffi_impl.go
├── go.mod
├── go.sum
├── main.go
├── manifest.go
├── p2p.go
├── pubsub.go
├── run.go
├── types.go
├── utils.go
└── utils_test.go
├── go.work
├── interop-tests
├── Cargo.toml
├── README.md
├── build.rs
└── src
│ ├── lib.rs
│ └── tests
│ ├── bitswap_go_compat.rs
│ ├── go_app
│ ├── .gitignore
│ ├── bitswap_impl.go
│ ├── common.go
│ ├── go.mod
│ ├── go.sum
│ └── kad_impl.go
│ ├── go_ffi.rs
│ ├── kad_go_compat.rs
│ └── mod.rs
├── monitoring
├── README.md
├── docker-compose.yml
├── grafana
│ ├── dashboards
│ │ ├── README.md
│ │ └── forest.json
│ └── provisioning
│ │ ├── dashboards
│ │ └── dashboard.yml
│ │ └── datasources
│ │ └── datasource.yml
└── prometheus
│ └── prometheus.yml
├── package.json
├── proto
└── bitswap_pb.proto
├── rust-toolchain.toml
├── scripts
├── add_license.sh
├── copyright.txt
├── db_params_hyperfine.sh
├── devnet-curio
│ ├── .env
│ ├── README.md
│ ├── curio.dockerfile
│ ├── curio.env
│ ├── docker-compose.yml
│ ├── forest_config.toml.tpl
│ ├── lotus-miner.env
│ ├── lotus.env
│ └── run_curio.sh
├── devnet
│ ├── .env
│ ├── README.md
│ ├── check.sh
│ ├── docker-compose.yml
│ ├── forest_ci.dockerfile
│ ├── forest_config.toml.tpl
│ ├── lotus-miner.env
│ ├── lotus.dockerfile
│ ├── lotus.env
│ └── setup.sh
├── linters
│ └── find_unused_deps.rb
├── s3
│ ├── requirement.txt
│ └── set_sccache_do_bucket_lifecycle.py
└── tests
│ ├── api_compare
│ ├── .env
│ ├── api_compare.sh
│ ├── docker-compose.yml
│ ├── filter-list
│ ├── filter-list-offline
│ └── setup.sh
│ ├── bootstrapper
│ ├── .env
│ ├── README.md
│ ├── docker-compose-forest.yml
│ ├── docker-compose-lotus.yml
│ └── test_bootstrapper.sh
│ ├── butterflynet_check.sh
│ ├── calibnet_db_migration.sh
│ ├── calibnet_eth_mapping_check.sh
│ ├── calibnet_export_check.sh
│ ├── calibnet_kademlia_check.sh
│ ├── calibnet_migration_regression_tests.sh
│ ├── calibnet_no_discovery_check.sh
│ ├── calibnet_other_check.sh
│ ├── calibnet_stateless_mode_check.sh
│ ├── calibnet_stateless_rpc_check.sh
│ ├── calibnet_wallet_check.sh
│ ├── forest_cli_check.sh
│ ├── harness.sh
│ ├── snapshot_parity
│ ├── .env
│ ├── docker-compose.yml
│ └── setup.sh
│ ├── test_data
│ └── calibnet_block_3000.json
│ └── upload_rcpsnaps.sh
├── src
├── auth
│ └── mod.rs
├── beacon
│ ├── beacon_entries.rs
│ ├── drand.rs
│ ├── mock_beacon.rs
│ ├── mod.rs
│ ├── signatures
│ │ ├── mod.rs
│ │ ├── public_key_impls.rs
│ │ ├── signature_impls.rs
│ │ └── tests.rs
│ └── tests
│ │ └── drand.rs
├── bin
│ ├── forest-cli.rs
│ ├── forest-tool.rs
│ ├── forest-wallet.rs
│ └── forest.rs
├── blocks
│ ├── block.rs
│ ├── chain4u.rs
│ ├── election_proof.rs
│ ├── gossip_block.rs
│ ├── header.rs
│ ├── mod.rs
│ ├── tests
│ │ ├── calibnet
│ │ │ └── HEAD
│ │ ├── serialization-vectors
│ │ │ ├── README.md
│ │ │ ├── block_headers.json
│ │ │ ├── message_signing.json
│ │ │ └── unsigned_messages.json
│ │ ├── serialization_vectors.rs
│ │ └── ticket_test.rs
│ ├── ticket.rs
│ ├── tipset.rs
│ └── vrf_proof.rs
├── chain
│ ├── mod.rs
│ ├── store
│ │ ├── base_fee.rs
│ │ ├── chain_store.rs
│ │ ├── errors.rs
│ │ ├── index.rs
│ │ ├── mod.rs
│ │ └── tipset_tracker.rs
│ └── weight.rs
├── chain_sync
│ ├── bad_block_cache.rs
│ ├── chain_follower.rs
│ ├── chain_muxer.rs
│ ├── consensus.rs
│ ├── metrics.rs
│ ├── mod.rs
│ ├── network_context.rs
│ ├── sync_status.rs
│ ├── tipset_syncer.rs
│ └── validation.rs
├── cid_collections
│ ├── hash_map.rs
│ ├── hash_set.rs
│ ├── mod.rs
│ └── small_cid_vec.rs
├── cli
│ ├── humantoken.rs
│ ├── main.rs
│ ├── mod.rs
│ └── subcommands
│ │ ├── auth_cmd.rs
│ │ ├── chain_cmd.rs
│ │ ├── chain_cmd
│ │ └── prune.rs
│ │ ├── config_cmd.rs
│ │ ├── f3_cmd.rs
│ │ ├── f3_cmd
│ │ ├── certificate.tpl
│ │ ├── manifest.tpl
│ │ ├── progress.tpl
│ │ └── tests.rs
│ │ ├── healthcheck_cmd.rs
│ │ ├── info_cmd.rs
│ │ ├── mod.rs
│ │ ├── mpool_cmd.rs
│ │ ├── net_cmd.rs
│ │ ├── send_cmd.rs
│ │ ├── shutdown_cmd.rs
│ │ ├── snapshot_cmd.rs
│ │ ├── state_cmd.rs
│ │ ├── sync_cmd.rs
│ │ └── wait_api_cmd.rs
├── cli_shared
│ ├── cli
│ │ ├── client.rs
│ │ ├── completion_cmd.rs
│ │ ├── config.rs
│ │ └── mod.rs
│ ├── logger
│ │ └── mod.rs
│ ├── mod.rs
│ └── snapshot.rs
├── daemon
│ ├── bundle.rs
│ ├── context.rs
│ ├── db_util.rs
│ ├── main.rs
│ └── mod.rs
├── db
│ ├── blockstore_with_read_cache.rs
│ ├── blockstore_with_write_buffer.rs
│ ├── car
│ │ ├── any.rs
│ │ ├── forest.rs
│ │ ├── forest
│ │ │ └── index
│ │ │ │ ├── hash.rs
│ │ │ │ └── mod.rs
│ │ ├── many.rs
│ │ ├── mod.rs
│ │ └── plain.rs
│ ├── db_mode.rs
│ ├── gc
│ │ ├── mod.rs
│ │ └── snapshot.rs
│ ├── memory.rs
│ ├── migration
│ │ ├── db_migration.rs
│ │ ├── migration_map.rs
│ │ ├── mod.rs
│ │ ├── v0_22_1.rs
│ │ ├── v0_26_0.rs
│ │ └── void_migration.rs
│ ├── mod.rs
│ ├── parity_db.rs
│ ├── parity_db_config.rs
│ ├── tests
│ │ ├── db_utils
│ │ │ ├── mod.rs
│ │ │ └── parity.rs
│ │ ├── mem_test.rs
│ │ ├── parity_test.rs
│ │ └── subtests
│ │ │ └── mod.rs
│ └── ttl
│ │ └── mod.rs
├── documentation.rs
├── eth
│ ├── eip_1559_transaction.rs
│ ├── eip_155_transaction.rs
│ ├── homestead_transaction.rs
│ ├── mod.rs
│ └── transaction.rs
├── f3
│ ├── go_ffi.rs
│ └── mod.rs
├── fil_cns
│ ├── mod.rs
│ ├── validation.rs
│ └── weight.rs
├── genesis
│ ├── export40.car
│ └── mod.rs
├── health
│ ├── endpoints.rs
│ └── mod.rs
├── interpreter
│ ├── errors.rs
│ ├── fvm2.rs
│ ├── fvm3.rs
│ ├── fvm4.rs
│ ├── mod.rs
│ └── vm.rs
├── ipld
│ ├── mod.rs
│ ├── selector
│ │ ├── empty_map.rs
│ │ └── mod.rs
│ ├── tests
│ │ ├── cbor_test.rs
│ │ ├── ipld-traversal-vectors
│ │ │ ├── selector_explore.json
│ │ │ ├── selector_walk.json
│ │ │ └── selector_walk_links.json
│ │ ├── selector_explore.rs
│ │ └── selector_gen_tests.rs
│ └── util.rs
├── key_management
│ ├── errors.rs
│ ├── keystore.rs
│ ├── mod.rs
│ ├── tests
│ │ └── keystore_encrypted_old
│ │ │ └── keystore
│ ├── wallet.rs
│ └── wallet_helpers.rs
├── lib.rs
├── libp2p
│ ├── behaviour.rs
│ ├── chain_exchange
│ │ ├── behaviour.rs
│ │ ├── message.rs
│ │ ├── mod.rs
│ │ └── provider.rs
│ ├── config.rs
│ ├── discovery.rs
│ ├── gossip_params.rs
│ ├── hello
│ │ ├── behaviour.rs
│ │ ├── codec.rs
│ │ ├── message.rs
│ │ └── mod.rs
│ ├── keypair.rs
│ ├── metrics.rs
│ ├── mod.rs
│ ├── peer_manager.rs
│ ├── ping.rs
│ ├── rpc
│ │ ├── decoder.rs
│ │ └── mod.rs
│ ├── service.rs
│ └── tests
│ │ └── decode_test.rs
├── libp2p_bitswap
│ ├── behaviour.rs
│ ├── bitswap_pb.rs
│ ├── internals
│ │ ├── codec.rs
│ │ ├── event_handlers.rs
│ │ ├── mod.rs
│ │ └── prefix.rs
│ ├── message.rs
│ ├── metrics.rs
│ ├── mod.rs
│ ├── request_manager.rs
│ ├── store.rs
│ └── tests
│ │ └── request_manager.rs
├── lotus_json
│ ├── actor_state.rs
│ ├── actor_states
│ │ ├── account_state.rs
│ │ ├── cron_state.rs
│ │ ├── entry.rs
│ │ ├── evm_state.rs
│ │ ├── market_state.rs
│ │ ├── miner_state.rs
│ │ ├── mod.rs
│ │ ├── system_state.rs
│ │ └── vesting_funds.rs
│ ├── address.rs
│ ├── allocation.rs
│ ├── beacon_entry.rs
│ ├── beneficiary_term.rs
│ ├── big_int.rs
│ ├── bit_field.rs
│ ├── block_header.rs
│ ├── bytecode_hash.rs
│ ├── cid.rs
│ ├── duration.rs
│ ├── election_proof.rs
│ ├── extended_sector_info.rs
│ ├── gossip_block.rs
│ ├── hash_map.rs
│ ├── ipld.rs
│ ├── key_info.rs
│ ├── message.rs
│ ├── miner_info.rs
│ ├── miner_power.rs
│ ├── mod.rs
│ ├── nonempty.rs
│ ├── opt.rs
│ ├── pending_beneficiary_change.rs
│ ├── po_st_proof.rs
│ ├── power_claim.rs
│ ├── raw_bytes.rs
│ ├── receipt.rs
│ ├── registered_po_st_proof.rs
│ ├── registered_seal_proof.rs
│ ├── sector_info.rs
│ ├── sector_size.rs
│ ├── signature.rs
│ ├── signature_type.rs
│ ├── signed_message.rs
│ ├── ticket.rs
│ ├── tipset_keys.rs
│ ├── token_amount.rs
│ ├── tombstone.rs
│ ├── transient_data.rs
│ ├── vec.rs
│ ├── vec_u8.rs
│ ├── verifreg_claim.rs
│ └── vrf_proof.rs
├── message
│ ├── chain_message.rs
│ ├── mod.rs
│ ├── signed_message.rs
│ └── tests
│ │ └── builder_test.rs
├── message_pool
│ ├── block_prob.rs
│ ├── config.rs
│ ├── errors.rs
│ ├── mod.rs
│ ├── msg_chain.rs
│ └── msgpool
│ │ ├── metrics.rs
│ │ ├── mod.rs
│ │ ├── msg_pool.rs
│ │ ├── provider.rs
│ │ ├── selection.rs
│ │ ├── test_provider.rs
│ │ └── utils.rs
├── metrics
│ ├── db.rs
│ └── mod.rs
├── networks
│ ├── actors_bundle.rs
│ ├── butterflynet
│ │ └── mod.rs
│ ├── calibnet
│ │ ├── genesis.car
│ │ └── mod.rs
│ ├── devnet
│ │ └── mod.rs
│ ├── drand.rs
│ ├── mainnet
│ │ ├── genesis.car
│ │ └── mod.rs
│ ├── metrics.rs
│ └── mod.rs
├── rpc
│ ├── actor_registry.rs
│ ├── auth_layer.rs
│ ├── channel.rs
│ ├── client.rs
│ ├── error.rs
│ ├── filter_layer.rs
│ ├── filter_list.rs
│ ├── log_layer.rs
│ ├── methods
│ │ ├── auth.rs
│ │ ├── beacon.rs
│ │ ├── chain.rs
│ │ ├── chain
│ │ │ └── types.rs
│ │ ├── common.rs
│ │ ├── eth.rs
│ │ ├── eth
│ │ │ ├── errors.rs
│ │ │ ├── eth_tx.rs
│ │ │ ├── filter
│ │ │ │ ├── event.rs
│ │ │ │ ├── mempool.rs
│ │ │ │ ├── mod.rs
│ │ │ │ ├── store.rs
│ │ │ │ └── tipset.rs
│ │ │ ├── trace.rs
│ │ │ ├── types.rs
│ │ │ └── utils.rs
│ │ ├── f3.rs
│ │ ├── f3
│ │ │ ├── contract_manifest_golden.json
│ │ │ ├── contract_return.hex
│ │ │ ├── types.rs
│ │ │ └── util.rs
│ │ ├── gas.rs
│ │ ├── market.rs
│ │ ├── miner.rs
│ │ ├── misc.rs
│ │ ├── mpool.rs
│ │ ├── msig.rs
│ │ ├── net.rs
│ │ ├── net
│ │ │ └── types.rs
│ │ ├── node.rs
│ │ ├── state.rs
│ │ ├── state
│ │ │ └── types.rs
│ │ ├── sync.rs
│ │ ├── sync
│ │ │ └── types.rs
│ │ └── wallet.rs
│ ├── metrics_layer.rs
│ ├── mod.rs
│ ├── reflect
│ │ ├── jsonrpc_types.rs
│ │ ├── mod.rs
│ │ ├── parser.rs
│ │ └── util.rs
│ ├── request.rs
│ ├── segregation_layer.rs
│ ├── set_extension_layer.rs
│ ├── snapshots
│ │ ├── .gitattributes
│ │ ├── .gitignore
│ │ └── README.md
│ └── types
│ │ ├── address_impl.rs
│ │ ├── deal_impl.rs
│ │ ├── mod.rs
│ │ ├── sector_impl.rs
│ │ ├── tests.rs
│ │ └── tsk_impl.rs
├── shim
│ ├── actors
│ │ ├── builtin
│ │ │ ├── account
│ │ │ │ └── mod.rs
│ │ │ ├── cron
│ │ │ │ └── mod.rs
│ │ │ ├── datacap
│ │ │ │ └── mod.rs
│ │ │ ├── eam.rs
│ │ │ ├── evm
│ │ │ │ └── mod.rs
│ │ │ ├── init
│ │ │ │ └── mod.rs
│ │ │ ├── market
│ │ │ │ ├── ext
│ │ │ │ │ ├── balance_table.rs
│ │ │ │ │ ├── mod.rs
│ │ │ │ │ └── state.rs
│ │ │ │ └── mod.rs
│ │ │ ├── miner
│ │ │ │ ├── ext
│ │ │ │ │ ├── deadline.rs
│ │ │ │ │ ├── mod.rs
│ │ │ │ │ ├── partition.rs
│ │ │ │ │ └── state.rs
│ │ │ │ └── mod.rs
│ │ │ ├── mod.rs
│ │ │ ├── multisig
│ │ │ │ ├── ext
│ │ │ │ │ ├── mod.rs
│ │ │ │ │ └── state.rs
│ │ │ │ └── mod.rs
│ │ │ ├── power
│ │ │ │ ├── ext.rs
│ │ │ │ └── mod.rs
│ │ │ ├── reward
│ │ │ │ └── mod.rs
│ │ │ ├── system
│ │ │ │ └── mod.rs
│ │ │ └── verifreg
│ │ │ │ ├── ext
│ │ │ │ ├── mod.rs
│ │ │ │ └── state.rs
│ │ │ │ └── mod.rs
│ │ ├── common.rs
│ │ ├── convert.rs
│ │ ├── macros.rs
│ │ ├── mod.rs
│ │ ├── state_load.rs
│ │ └── version.rs
│ ├── address.rs
│ ├── bigint.rs
│ ├── clock.rs
│ ├── crypto.rs
│ ├── deal.rs
│ ├── econ.rs
│ ├── error.rs
│ ├── executor.rs
│ ├── externs.rs
│ ├── gas.rs
│ ├── kernel.rs
│ ├── machine
│ │ ├── manifest.rs
│ │ └── mod.rs
│ ├── message.rs
│ ├── mod.rs
│ ├── piece.rs
│ ├── randomness.rs
│ ├── sector.rs
│ ├── state_tree.rs
│ ├── state_tree_v0.rs
│ ├── trace.rs
│ └── version.rs
├── state_manager
│ ├── cache.rs
│ ├── chain_rand.rs
│ ├── circulating_supply.rs
│ ├── errors.rs
│ ├── mod.rs
│ └── utils.rs
├── state_migration
│ ├── common
│ │ ├── macros
│ │ │ ├── mod.rs
│ │ │ ├── system.rs
│ │ │ └── verifier.rs
│ │ ├── migration_job.rs
│ │ ├── migrators.rs
│ │ ├── mod.rs
│ │ ├── state_migration.rs
│ │ └── verifier.rs
│ ├── mod.rs
│ ├── nv17
│ │ ├── datacap.rs
│ │ ├── migration.rs
│ │ ├── miner.rs
│ │ ├── mod.rs
│ │ ├── util.rs
│ │ └── verifreg_market.rs
│ ├── nv18
│ │ ├── eam.rs
│ │ ├── eth_account.rs
│ │ ├── init.rs
│ │ ├── migration.rs
│ │ └── mod.rs
│ ├── nv19
│ │ ├── migration.rs
│ │ ├── miner.rs
│ │ ├── mod.rs
│ │ └── power.rs
│ ├── nv21
│ │ ├── migration.rs
│ │ ├── miner.rs
│ │ └── mod.rs
│ ├── nv21fix
│ │ ├── migration.rs
│ │ └── mod.rs
│ ├── nv21fix2
│ │ ├── migration.rs
│ │ └── mod.rs
│ ├── nv22
│ │ ├── market.rs
│ │ ├── migration.rs
│ │ ├── miner.rs
│ │ └── mod.rs
│ ├── nv22fix
│ │ ├── migration.rs
│ │ └── mod.rs
│ ├── nv23
│ │ ├── migration.rs
│ │ ├── mining_reserve.rs
│ │ └── mod.rs
│ ├── nv24
│ │ ├── migration.rs
│ │ ├── mod.rs
│ │ └── power.rs
│ ├── nv25
│ │ ├── evm.rs
│ │ ├── migration.rs
│ │ ├── miner.rs
│ │ └── mod.rs
│ ├── nv26fix
│ │ ├── migration.rs
│ │ └── mod.rs
│ ├── tests
│ │ ├── data
│ │ │ └── .gitignore
│ │ └── mod.rs
│ └── type_migrations
│ │ ├── evm
│ │ ├── mod.rs
│ │ └── state_v15_to_v16.rs
│ │ ├── init
│ │ ├── mod.rs
│ │ └── state_v9_to_v10.rs
│ │ ├── market
│ │ ├── mod.rs
│ │ └── state_v8_to_v9.rs
│ │ ├── miner
│ │ ├── deadline_v15_to_v16.rs
│ │ ├── deadlines_v15_to_v16.rs
│ │ ├── info_v8_to_v9.rs
│ │ ├── mod.rs
│ │ ├── power_pair_v11_to_v12.rs
│ │ ├── power_pair_v8_to_v9.rs
│ │ ├── sector_onchain_info_v11_to_v12.rs
│ │ ├── sector_onchain_info_v8_to_v9.rs
│ │ ├── sector_precommit_info_v8_to_v9.rs
│ │ ├── sector_precommit_onchain_info_v8_to_v9.rs
│ │ ├── state_v10_to_v11.rs
│ │ ├── state_v15_to_v16.rs
│ │ ├── state_v8_to_v9.rs
│ │ └── vesting_funds_v15_to_v16.rs
│ │ └── mod.rs
├── statediff
│ ├── mod.rs
│ └── resolve.rs
├── test_utils
│ └── mod.rs
├── tool
│ ├── main.rs
│ ├── mod.rs
│ ├── offline_server
│ │ ├── mod.rs
│ │ └── server.rs
│ └── subcommands
│ │ ├── api_cmd.rs
│ │ ├── api_cmd
│ │ ├── api_compare_tests.rs
│ │ ├── contracts
│ │ │ ├── compile.sh
│ │ │ ├── invoke_cthulhu.hex
│ │ │ └── invoke_cthulhu.sol
│ │ ├── generate_test_snapshot.rs
│ │ ├── test_snapshot.rs
│ │ ├── test_snapshots.txt
│ │ └── test_snapshots_ignored.txt
│ │ ├── archive_cmd.rs
│ │ ├── backup_cmd.rs
│ │ ├── benchmark_cmd.rs
│ │ ├── car_cmd.rs
│ │ ├── db_cmd.rs
│ │ ├── fetch_params_cmd.rs
│ │ ├── index_cmd.rs
│ │ ├── mod.rs
│ │ ├── net_cmd.rs
│ │ ├── shed_cmd.rs
│ │ ├── shed_cmd
│ │ └── migration.rs
│ │ ├── snapshot_cmd.rs
│ │ └── state_migration_cmd.rs
├── utils
│ ├── cid
│ │ └── mod.rs
│ ├── db
│ │ ├── car_stream.rs
│ │ ├── car_util.rs
│ │ └── mod.rs
│ ├── encoding
│ │ ├── cid_de_cbor.rs
│ │ ├── fallback_de_ipld_dagcbor.rs
│ │ └── mod.rs
│ ├── flume
│ │ └── mod.rs
│ ├── io
│ │ ├── mmap.rs
│ │ ├── mod.rs
│ │ ├── progress_log.rs
│ │ └── writer_checksum.rs
│ ├── misc
│ │ ├── adaptive_value_provider.rs
│ │ ├── env.rs
│ │ ├── logo.rs
│ │ └── mod.rs
│ ├── mod.rs
│ ├── monitoring
│ │ ├── mem_tracker.rs
│ │ └── mod.rs
│ ├── multihash.rs
│ ├── net.rs
│ ├── net
│ │ └── download_file.rs
│ ├── p2p
│ │ └── mod.rs
│ ├── proofs_api
│ │ ├── mod.rs
│ │ ├── parameters.json
│ │ ├── parameters.rs
│ │ └── paramfetch.rs
│ ├── rand
│ │ └── mod.rs
│ ├── reqwest_resume
│ │ ├── mod.rs
│ │ └── tests.rs
│ ├── stats
│ │ └── mod.rs
│ ├── stream.rs
│ ├── tests
│ │ └── files.rs
│ └── version
│ │ └── mod.rs
└── wallet
│ ├── main.rs
│ ├── mod.rs
│ └── subcommands
│ ├── mod.rs
│ └── wallet_cmd.rs
├── taplo.toml
├── test-snapshots
├── carv2.car.zst
├── chain4.car
├── chain4.car.zst
└── chain4.forest.car.zst
├── tests
├── cmd_state_migration_tests.rs
├── common
│ └── mod.rs
├── config.rs
├── db_migration_tests.rs
├── db_mode_tests.rs
├── import_snapshot_tests.rs
├── keystore_tests.rs
├── lint.rs
├── lints
│ └── mod.rs
└── tool_tests.rs
└── yarn.lock
/.cargo/config.toml:
--------------------------------------------------------------------------------
1 | [alias]
2 | # Permits `cargo cli --chain calibnet ...`
3 | cli = "run --bin forest-cli --"
4 | daemon = "run --bin forest --"
5 | forest-tool = "run --bin forest-tool --release --"
6 |
7 | [build]
8 | incremental = true
9 |
10 | # TODO(aatifsyed): remove - this can be pushed out to readme
11 | # In all cases, pass --cfg=tokio_unstable for tokio console integration
12 | # See (https://github.com/ChainSafe/forest/pull/2245)
13 | # Note that this may be overridden by user configuration at ~/.cargo/config.toml
14 | rustflags = ["--cfg=tokio_unstable"]
15 |
16 | [net]
17 | git-fetch-with-cli = true
18 | retry = 5
19 |
20 | [registries.crates-io]
21 | protocol = "sparse"
22 |
23 | [env]
24 | # Disable exponential formatting in `bigdecimal`
25 | # as a workaround for https://github.com/ChainSafe/forest/issues/4035
26 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_THRESHOLD = { value = "100", force = true } # 0.4.3
27 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_LOWER_THRESHOLD = { value = "100", force = true } # 0.4.7
28 | RUST_BIGDECIMAL_FMT_EXPONENTIAL_UPPER_THRESHOLD = { value = "100", force = true } # 0.4.7
29 |
--------------------------------------------------------------------------------
/.config/lychee.toml:
--------------------------------------------------------------------------------
1 | # This is the common config used by lychee, our dead-link checker
2 | # See the GitHub Actions workflows to see the inputs
3 | # https://github.com/lycheeverse/lychee/blob/2109470dc380eaf66944b6bcfa86230e0a58e58f/lychee-bin/src/options.rs#L152
4 |
5 | verbose = "debug"
6 | no_progress = true
7 | exclude_path = ["./node_modules", "./docs/node_modules", "./documentation", "./target"]
8 | exclude = [
9 | # Avoid Github rate limits
10 | "github.com/ChainSafe/forest",
11 | # Requires CAPTCHA verification
12 | "faucet.calibnet.chainsafe-fil.io/funds.html",
13 | # Bot protection
14 | "jwt.io",
15 | "forest-explorer.chainsafe.dev",
16 | # Maybe temporarily down with 404, but it blocks the CI
17 | "filecoin.io/slack",
18 |
19 | ]
20 | timeout = 30
21 | max_retries = 6
22 | retry_wait_time = 10
23 |
24 | output = "lychee-report.md"
25 |
--------------------------------------------------------------------------------
/.config/spellcheck.toml:
--------------------------------------------------------------------------------
1 | dev_comments = false
2 | skip_readme = false
3 |
4 | [hunspell]
5 | lang = "en_US"
6 | search_dirs = ["."]
7 | skip_os_lookups = true
8 | use_builtin = true
9 | tokenization_splitchars = "\",;:.!?#(){}[]|/_-‒'`&@§¶…<>="
10 | extra_dictionaries = ["forest.dic", "en_US.dic"]
11 |
12 | [hunspell.quirks]
13 | transform_regex = [
14 | # 10.7%
15 | "^[0-9_]+(?:\\.[0-9]*)?%$",
16 | ]
17 | allow_concatenation = false
18 | allow_dashes = false
19 | allow_emojis = true
20 |
21 | [nlprules]
22 |
23 | [reflow]
24 | max_line_length = 80
25 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .github
2 | .maintain
3 | Dockerfile
4 | .dockerignore
5 | *.md
6 | target/
7 | scripts/
8 |
9 | # Ignore CAR files fetched to the project directory, which tends to happen during development.
10 | # Without it, the Docker context may bloat to hundreds of gigabytes of data.
11 | /*.car
12 | /*.car.zst
13 |
--------------------------------------------------------------------------------
/.eslintrc.yml:
--------------------------------------------------------------------------------
1 | env:
2 | commonjs: true
3 | es2021: true
4 | shared-node-browser: true
5 | extends: eslint:recommended
6 | overrides: []
7 | parserOptions:
8 | ecmaVersion: latest
9 | rules: {}
10 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | assets/actor_bundles.car.zst filter=lfs diff=lfs merge=lfs -text
2 |
--------------------------------------------------------------------------------
/.github/.prettierrc:
--------------------------------------------------------------------------------
1 | proseWrap: never
2 |
--------------------------------------------------------------------------------
/.github/CARGO_ADVISORIES_ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "[automated] `cargo deny check advisories` failure @ {{ date | date('D/M/YY HH:mm') }}"
3 | labels: ["Bug"]
4 | ---
5 |
6 | ## Description
7 |
8 | Please [check the logs]({{ env.WORKFLOW_URL }}) for more information.
9 |
--------------------------------------------------------------------------------
/.github/CHECKPOINT_ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "cron: update known_blocks.yaml"
3 | labels: ["Type: Task"]
4 | ---
5 |
6 | Forest uses checkpoints to improve performance when loading a snapshot. Without checkpoints, the blockchain has to be fully traversed to verify we have the right genesis block. Checkpoints short-circuit this search and shave off tens of minutes in boot time.
7 |
8 | Checkpoints have to be regularly updated, though, and [this issue](/.github/CHECKPOINT_ISSUE_TEMPLATE.md) is [automatically created once per month](/.github/workflows/checkpoints.yml). Follow the procedure below to update [`build/known_blocks.yaml`](/build/known_blocks.yaml), and close this issue.
9 |
10 | # Procedure
11 |
12 | ```bash
13 | #!/bin/bash
14 |
15 | # Perform this for `calibnet` AND `mainnet`
16 | chains=("mainnet" "calibnet")
17 |
18 | for chain in "${chains[@]}"
19 | do
20 | # download the latest snapshot.
21 | # =============================
22 | # - calibnet ~3G, ~1min on a droplet
23 | # - mainnet ~60G, ~15mins on a droplet
24 | aria2c -x5 https://forest-archive.chainsafe.dev/latest/"$chain"/ -o "$chain"
25 |
26 | # print out the checkpoints.
27 | # ==========================
28 | # The whole operation takes a long time, BUT you only need the first line or so.
29 | timeout 15s forest-tool archive checkpoints "$chain"
30 | done
31 |
32 | # Update `build/known_blocks.yaml` as appropriate, manually.
33 | ```
34 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Two members of the Forest team are automatically (and randomly) assigned to review all PRs.
2 | * @ChainSafe/Forest
3 |
--------------------------------------------------------------------------------
/.github/DOCKER_ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "[automated] Docker check failure"
3 | labels: ["Bug"]
4 | ---
5 |
6 | ## Description
7 |
8 | Latest Docker check failed. Please [check the logs]({{ env.WORKFLOW_URL }}) for more information.
9 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/1-bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Provide a report of unexpected behaviour
4 | title: ""
5 | labels: "Type: Bug"
6 | assignees: ""
7 | ---
8 |
9 | ## Describe the bug
10 |
11 |
12 |
13 | ## To reproduce
14 |
15 |
16 |
17 | 1. Go to '...'
18 | 2. Run '....'
19 | 3. See error
20 |
21 | ## Log output
22 |
23 |
24 |
25 | Log Output
26 |
27 | ```Paste log output here
28 | paste log output...
29 | ```
30 |
31 |
32 | ## Expected behaviour
33 |
34 |
35 |
36 | ## Screenshots
37 |
38 |
39 |
40 | ## Environment (please complete the following information):
41 |
42 | - OS:
43 | - Branch/commit
44 | - Hardware
45 |
46 | ## Other information and links
47 |
48 |
49 |
50 |
51 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/2-user_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: User Request
3 | about: Request a feature or change
4 | title: ""
5 | labels: "Type: Request"
6 | assignees: ""
7 | ---
8 |
9 | # Summary
10 |
11 |
12 |
13 | # Requirements
14 |
15 |
16 |
17 | # Motivation
18 |
19 |
20 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/3-epic.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Epic
3 | about: "[Internal] Larger chunk of work that can be broken down into smaller tasks"
4 | title: ""
5 | labels: "Type: Epic"
6 | assignees: ""
7 | ---
8 |
9 | # Summary
10 |
11 |
12 |
13 | # Motivation
14 |
15 |
16 |
17 | # Tasks
18 |
19 |
20 |
21 |
22 | - [ ]
23 |
24 | # Risks & Dependencies
25 |
26 |
27 |
28 | # Additional Links & Resources
29 |
30 |
31 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/4-task.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Task
3 | about: "[Internal] A specific, actionable unit of work"
4 | title: ""
5 | labels: "Type: Task"
6 | assignees: ""
7 | ---
8 |
9 | # Summary
10 |
11 |
12 |
13 | # Completion Criteria
14 |
15 |
16 |
17 | - [ ]
18 |
19 | # Additional Links & Resources
20 |
21 |
22 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/5-other.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Other
3 | about: Use wisely 🧐
4 | title: ""
5 | labels: ""
6 | assignees: ""
7 | ---
8 |
9 |
10 |
11 | This issue is NOT:
12 |
13 | - [ ] A bug
14 | - [ ] A user-request
15 | - [ ] An epic
16 | - [ ] A task
17 |
18 | Please check the other issue types if any of these categories apply.
19 |
20 | ---
21 |
22 |
23 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: true
2 | contact_links:
3 | - name: "Filecoin Slack (#fil-forest-help or #fil-forest-dev)"
4 | url: https://join.slack.com/t/filecoinproject/shared_invite/enQtNTUwNTI1Mzk5MDYwLTY4YmFjMzRlZjFiNDc0NmI2N2JjMjk5YTAyMDUyODljODg3MGI0ZGRhZTI5ZDNkZTAyNjkyMzI1ODM1YjA1MWI
5 | about: Please ask questions here.
6 | - name: General Filecoin Discussion Forum
7 | url: https://github.com/filecoin-project/community/discussions
8 | about: Please ask any general Filecoin questions here.
9 | - name: Forest Discussion Forum
10 | url: https://github.com/ChainSafe/forest/discussions
11 | about: Please ask Forest specific questions here.
12 |
--------------------------------------------------------------------------------
/.github/RPC_PARITY_ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "[automated] RPC parity test failure @ {{ date | date('D/M/YY HH:mm') }}"
3 | labels: ["Bug"]
4 | ---
5 |
6 | ## Description
7 |
8 | Latest RPC parity test failed. Please [check the logs]({{ env.WORKFLOW_URL }}) for more information.
9 |
--------------------------------------------------------------------------------
/.github/SNAPSHOT_PARITY_ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: "[automated] Snapshot parity test failure"
3 | labels: ["Bug"]
4 | ---
5 |
6 | ## Description
7 |
8 | Latest snapshot parity test failed. Please [check the logs]({{ env.WORKFLOW_URL }}) for more information.
9 |
--------------------------------------------------------------------------------
/.github/workflows/butterflynet.yml:
--------------------------------------------------------------------------------
1 | name: Butterflynet checks
2 | on:
3 | workflow_dispatch:
4 | env:
5 | CI: 1
6 | CARGO_INCREMENTAL: 0
7 | CACHE_TIMEOUT_MINUTES: 5
8 | SCRIPT_TIMEOUT_MINUTES: 30
9 | AWS_ACCESS_KEY_ID: "${{ secrets.AWS_ACCESS_KEY_ID }}"
10 | AWS_SECRET_ACCESS_KEY: "${{ secrets.AWS_SECRET_ACCESS_KEY }}"
11 | RUSTC_WRAPPER: sccache
12 | CC: sccache clang
13 | CXX: sccache clang++
14 | FIL_PROOFS_PARAMETER_CACHE: /var/tmp/filecoin-proof-parameters
15 | SHELL_IMAGE: busybox
16 | jobs:
17 | butterflynet-checks:
18 | name: Butterflynet checks
19 | runs-on: ubuntu-24.04-arm
20 | steps:
21 | - name: Checkout Sources
22 | uses: actions/checkout@v4
23 | - name: Setup sccache
24 | uses: mozilla-actions/sccache-action@v0.0.9
25 | timeout-minutes: "${{ fromJSON(env.CACHE_TIMEOUT_MINUTES) }}"
26 | continue-on-error: true
27 | - uses: actions/setup-go@v5
28 | with:
29 | go-version-file: "go.work"
30 | - name: Build and install Forest binaries
31 | env:
32 | # To minimize compile times: https://nnethercote.github.io/perf-book/build-configuration.html#minimizing-compile-times
33 | RUSTFLAGS: "-C linker=clang -C link-arg=-fuse-ld=lld"
34 | run: make install-slim-quick
35 | - name: Run butterflynet checks
36 | run: ./scripts/tests/butterflynet_check.sh
37 | timeout-minutes: "${{ fromJSON(env.SCRIPT_TIMEOUT_MINUTES) }}"
38 |
--------------------------------------------------------------------------------
/.github/workflows/cargo-advisories.yml:
--------------------------------------------------------------------------------
1 | name: cargo deny advisories
2 | on:
3 | workflow_dispatch:
4 | schedule:
5 | - cron: "0 0 * * *"
6 | jobs:
7 | rpc-parity:
8 | name: cargo deny advisories
9 | runs-on: ubuntu-24.04-arm
10 | steps:
11 | - uses: actions/checkout@v4
12 | - run: make install-cargo-binstall
13 | - run: cargo binstall --no-confirm cargo-deny
14 | - run: cargo deny check advisories
15 | - name: Set WORKFLOW_URL
16 | if: failure()
17 | run: |
18 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
19 | echo ${WORKFLOW_URL}
20 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV
21 | - uses: JasonEtco/create-an-issue@v2
22 | if: failure()
23 | env:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 | with:
26 | filename: .github/CARGO_ADVISORIES_ISSUE_TEMPLATE.md
27 |
--------------------------------------------------------------------------------
/.github/workflows/checkpoints.yml:
--------------------------------------------------------------------------------
1 | name: Checkpoints
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: "0 0 1 * *" # The 1st of every month
7 |
8 | jobs:
9 | checkpoint_issue:
10 | runs-on: ubuntu-24.04-arm
11 | steps:
12 | - uses: actions/checkout@v4
13 | - uses: JasonEtco/create-an-issue@v2
14 | env:
15 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
16 | with:
17 | filename: .github/CHECKPOINT_ISSUE_TEMPLATE.md
18 |
--------------------------------------------------------------------------------
/.github/workflows/dockerfile-check.yml:
--------------------------------------------------------------------------------
1 | # Checks periodically that the Dockerfile builds successfully, and if it doesn't, it creates an issue with the error message.
2 | name: Dockerfile Check
3 |
4 | on:
5 | workflow_dispatch:
6 | schedule:
7 | # Run every Sunday at midnight
8 | - cron: '0 0 * * 0'
9 |
10 | jobs:
11 | docker-check:
12 | runs-on: ubuntu-24.04-arm
13 | steps:
14 | - uses: actions/checkout@v4
15 | - run: |
16 | docker build -t forest-test .
17 | docker run --rm forest-test --version
18 | - name: Set WORKFLOW_URL
19 | if: always()
20 | run: |
21 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
22 | echo ${WORKFLOW_URL}
23 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV
24 | - uses: JasonEtco/create-an-issue@v2
25 | if: failure()
26 | env:
27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
28 | with:
29 | filename: .github/DOCKER_ISSUE_TEMPLATE.md
30 |
--------------------------------------------------------------------------------
/.github/workflows/docs-auto-update.yml:
--------------------------------------------------------------------------------
1 | # This workflow, run daily, updates the Forest CLI usage docs, and submits a PR with the changes.
2 | name: Update Forest usage docs
3 |
4 | on:
5 | workflow_dispatch:
6 | schedule:
7 | - cron: "0 0 * * *"
8 |
9 | jobs:
10 | update-docs:
11 | runs-on: ubuntu-24.04-arm
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Run update script
15 | run: |
16 | cd docs/docs/users/reference
17 | ./generate_cli_md.sh docker > cli.md
18 | - uses: actions/setup-node@v4
19 | with:
20 | node-version: "18"
21 | - run: corepack enable
22 | - run: yarn --immutable
23 | - run: yarn md-fmt
24 | - name: Create Pull Request
25 | uses: peter-evans/create-pull-request@v7
26 | with:
27 | base: main
28 | branch: leshy/update-forest-docs
29 | token: ${{ secrets.ACTIONS_PAT }}
30 | commit-message: Update Forest CLI docs
31 | title: "[automated] Update Forest CLI docs"
32 | body: |
33 | ### Changes
34 | - Updates Forest CLI docs to the latest commit in the `main` branch.
35 |
--------------------------------------------------------------------------------
/.github/workflows/docs-check.yml:
--------------------------------------------------------------------------------
1 | name: Docs Check
2 |
3 | # Cancel workflow if there is a new change to the branch.
4 | concurrency:
5 | group: ${{ github.workflow }}-${{ github.ref }}
6 | cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
7 |
8 | on:
9 | push:
10 | branches:
11 | - main
12 | paths:
13 | - '**.md'
14 | - 'docs/**'
15 | - '.github/workflows/docs-check.yml'
16 | merge_group:
17 | pull_request:
18 | branches:
19 | - main
20 | paths:
21 | - '**.md'
22 | - 'docs/**'
23 | - '.github/workflows/docs-check.yml'
24 |
25 | jobs:
26 | docs-check:
27 | name: Check
28 | runs-on: ubuntu-24.04-arm
29 | defaults:
30 | run:
31 | working-directory: ./docs
32 | steps:
33 | - uses: actions/checkout@v4
34 | - uses: actions/setup-node@v4
35 | with:
36 | node-version: 18
37 | # See https://github.com/actions/setup-node/issues/1027
38 | # cache: yarn
39 | - run: corepack enable
40 | - run: make format-spellcheck-dictionary-check
41 | - run: yarn --immutable
42 | - run: yarn typecheck
43 | - run: yarn spellcheck
44 | - run: yarn format-check
45 | - run: yarn build
46 |
--------------------------------------------------------------------------------
/.github/workflows/docs-deploy.yml:
--------------------------------------------------------------------------------
1 | name: Docs Deploy
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | paths:
8 | - 'docs/**'
9 | - '.github/workflows/docs-deploy.yml'
10 | merge_group:
11 | pull_request:
12 | branches:
13 | - main
14 | paths:
15 | - 'docs/**'
16 | - '.github/workflows/docs-deploy.yml'
17 |
18 | permissions:
19 | contents: read
20 | deployments: write
21 | pull-requests: write
22 |
23 | jobs:
24 | docs-deploy:
25 | name: Deploy to Cloudflare Pages
26 | runs-on: ubuntu-24.04-arm
27 | defaults:
28 | run:
29 | working-directory: ./docs
30 | steps:
31 | - uses: actions/checkout@v4
32 | - uses: actions/setup-node@v4
33 | with:
34 | node-version: "18"
35 | # See https://github.com/actions/setup-node/issues/1027
36 | # cache: yarn
37 | - run: corepack enable
38 | - run: yarn --immutable
39 | - run: yarn run build
40 |
41 | - name: Deploy
42 | uses: cloudflare/wrangler-action@v3
43 | with:
44 | apiToken: ${{ secrets.CLOUDFLARE_PAGES_API_TOKEN }}
45 | accountId: ${{ secrets.CLOUDFLARE_PAGES_ACCOUNT_ID }}
46 | command: pages deploy ./docs/build --project-name=forest-docs
47 | gitHubToken: ${{ secrets.GITHUB_TOKEN }}
48 |
--------------------------------------------------------------------------------
/.github/workflows/link-check.yml:
--------------------------------------------------------------------------------
1 | name: Link Checker
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: "0 0 * * *"
7 | push:
8 | branches:
9 | - main
10 | paths:
11 | - "**.md"
12 | - "**.mdx"
13 | - "**.html"
14 | - ".github/workflows/link-check.yml"
15 | merge_group:
16 | pull_request:
17 | branches:
18 | - main
19 | paths:
20 | - "**.md"
21 | - "**.mdx"
22 | - "**.html"
23 | - ".github/workflows/link-check.yml"
24 |
25 | jobs:
26 | link-check:
27 | name: Link Check
28 | runs-on: ubuntu-24.04-arm
29 | permissions:
30 | contents: read
31 | issues: write
32 | steps:
33 | - uses: actions/checkout@v4
34 |
35 | - name: Link Checker
36 | id: lychee
37 | uses: lycheeverse/lychee-action@v2
38 | with:
39 | args: --user-agent "curl/8.5.0" --no-progress --config ./.config/lychee.toml './**/*.md' './**/*.mdx' './**/*.html'
40 |
41 | - name: Create Issue From File
42 | if: env.lychee_exit_code != 0 && github.event_name == 'schedule'
43 | uses: peter-evans/create-issue-from-file@v5
44 | with:
45 | title: Link Checker Report
46 | content-filepath: ./lychee-report.md
47 |
48 | - name: Fail job on error
49 | if: env.lychee_exit_code != 0 && github.event_name != 'schedule'
50 | run: exit 1
51 |
--------------------------------------------------------------------------------
/.github/workflows/rpc_test_repeat.yml:
--------------------------------------------------------------------------------
1 | name: RPC tests on repeat
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | image:
7 | description: 'Forest image to use'
8 | required: false
9 | default: 'ghcr.io/chainsafe/forest:edge-fat'
10 | type: string
11 |
12 | schedule:
13 | # Run every day at midnight
14 | - cron: 0 0 * * *
15 |
16 | env:
17 | SHELL_IMAGE: busybox
18 | SCRIPT_TIMEOUT_MINUTES: 30
19 |
20 | jobs:
21 | calibnet-rpc-checks:
22 | strategy:
23 | fail-fast: false
24 | matrix:
25 | # GH Actions do not support running jobs in a loop.
26 | # This is a workaround to run the same job 100 times.
27 | x: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
28 | y: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
29 | name: Calibnet RPC checks
30 | runs-on: ubuntu-24.04-arm
31 | steps:
32 | - uses: actions/checkout@v4
33 | - name: Run api compare tests
34 | shell: bash
35 | run: |
36 | IMAGE=${{ github.event.inputs.image }}
37 | if [ -z "$IMAGE" ]; then
38 | IMAGE="ghcr.io/chainsafe/forest:edge-fat"
39 | fi
40 | echo "FROM $IMAGE" > Dockerfile-RPC
41 | export FOREST_DOCKERFILE_OVERRIDE=Dockerfile-RPC
42 | ./scripts/tests/api_compare/setup.sh
43 | timeout-minutes: '${{ fromJSON(env.SCRIPT_TIMEOUT_MINUTES) }}'
44 | - name: Dump docker logs
45 | if: always()
46 | uses: jwalton/gh-docker-logs@v2
47 |
--------------------------------------------------------------------------------
/.github/workflows/snapshot-parity.yml:
--------------------------------------------------------------------------------
1 | name: Snapshot parity test
2 | on:
3 | workflow_dispatch:
4 | schedule:
5 | - cron: "0 0 * * 0" # Runs at 00:00, only on Sunday
6 | jobs:
7 | snapshot-parity:
8 | name: Snapshot parity test
9 | runs-on: buildjet-4vcpu-ubuntu-2204
10 | steps:
11 | - name: Checkout Sources
12 | uses: actions/checkout@v4
13 | - name: Run snapshot parity test
14 | run: ./scripts/tests/snapshot_parity/setup.sh
15 | timeout-minutes: 60
16 | - name: Dump docker logs
17 | if: always()
18 | uses: jwalton/gh-docker-logs@v2
19 | - name: Set WORKFLOW_URL
20 | if: always()
21 | run: |
22 | export WORKFLOW_URL="${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
23 | echo ${WORKFLOW_URL}
24 | echo "WORKFLOW_URL=${WORKFLOW_URL}" >> $GITHUB_ENV
25 | - uses: JasonEtco/create-an-issue@v2
26 | if: github.ref == 'refs/heads/main' && failure()
27 | env:
28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
29 | with:
30 | filename: .github/SNAPSHOT_PARITY_ISSUE_TEMPLATE.md
31 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | target
2 |
--------------------------------------------------------------------------------
/.yarnrc.yml:
--------------------------------------------------------------------------------
1 | nodeLinker: node-modules
2 |
--------------------------------------------------------------------------------
/FUNDING.json:
--------------------------------------------------------------------------------
1 | {
2 | "drips": {
3 | "filecoin": {
4 | "ownedBy": "0xb4713f39476841fAF0ea5a555d0b1d451e6B05A1"
5 | }
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
19 | THE SOFTWARE.
20 |
--------------------------------------------------------------------------------
/build/bootstrap/butterflynet:
--------------------------------------------------------------------------------
1 | /dnsaddr/bootstrap.butterfly.fildev.network
2 |
--------------------------------------------------------------------------------
/build/bootstrap/calibnet:
--------------------------------------------------------------------------------
1 | /dns/bootstrap.calibration.filecoin.chain.love/tcp/1237/p2p/12D3KooWQPYouEAsUQKzvFUA9sQ8tz4rfpqtTzh2eL6USd9bwg7x
2 | /dns/bootstrap-calibnet-0.chainsafe-fil.io/tcp/34000/p2p/12D3KooWABQ5gTDHPWyvhJM7jPhtNwNJruzTEo32Lo4gcS5ABAMm
3 | /dns/bootstrap-calibnet-1.chainsafe-fil.io/tcp/34000/p2p/12D3KooWS3ZRhMYL67b4bD5XQ6fcpTyVQXnDe8H89LvwrDqaSbiT
4 | /dns/bootstrap-calibnet-2.chainsafe-fil.io/tcp/34000/p2p/12D3KooWEiBN8jBX8EBoM3M47pVRLRWV812gDRUJhMxgyVkUoR48
5 | /dns/bootstrap-archive-calibnet-0.chainsafe-fil.io/tcp/1347/p2p/12D3KooWLcRpEfmUq1fC8vfcLnKc1s161C92rUewEze3ALqCd9yJ
6 |
--------------------------------------------------------------------------------
/build/bootstrap/mainnet:
--------------------------------------------------------------------------------
1 | /dns/bootstrap.filecoin.chain.love/tcp/1235/p2p/12D3KooWBF8cpp65hp2u9LK5mh19x67ftAam84z9LsfaquTDSBpt
2 | /dns/bootstrap-venus.mainnet.filincubator.com/tcp/8888/p2p/QmQu8C6deXwKvJP2D8B6QGyhngc3ZiDnFzEHBDx8yeBXST
3 | /dns/bootstrap-mainnet-0.chainsafe-fil.io/tcp/34000/p2p/12D3KooWKKkCZbcigsWTEu1cgNetNbZJqeNtysRtFpq7DTqw3eqH
4 | /dns/bootstrap-mainnet-1.chainsafe-fil.io/tcp/34000/p2p/12D3KooWGnkd9GQKo3apkShQDaq1d6cKJJmsVe6KiQkacUk1T8oZ
5 | /dns/bootstrap-mainnet-2.chainsafe-fil.io/tcp/34000/p2p/12D3KooWHQRSDFv4FvAjtU32shQ7znz7oRbLBryXzZ9NMK2feyyH
6 | /dns/n1.mainnet.fil.devtty.eu/udp/443/quic-v1/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F
7 | /dns/n1.mainnet.fil.devtty.eu/tcp/443/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F
8 | /dns/n1.mainnet.fil.devtty.eu/udp/443/quic-v1/webtransport/certhash/uEiAWlgd8EqbNhYLv86OdRvXHMosaUWFFDbhgGZgCkcmKnQ/certhash/uEiAvtq6tvZOZf_sIuityDDTyAXDJPfXSRRDK2xy9UVPsqA/p2p/12D3KooWAke3M2ji7tGNKx3BQkTHCyxVhtV1CN68z6Fkrpmfr37F
9 |
--------------------------------------------------------------------------------
/build/vendored-docs-redirect.index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
--------------------------------------------------------------------------------
/deny.toml:
--------------------------------------------------------------------------------
1 | [advisories]
2 | ignore = [
3 | # Unmaintained crates that fvm2 requires, and will not change for
4 | # compatibility/consensus reasons - see
5 | # https://github.com/filecoin-project/ref-fvm/issues/1843
6 | "RUSTSEC-2022-0061", # parity-wasm is deprecated
7 | "RUSTSEC-2024-0436", # paste is unmaintained
8 | ]
9 |
10 | [licenses]
11 | allow = [
12 | "Apache-2.0",
13 | "Apache-2.0 WITH LLVM-exception",
14 | "BSD-2-Clause",
15 | "BSD-3-Clause",
16 | "CC0-1.0",
17 | "ISC",
18 | "MIT",
19 | "Unicode-3.0",
20 | "Unlicense",
21 | "Zlib",
22 | ]
23 |
24 | exceptions = [
25 | { allow = [
26 | "CDLA-Permissive-2.0",
27 | ], crate = "webpki-roots" },
28 | { allow = [
29 | "CDLA-Permissive-2.0",
30 | ], crate = "webpki-root-certs" },
31 | { allow = [
32 | "MPL-2.0",
33 | ], crate = "option-ext" },
34 | { allow = [
35 | "MPL-2.0",
36 | ], crate = "colored" },
37 | { allow = [
38 | "MPL-2.0",
39 | ], crate = "cbindgen" },
40 | { allow = [
41 | "MPL-2.0",
42 | ], crate = "attohttpc" },
43 | ]
44 |
45 | [[licenses.clarify]]
46 | crate = "ring"
47 | expression = "MIT AND ISC AND OpenSSL"
48 | license-files = [{ path = "LICENSE", hash = 0xbd0eed23 }]
49 |
50 | [bans]
51 | multiple-versions = "allow"
52 |
--------------------------------------------------------------------------------
/docs/.bookignore:
--------------------------------------------------------------------------------
1 | README.md
2 | .github
3 | .spellcheck.yml
4 | LICENSE
--------------------------------------------------------------------------------
/docs/.gitattributes:
--------------------------------------------------------------------------------
1 | docs/reference/cli.md linguist-generated=true
2 |
--------------------------------------------------------------------------------
/docs/.spellcheck.yml:
--------------------------------------------------------------------------------
1 | matrix:
2 | - name: Markdown
3 | aspell:
4 | lang: en
5 | dictionary:
6 | wordlists:
7 | - .wordlist.txt
8 | encoding: utf-8
9 | pipeline:
10 | - pyspelling.filters.markdown:
11 | - pyspelling.filters.html:
12 | comments: false
13 | ignores:
14 | - code
15 | - pre
16 | sources:
17 | - "**/*.md"
18 | default_encoding: utf-8
19 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | SPELLCHECK_DICTIONARY=./dictionary.txt
2 | format-spellcheck-dictionary:
3 | @cat $(SPELLCHECK_DICTIONARY) | sort --ignore-case | uniq > $(SPELLCHECK_DICTIONARY).tmp
4 | @mv $(SPELLCHECK_DICTIONARY).tmp $(SPELLCHECK_DICTIONARY)
5 |
6 | format-spellcheck-dictionary-check:
7 | @cat $(SPELLCHECK_DICTIONARY) | sort --ignore-case | uniq > $(SPELLCHECK_DICTIONARY).tmp
8 | @diff $(SPELLCHECK_DICTIONARY) $(SPELLCHECK_DICTIONARY).tmp
9 | @rm $(SPELLCHECK_DICTIONARY).tmp
10 |
--------------------------------------------------------------------------------
/docs/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | presets: [require.resolve("@docusaurus/core/lib/babel/preset")],
3 | };
4 |
--------------------------------------------------------------------------------
/docs/devSidebars.js:
--------------------------------------------------------------------------------
1 | const sidebars = {
2 | userSidebar: [{ type: "autogenerated", dirName: "." }],
3 | };
4 |
5 | module.exports = sidebars;
6 |
--------------------------------------------------------------------------------
/docs/dictionary.txt:
--------------------------------------------------------------------------------
1 | 2k
2 | APIs
3 | backend
4 | benchmarking
5 | blockstore
6 | Butterflynet
7 | Calibnet
8 | calibnet
9 | calibnet-related
10 | cardinality
11 | ChainSafe
12 | chainsafe
13 | ChainSafe's
14 | changelog
15 | CIDs
16 | CLI
17 | cli
18 | Cloudflare
19 | codebase
20 | config
21 | Datacap
22 | datacap
23 | devnet
24 | Devops
25 | Devs
26 | DHT
27 | DigitalOcean
28 | Drand
29 | Ethereum
30 | F3
31 | f3
32 | f3-sidecar
33 | FFI
34 | FIL
35 | fil
36 | Filecoin
37 | filecoin-project
38 | Filfox
39 | FilOz
40 | FIP
41 | FIPs
42 | FVM
43 | GC
44 | GiB
45 | Github
46 | Grafana
47 | hardcoded
48 | hotfix
49 | ie.
50 | Implementers
51 | implementers
52 | io
53 | IPFS
54 | JSON
55 | JSON-RPC
56 | JWT
57 | JWTs
58 | keypair
59 | keystore
60 | Kubernetes
61 | kubernetes
62 | Linux
63 | Liveness
64 | liveness
65 | localhost
66 | localhost's
67 | M1
68 | M2
69 | macOS
70 | Mainnet
71 | mainnet
72 | multiaddress
73 | namespace
74 | NV22
75 | NV23
76 | NV24
77 | NVMe
78 | onwards
79 | Organisation
80 | P2P
81 | p2p
82 | performant
83 | pre-compiled
84 | preload
85 | preloaded
86 | Q4
87 | README
88 | RNG
89 | Roadmap
90 | roadmap
91 | RPC
92 | rustup
93 | S3-compatible
94 | semver
95 | serverless
96 | stateroots
97 | subcommands
98 | swappiness
99 | TabItem
100 | TBD
101 | Terraform
102 | testnet
103 | tipset
104 | tipsets
105 | V0
106 | V1
107 | VPS
108 | WIP
109 |
--------------------------------------------------------------------------------
/docs/docs/developers/guides/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Guides"
3 | }
4 |
--------------------------------------------------------------------------------
/docs/docs/developers/introduction.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | hide_title: true
4 | sidebar_position: 1
5 | slug: /
6 | ---
7 |
8 | ## Developer Documentation
9 |
10 | :::warning
11 |
12 | This documentation is intended for contributors to the Forest codebase. If you are a user of Forest you might want
13 | the [user documentation](/).
14 |
15 | :::
16 |
17 | Welcome to the Forest developer documentation!
18 |
--------------------------------------------------------------------------------
/docs/docs/users/getting_started/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Getting Started",
3 | "position": 2
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/getting_started/hardware-reqs.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Hardware Requirements
3 | sidebar_position: 1
4 | ---
5 |
6 | Forest is designed to be lightweight enough to run on consumer hardware.
7 |
8 | | | Minimum | Recommended | Notes |
9 | | ---------- | ------- | ----------- | --------------------------------------------------------- |
10 | | CPU | 4-core | 8-core | |
11 | | Memory | 16 GiB | 32 GiB | State migrations can require increased amounts of memory. |
12 | | Disk Space | 128 GiB | 256 GiB | NVMe recommended. Snapshots can require ~70+ GiB each. |
13 |
--------------------------------------------------------------------------------
/docs/docs/users/getting_started/syncing.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Syncing A Node
3 | sidebar_position: 3
4 | ---
5 |
6 | :::info
7 |
8 | All nodes joining the network are recommended to sync from a snapshot. This is the default behavior of Forest.
9 |
10 | Syncing from genesis (tipset 0) is generally infeasible.
11 |
12 | :::
13 |
14 | Once started, Forest will connect to the bootstrap peers and in parallel fetch the latest snapshot from [Forest's snapshot service](../knowledge_base/snapshot_service.md). Once the snapshot is downloaded, it will be loaded into the node, and then syncing will continue by utilizing its peers.
15 |
16 | ### Mainnet
17 |
18 | ```shell
19 | forest
20 | ```
21 |
22 | ### Calibnet
23 |
24 | ```shell
25 | forest --chain calibnet
26 | ```
27 |
28 | ## Monitoring Sync Status
29 |
30 | In another shell:
31 |
32 | ```shell
33 | forest-cli sync status
34 | ```
35 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Guides",
3 | "position": 3
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/advanced/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Advanced",
3 | "position": 7
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/advanced/generating_snapshots.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Generating Snapshots
3 | sidebar_position: 1
4 | ---
5 |
6 | # Snapshot exporting 📸
7 |
8 | ## Hardware requirements
9 |
10 | To export a mainnet snapshot, you need a setup with at least 16 GB of RAM. On a
11 | machine with rapid NVMe, the default export should take around 30
12 | minutes.
13 |
14 | The requirements for calibnet snapshots are lower, but it is still recommended
15 | to have at least 8 GB of RAM. The export should take less than a minute.
16 |
17 | ## Running the node
18 |
19 | Wait until the node is fully synced. You can use the command:
20 |
21 | ```shell
22 | forest-cli sync wait
23 | ```
24 |
25 | ## Exporting the snapshot
26 |
27 | To export the snapshot with the defaults, run:
28 |
29 | ```shell
30 | forest-cli snapshot export
31 | ```
32 |
33 | The snapshot will be exported with 2000 recent stateroots to the current directory. The snapshot will be
34 | compressed.
35 |
36 | For mainnet, you should expect a file of over 70 GB. For calibnet, you should
37 | expect a file of over 5 GB. Note that the snapshot size grows over time.
38 |
39 | ### CLI reference
40 |
41 | Details on the `forest-cli snapshot export` command and its subcommands can be found at the [CLI reference](../../reference/cli.md#forest-cli-snapshot).
42 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/monitoring/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Monitoring",
3 | "position": 6
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/monitoring/best_practices.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Best Practices
3 | ---
4 |
5 | ### Node
6 |
7 | - **monitor disk space usage**, especially the database size - it can grow quickly, especially around network upgrades
8 | - **monitor the memory usage** - the node can use a lot of memory, especially during sync. Don't let it run too close to the limit
9 | - **monitor the chain sync status** - on average, the node should be able to sync two epochs per minute
10 | - **monitor the number of peers** - the more peers, the better. If a node has no peers, it cannot sync
11 | - **monitor the logs for errors and warnings** - they can indicate potential issues
12 |
13 | ### Monitoring
14 |
15 | - **monitor the monitoring system** - if the monitoring system goes down, you won't know if the node is down
16 | - **set up alerts for critical metrics** - disk space, memory usage, sync status, etc.
17 | - **ensure the persistence of the monitoring system** - if the monitoring system loses data, you won't be able to diagnose issues
18 | - **don't expose monitoring endpoints to the internet** - they are not secured and can leak sensitive information
19 | - **don't set the log levels too high** - this can generate a lot of data and slow down the node
20 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/monitoring/logs.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Logs
3 | ---
4 |
5 | Logs are written to standard output by default. They can be written to rolling log files with the `--log-dir` flag. The log level can be set with the `RUST_LOG` environment variable. The defaults are generally sufficient for most users but can be adjusted to provide more or less information. Different modules can have different log levels, and the log level can be set to `trace`, `debug`, `info`, `warn`, `error`, or `off`.
6 |
7 | ```bash
8 | RUST_LOG=info,forest_filecoin=debug forest --chain calibnet
9 | ```
10 |
11 | Sample output:
12 |
13 | ```console
14 | 2024-08-28T12:49:59.830012Z INFO forest::daemon::main: Using default calibnet config
15 | 2024-08-28T12:49:59.834109Z INFO forest::daemon: Starting Forest daemon, version 0.19.2+git.74fd562acce
16 | 2024-08-28T12:49:59.834123Z DEBUG forest::daemon: Increased file descriptor limit from 1024 to 8192
17 | 2024-08-28T12:49:59.834164Z DEBUG forest::libp2p::keypair: Recovered libp2p keypair from /home/rumcajs/.local/share/forest/libp2p/keypair
18 | ```
19 |
20 | :::tip
21 | Enabling `trace` or `debug` logging can generate gargantuan log files (gigabytes per minute). Make sure to adjust the log level to your needs.
22 | :::
23 |
24 | Sending logs to Loki is also possible. Pass `--loki` to the Forest daemon to enable it. The logs are sent to Loki via the HTTP API. The Loki endpoint can be set with the `--loki-endpoint` flag. The default endpoint is `http://localhost:3100`.
25 |
--------------------------------------------------------------------------------
/docs/docs/users/guides/running_with_curio.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Running Forest With Curio
3 | sidebar_position: 99
4 | ---
5 |
6 | :::note
7 |
8 | [Curio](https://curiostorage.org/) is the successor to Lotus-miner under active development by Curio Storage Inc. Compatibility with Forest is being worked on but has not yet been released. Check back in Q4 2024 for more updates.
9 |
10 | :::
11 |
12 | Coming soon!™
13 |
--------------------------------------------------------------------------------
/docs/docs/users/knowledge_base/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Knowledge Base",
3 | "position": 5
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/reference/_category_.json:
--------------------------------------------------------------------------------
1 | {
2 | "label": "Reference",
3 | "position": 4
4 | }
5 |
--------------------------------------------------------------------------------
/docs/docs/users/reference/generate_cli_md.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ "$1" == "local" ]; then
4 | ENVIRONMENT="local"
5 | elif [ "$1" == "docker" ]; then
6 | ENVIRONMENT="docker"
7 | else
8 | echo "Usage: $0 <local|docker>"
9 | exit 1
10 | fi
11 |
12 | cat <
22 |
23 | This document lists every command line option and sub-command for Forest.
24 | EOF
25 |
26 | if [ "$ENVIRONMENT" == "local" ]; then
27 | bash ./cli.sh
28 | else
29 | docker run --rm --entrypoint /bin/bash -v "$(pwd)":/forest ghcr.io/chainsafe/forest:edge-fat /forest/cli.sh
30 | fi
31 |
--------------------------------------------------------------------------------
/docs/static/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/.nojekyll
--------------------------------------------------------------------------------
/docs/static/img/chainsafe_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/chainsafe_logo.png
--------------------------------------------------------------------------------
/docs/static/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/favicon.ico
--------------------------------------------------------------------------------
/docs/static/img/filecoin_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/filecoin_logo.png
--------------------------------------------------------------------------------
/docs/static/img/logo-with-text.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/logo-with-text.png
--------------------------------------------------------------------------------
/docs/static/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/docs/static/img/logo.png
--------------------------------------------------------------------------------
/docs/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | // This file is not used in compilation. It is here just for a nice editor experience.
3 | "extends": "@docusaurus/tsconfig",
4 | "compilerOptions": {
5 | "baseUrl": ".",
6 | "skipLibCheck": true
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/docs/userSidebars.js:
--------------------------------------------------------------------------------
1 | const sidebars = {
2 | userSidebar: [{ type: "autogenerated", dirName: "." }],
3 | };
4 |
5 | module.exports = sidebars;
6 |
--------------------------------------------------------------------------------
/documentation/.gitignore:
--------------------------------------------------------------------------------
1 | # Build artifacts
2 | /book
3 |
--------------------------------------------------------------------------------
/documentation/book.toml:
--------------------------------------------------------------------------------
1 | [book]
2 | authors = []
3 | language = "en"
4 | multilingual = false
5 | src = "src"
6 |
7 | [build]
8 | create-missing = false
9 |
10 | [output.html]
11 |
12 | [output.linkcheck]
13 |
--------------------------------------------------------------------------------
/documentation/src/SUMMARY.md:
--------------------------------------------------------------------------------
1 | # Summary
2 |
3 | # User Guide
4 |
5 | - [Introduction](./introduction.md)
6 | - [Troubleshooting](./trouble_shooting.md)
7 | - [Offline Forest](./offline-forest.md)
8 |
9 | # Developer documentation
10 |
11 | - [Developer documentation](./developer_documentation/introduction.md)
12 | - [Database migrations](./developer_documentation/database_migrations.md)
13 | - [Local GH Actions](./developer_documentation/local_actions.md)
14 | - [Memory analysis](./developer_documentation/memory-analysis.md)
15 | - [Release checklist](./developer_documentation/release_checklist.md)
16 | - [State migration guide](./developer_documentation/state_migration_guide.md)
17 | - [Test plan](./developer_documentation/test_plan.md)
18 | - [Devnet Notes](./developer_documentation/devnet_notes.md)
19 | - [Archie and Fuzzy](./developer_documentation/archie_and_fuzzy.md)
20 | - [RPC API Compatibility](./developer_documentation/rpc_api_compatibility.md)
21 | - [ChainMuxer/TipsetProcessor state machine](./developer_documentation/chain_muxer_state_machine.md)
22 |
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/chain_muxer_state_machine.md:
--------------------------------------------------------------------------------
1 | Date: 2023-10-21
2 |
3 | `ChainMuxer` state transitions:
4 |
5 | ```mermaid
6 | flowchart TD
7 | A[Idle]
8 | B[Connect]
9 | C[Bootstrap]
10 | D[Follow]
11 |
12 | A -->|sync| B
13 | A -->|skip| D
14 | B -->|behind| C
15 | B -->|in-sync| D
16 | D -->|on-error| A
17 | C --> A
18 | ```
19 |
20 | Once the `ChainMuxer` is in `follow` mode, it passes control to the
21 | `TipsetProcessor`. A typical start-up sequence looks like this:
22 |
23 | 1. `idle` state: Immediately switch to `connect` state.
24 | 2. `connect` state: Wait for 5 tipsets from peers. If we're within 1 epoch of
25 | the heaviest seen tipset, switch to `follow` state. Otherwise, switch to
26 | `bootstrap` state.
27 | 3. `bootstrap` state: Fetch tipsets between the heaviest seen tipset and the
28 | last validated tipset. Validate all of those tipsets and return to `idle`
29 | state.
30 | 4. `follow` state: Pass control to the `TipsetProcessor` state machine.
31 |
32 | `TipsetProcessor` state transitions:
33 |
34 | ```mermaid
35 | flowchart TD
36 | A[Idle]
37 | B[FindRange]
38 | C[SyncRange]
39 |
40 | A -->|new tipset group| B
41 | B --> C
42 | C --> A
43 | C --> B
44 | ```
45 |
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/bottom_up.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/bottom_up.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/caller_callee.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/caller_callee.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/consumed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/consumed.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/flamegraph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/flamegraph.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/sizes.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/sizes.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/heaptrack/summary.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/developer_documentation/heaptrack/summary.png
--------------------------------------------------------------------------------
/documentation/src/developer_documentation/introduction.md:
--------------------------------------------------------------------------------
1 | # Developer documentation
2 |
3 | In this section you will find resources targeted for Forest developers.
4 |
--------------------------------------------------------------------------------
/documentation/src/img/forest_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/documentation/src/img/forest_logo.png
--------------------------------------------------------------------------------
/documentation/src/trouble_shooting.md:
--------------------------------------------------------------------------------
1 | # Troubleshooting
2 |
3 | ## Common Issues
4 |
5 | #### Jemalloc issues on Apple Silicon macs
6 |
7 | Forest is compiled with `jemalloc` as a default allocator. If you are having
8 | problems running or compiling Forest, use this checklist:
9 |
10 | 1. Make sure you are using an arm64 version of homebrew; this could be a problem
11 | one inherits when migrating from an Intel Mac to Apple Silicon:
12 | [Stackoverflow example](https://stackoverflow.com/a/68443301).
13 | 2. Make sure your default host is set to `aarch64-apple-darwin` via
14 | `rustup set default-host aarch64-apple-darwin`.
15 | 3. This could result in various errors related to the fact that you still have
16 | some of the libraries symlinked to `/usr/local/lib` from an intel Homebrew
17 | installation. The easiest fix for this is:
18 | - Remove the libraries in question from `/usr/local/lib`.
19 | - Add `export LIBRARY_PATH=/opt/homebrew/lib` to your bash profile.
20 | - Source the new bash profile.
21 |
--------------------------------------------------------------------------------
/f3-sidecar/.gitignore:
--------------------------------------------------------------------------------
1 | # The default f3 database folder
2 | /f3-data
3 | # The binary
4 | f3-sidecar
5 |
--------------------------------------------------------------------------------
/f3-sidecar/manifest.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | _ "embed"
5 | "encoding/json"
6 |
7 | "github.com/filecoin-project/go-f3/gpbft"
8 | "github.com/filecoin-project/go-f3/manifest"
9 | )
10 |
11 | var Network2PredefinedManifestMappings map[gpbft.NetworkName]*manifest.Manifest = make(map[gpbft.NetworkName]*manifest.Manifest)
12 |
13 | func init() {
14 | for _, bytes := range [][]byte{F3ManifestBytes2K, F3ManifestBytesButterfly, F3ManifestBytesCalibnet, F3ManifestBytesMainnet} {
15 | m := loadManifest(bytes)
16 | Network2PredefinedManifestMappings[m.NetworkName] = m
17 | }
18 | }
19 |
// Embedded predefined F3 manifests, one JSON file per supported network.
// Each go:embed directive must stay immediately above its var declaration.

//go:embed f3manifest_2k.json
var F3ManifestBytes2K []byte

//go:embed f3manifest_butterfly.json
var F3ManifestBytesButterfly []byte

//go:embed f3manifest_calibnet.json
var F3ManifestBytesCalibnet []byte

//go:embed f3manifest_mainnet.json
var F3ManifestBytesMainnet []byte
31 |
32 | func loadManifest(bytes []byte) *manifest.Manifest {
33 | var m manifest.Manifest
34 | if err := json.Unmarshal(bytes, &m); err != nil {
35 | logger.Panicf("failed to unmarshal F3 manifest: %s", err)
36 | }
37 | if err := m.Validate(); err != nil {
38 | logger.Panicf("invalid F3 manifest: %s", err)
39 | }
40 | return &m
41 | }
42 |
--------------------------------------------------------------------------------
/f3-sidecar/types.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "time"
7 |
8 | "github.com/filecoin-project/go-f3/gpbft"
9 | )
10 |
// TipSet is the tipset representation exchanged with the Rust side of the
// sidecar via JSON. The accessor methods below give it the shape expected
// by go-f3/gpbft.
type TipSet struct {
	TsKey []byte `json:"key"`             // serialized tipset key bytes
	TsBeacon []byte `json:"beacon"`       // beacon value for this tipset (opaque bytes)
	TsEpoch int64 `json:"epoch"`          // chain epoch
	TsTimestamp int64 `json:"timestamp"`  // seconds since the Unix epoch (see Timestamp)
}
17 |
// Key returns the tipset key as a gpbft.TipSetKey (a plain byte-slice cast).
func (ts TipSet) Key() gpbft.TipSetKey {
	return gpbft.TipSetKey(ts.TsKey)
}

// Beacon returns the raw beacon bytes carried by this tipset.
func (ts TipSet) Beacon() []byte {
	return ts.TsBeacon
}

// Epoch returns the chain epoch of this tipset.
func (ts TipSet) Epoch() int64 {
	return ts.TsEpoch
}

// Timestamp interprets the stored timestamp as seconds since the Unix epoch.
func (ts TipSet) Timestamp() time.Time {
	return time.Unix(ts.TsTimestamp, 0)
}
33 |
34 | func (ts TipSet) String() string {
35 | bytes, err := json.Marshal(&ts)
36 | if err != nil {
37 | return fmt.Sprintf("%s", err)
38 | }
39 | return string(bytes)
40 | }
41 |
--------------------------------------------------------------------------------
/f3-sidecar/utils.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import "github.com/ipfs/go-cid"
4 |
5 | var CID_UNDEF_RUST = cid.MustParse("baeaaaaa")
6 |
7 | func isCidDefined(c cid.Cid) bool {
8 | return c.Defined() && c != CID_UNDEF_RUST
9 | }
10 |
--------------------------------------------------------------------------------
/f3-sidecar/utils_test.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/ipfs/go-cid"
7 | "github.com/stretchr/testify/require"
8 | )
9 |
10 | func TestIsCidDefined(t *testing.T) {
11 | require.NotEqual(t, cid.Undef, CID_UNDEF_RUST)
12 | require.False(t, isCidDefined(cid.Undef))
13 | require.False(t, isCidDefined(CID_UNDEF_RUST))
14 | require.True(t, isCidDefined(cid.MustParse("bafy2bzaceac6jbaeolcsbh7rawcshcvh2cokvxrgsh4sxg5yu34i5xllbfpw4")))
15 | }
16 |
--------------------------------------------------------------------------------
/go.work:
--------------------------------------------------------------------------------
1 | go 1.23.9
2 |
3 | use (
4 | ./f3-sidecar
5 | ./interop-tests/src/tests/go_app
6 | )
7 |
--------------------------------------------------------------------------------
/interop-tests/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "forest-interop-tests"
3 | version = "0.1.0"
4 | authors = ["ChainSafe Systems "]
5 | repository = "https://github.com/ChainSafe/forest"
6 | edition = "2021"
7 | license = "MIT OR Apache-2.0"
8 | description = "Interop tests for Forest."
9 | publish = false
10 |
11 | [dependencies]
12 |
13 | [dev-dependencies]
14 | anyhow = { workspace = true }
15 | cid = { workspace = true }
16 | flume = { workspace = true }
17 | forest = { package = "forest-filecoin", path = "../", default-features = false, features = [
18 | "interop-tests-private",
19 | "no-f3-sidecar",
20 | ] }
21 | futures = { workspace = true }
22 | libp2p = { workspace = true, features = [
23 | 'kad',
24 | 'identify',
25 | 'noise',
26 | 'yamux',
27 | 'tcp',
28 | 'tokio',
29 | 'macros',
30 | 'serde',
31 | 'rsa',
32 | 'ecdsa',
33 | 'ed25519',
34 | 'secp256k1',
35 | ] }
36 | libp2p-swarm-test = { workspace = true }
37 | multihash-codetable = { workspace = true }
38 | rust2go = { workspace = true }
39 | tokio = { workspace = true, features = ['full'] }
40 |
41 | [build-dependencies]
42 | rust2go = { workspace = true, features = ["build"] }
43 |
--------------------------------------------------------------------------------
/interop-tests/README.md:
--------------------------------------------------------------------------------
1 | This crate contains interop tests for Forest.
2 |
3 | To compile and run the interop tests, below dependencies are required in
4 | addition to the Rust toolchain.
5 |
6 | - [Go 1.23.x](https://go.dev/dl/) (the repository's `go.work` requires go 1.23.9)
7 |
8 | To run the interop tests
9 |
10 | ```
11 | cargo test
12 | ```
13 |
--------------------------------------------------------------------------------
/interop-tests/build.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Build script for the interop tests: (re)generates the rust2go FFI
/// bindings and builds the Go test application they bind to.
fn main() {
    // Rebuild whenever the Go sources of the test app change.
    println!("cargo::rerun-if-changed=src/tests/go_app");
    // SAFETY: this runs at the very start of the single-threaded build
    // script, before anything else could access the environment
    // concurrently (`set_var` is only unsound with concurrent env access).
    unsafe {
        // Disable Go workspace mode; `-tags=netgo` selects the pure-Go
        // network implementation for the embedded Go build.
        std::env::set_var("GOWORK", "off");
        std::env::set_var("GOFLAGS", "-tags=netgo");
    }
    // Regenerate the Go bindings (gen.go) from the Rust trait definitions
    // in go_ffi.rs, then build the Go side.
    rust2go::Builder::default()
        .with_go_src("./src/tests/go_app")
        .with_regen_arg(rust2go::RegenArgs {
            src: "./src/tests/go_ffi.rs".into(),
            dst: "./src/tests/go_app/gen.go".into(),
            ..Default::default()
        })
        .build();
}
19 |
--------------------------------------------------------------------------------
/interop-tests/src/lib.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | #[cfg(test)]
5 | mod tests;
6 |
--------------------------------------------------------------------------------
/interop-tests/src/tests/go_app/.gitignore:
--------------------------------------------------------------------------------
1 | gen.go
2 |
--------------------------------------------------------------------------------
/interop-tests/src/tests/go_app/common.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import "os"
4 |
5 | const ListenAddr = "/ip4/127.0.0.1/tcp/0"
6 |
7 | func checkError(err error) {
8 | if err != nil {
9 | panic(err)
10 | }
11 | }
12 |
// setGoDebugEnv relaxes Go runtime pointer checks (invalidptr, cgocheck)
// to avoid potential panics when memory crosses the Go/Rust FFI boundary.
// NOTE(review): the original "// See" comment lost its reference URL in a
// reformat — restore the link to the relevant issue if known.
func setGoDebugEnv() {
	os.Setenv("GODEBUG", "invalidptr=0,cgocheck=0")
}
18 |
--------------------------------------------------------------------------------
/interop-tests/src/tests/go_ffi.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Bindings generated by `rust2go` (driven from `build.rs`); lints are
/// silenced because this code is machine-generated.
pub mod binding {
    #![allow(warnings)]
    #![allow(clippy::indexing_slicing)]
    rust2go::r2g_include_binding!();
}

/// FFI surface of the Go Kademlia test node. The `r2g` attribute generates
/// the glue that forwards these calls to the Go implementation in
/// `src/tests/go_app`.
/// NOTE(review): `&String` (rather than `&str`) appears to be dictated by
/// the rust2go calling convention — confirm before changing signatures.
#[rust2go::r2g]
pub trait GoKadNode {
    /// Starts the Go node.
    fn run();

    /// Dials the peer at the given multiaddr.
    fn connect(multiaddr: &String);

    /// Returns the number of currently connected peers.
    fn get_n_connected() -> usize;
}

/// FFI surface of the Go Bitswap test node.
#[rust2go::r2g]
pub trait GoBitswapNode {
    /// Starts the Go node.
    fn run();

    /// Dials the peer at the given multiaddr.
    fn connect(multiaddr: &String);

    /// Requests the block with the given CID, reporting success as a bool.
    fn get_block(cid: &String) -> bool;
}
27 |
--------------------------------------------------------------------------------
/interop-tests/src/tests/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod go_ffi;
5 |
6 | mod bitswap_go_compat;
7 | mod kad_go_compat;
8 |
--------------------------------------------------------------------------------
/monitoring/docker-compose.yml:
--------------------------------------------------------------------------------
1 | # Docker compose file to start the metrics and monitoring stack for a local Forest node
2 | #
3 | # # Processes
4 | # - Prometheus server
5 | # - Loki server
6 | # - Grafana server
7 |
8 | version: "3.8"
9 |
10 | volumes:
11 | prometheus_data: {}
12 |
13 | services:
14 | prometheus:
15 | image: prom/prometheus
16 | command:
17 | - "--config.file=/etc/prometheus/prometheus.yml"
18 | - "--storage.tsdb.path=/prometheus"
19 | - "--web.console.libraries=/etc/prometheus/console_libraries"
20 | - "--web.console.templates=/etc/prometheus/consoles"
21 | - "--web.enable-lifecycle"
22 | volumes:
23 | - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
24 | - prometheus_data:/prometheus/
25 | restart: unless-stopped
26 | extra_hosts:
27 | - host.docker.internal:host-gateway
28 |
29 | loki:
30 | image: grafana/loki
31 | restart: unless-stopped
32 | ports:
33 | - "3100:3100"
34 |
35 | grafana:
36 | image: grafana/grafana
37 | depends_on:
38 | - prometheus
39 | - loki
40 | volumes:
41 | - ./grafana/provisioning/:/etc/grafana/provisioning
42 | - ./grafana/dashboards/:/etc/grafana/provisioning/dashboard-definitions
43 | restart: unless-stopped
44 | ports:
45 | - "3000:3000"
46 |
--------------------------------------------------------------------------------
/monitoring/grafana/dashboards/README.md:
--------------------------------------------------------------------------------
1 | # Preloaded Dashboards for Forest
2 |
3 | ## Dashboards
4 |
5 | - `forest`: The forest dashboard keeps track of process, syncing, and execution
6 | metrics
7 |
8 | ## Updating
9 |
10 | To update any dashboard, make changes to the dashboard in the Grafana web
11 | application, export the dashboard, and replace the dashboard JSON definition in
12 | this directory.
13 |
--------------------------------------------------------------------------------
/monitoring/grafana/provisioning/dashboards/dashboard.yml:
--------------------------------------------------------------------------------
1 | apiVersion: 1
2 |
3 | providers:
4 | - name: 'Prometheus'
5 | orgId: 1
6 | folder: ''
7 | type: file
8 | disableDeletion: false
9 | editable: false
10 | options:
11 | path: /etc/grafana/provisioning/dashboard-definitions
12 |
--------------------------------------------------------------------------------
/monitoring/prometheus/prometheus.yml:
--------------------------------------------------------------------------------
1 | global:
2 | scrape_interval: 5s
3 |
4 | scrape_configs:
5 | - job_name: 'forest'
6 | static_configs:
7 | - targets: ['host.docker.internal:6116']
8 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "root",
3 | "private": true,
4 | "devDependencies": {
5 | "@docusaurus/tsconfig": "^3.7.0",
6 | "prettier": "^3.5.3"
7 | },
8 | "scripts": {
9 | "prettier-version": "prettier --version",
10 | "md-fmt": "prettier --write '**/*.md'",
11 | "md-check": "prettier --check '**/*.md'"
12 | },
13 | "packageManager": "yarn@4.7.0"
14 | }
15 |
--------------------------------------------------------------------------------
/proto/bitswap_pb.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | package bitswap_pb;
4 |
5 | message Message {
6 |
7 | message Wantlist {
8 | enum WantType {
9 | Block = 0;
10 | Have = 1;
11 | }
12 |
13 | message Entry {
14 | bytes block = 1; // the block cid (cidV0 in bitswap 1.0.0, cidV1 in bitswap 1.1.0)
15 | int32 priority = 2; // the priority (normalized). default to 1
16 | bool cancel = 3; // whether this revokes an entry
17 | WantType wantType = 4; // Note: defaults to enum 0, ie Block
18 | bool sendDontHave = 5; // Note: defaults to false
19 | }
20 |
21 | repeated Entry entries = 1; // a list of wantlist entries
22 | bool full = 2; // whether this is the full wantlist. default to false
23 | }
24 |
25 | message Block {
26 | bytes prefix = 1; // CID prefix (cid version, multicodec and multihash prefix (type + length)
27 | bytes data = 2;
28 | }
29 |
30 | enum BlockPresenceType {
31 | Have = 0;
32 | DontHave = 1;
33 | }
34 | message BlockPresence {
35 | bytes cid = 1;
36 | BlockPresenceType type = 2;
37 | }
38 |
39 | Wantlist wantlist = 1;
40 | repeated bytes blocks = 2; // used to send Blocks in bitswap 1.0.0
41 | repeated Block payload = 3; // used to send Blocks in bitswap 1.1.0
42 | repeated BlockPresence blockPresences = 4;
43 | int32 pendingBytes = 5;
44 | }
45 |
--------------------------------------------------------------------------------
/rust-toolchain.toml:
--------------------------------------------------------------------------------
1 | [toolchain]
2 | channel = "1.87.0"
3 | components = ["clippy", "llvm-tools-preview", "rustfmt"]
4 |
--------------------------------------------------------------------------------
/scripts/add_license.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Checks if the source code contains required license and adds it if necessary.
# Returns 1 if there was a missing license, 0 otherwise.

# Expected header: the first two lines of the file with newlines stripped,
# i.e. the two copyright comment lines concatenated.
PAT_APA="^// Copyright 2019-2025 ChainSafe Systems// SPDX-License-Identifier: Apache-2.0, MIT$"

ret=0
# Iterate over all tracked, non-binary Rust sources, excluding vendored and
# generated files. Reading line-by-line (instead of word-splitting an
# unquoted $(...) expansion) keeps paths containing whitespace intact.
while IFS= read -r file; do
    header=$(head -2 "$file" | tr -d '\n')
    if ! grep -q "$PAT_APA" <<< "$header"; then
        echo "$file was missing header"
        # Build the fixed file in a unique temp file (the previous fixed
        # `temp` name in CWD was collision-prone), then overwrite the
        # original in place so its permissions are preserved.
        tmp=$(mktemp) || exit 2
        cat ./scripts/copyright.txt "$file" > "$tmp"
        cat "$tmp" > "$file"
        rm -f "$tmp"
        ret=1
    fi
done < <(git grep --cached -Il '' -- '*.rs' ':!*src/utils/encoding/fallback_de_ipld_dagcbor.rs' ':!*src/external/**/*.rs')

exit $ret
20 |
--------------------------------------------------------------------------------
/scripts/copyright.txt:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
--------------------------------------------------------------------------------
/scripts/db_params_hyperfine.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# Benchmarks snapshot import across database tuning parameters
# (chunk_size x buffer_capacity) with hyperfine, exporting the results to
# a Markdown table (db_tune_params.md).
set -euo pipefail
CHAIN=calibnet

# https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_full_calibnet_2023-04-07_450000.car
SNAPSHOT=filecoin_full_calibnet_2023-04-07_450000.car
if [ ! -f "$SNAPSHOT" ]
then
  aria2c -x 4 "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_full_calibnet_2023-04-07_450000.car"
fi

cargo build --release

# For some reason, cleaning the database with --cleanup gives me wildly inconsistent results.
# NOTE: the config file is written with `printf` because the benchmark
# command runs under whatever `sh` hyperfine spawns; plain `echo` only
# expands the `\n` escapes on some shells (dash yes, bash no).
hyperfine \
  --runs 5 \
  --parameter-list CHUNK_SIZE 1000,5000,10000,20000,40000,200000,500000 \
  --parameter-list BUFFER_CAPACITY 0,1,2,3 \
  --export-markdown db_tune_params.md \
  --command-name 'forest-import-{CHUNK_SIZE}-{BUFFER_CAPACITY}' \
  "printf '[client]\nchunk_size = {CHUNK_SIZE}\nbuffer_size = {BUFFER_CAPACITY}\n' > /tmp/forest.conf; \
  ./target/release/forest \
  --chain ${CHAIN} --config /tmp/forest.conf --rpc false --no-gc --encrypt-keystore false --halt-after-import \
  --import-snapshot ${SNAPSHOT}; \
  ./target/release/forest-tool db destroy --chain ${CHAIN} --force"
26 |
--------------------------------------------------------------------------------
/scripts/devnet-curio/.env:
--------------------------------------------------------------------------------
1 | LOTUS_IMAGE=ghcr.io/chainsafe/lotus-devnet:2024-12-06-2368695
2 | CURIO_IMAGE=ghcr.io/chainsafe/curio-devnet:2025-01-20-622bacd
3 | FOREST_DATA_DIR=/forest_data
4 | LOTUS_DATA_DIR=/lotus_data
5 | CURIO_REPO_PATH=/var/lib/curio
6 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters
7 | MINER_ACTOR_ADDRESS=t01000
8 | LOTUS_RPC_PORT=1234
9 | LOTUS_P2P_PORT=1235
10 | MINER_RPC_PORT=2345
11 | FOREST_RPC_PORT=3456
12 | FOREST_OFFLINE_RPC_PORT=3457
13 | F3_RPC_PORT=23456
14 | F3_FINALITY=10
15 | GENESIS_NETWORK_VERSION=25
16 | SHARK_HEIGHT=-10
17 | HYGGE_HEIGHT=-9
18 | LIGHTNING_HEIGHT=-8
19 | THUNDER_HEIGHT=-7
20 | WATERMELON_HEIGHT=-6
21 | DRAGON_HEIGHT=-5
22 | WAFFLE_HEIGHT=-4
23 | TUKTUK_HEIGHT=-3
24 | TEEP_HEIGHT=-2
25 | TARGET_HEIGHT=24
26 |
--------------------------------------------------------------------------------
/scripts/devnet-curio/curio.env:
--------------------------------------------------------------------------------
1 | LOTUS_PATH=/lotus_data/lotus-local-net
2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net
3 | LOTUS_SKIP_GENESIS_CHECK=_yes_
4 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http
5 | CURIO_REPO_PATH=/var/lib/curio
6 | CURIO_HARMONYDB_HOSTS=yugabyte
7 |
--------------------------------------------------------------------------------
/scripts/devnet-curio/forest_config.toml.tpl:
--------------------------------------------------------------------------------
1 | [client]
2 | encrypt_keystore = false
3 | data_dir = "/forest_data"
4 |
5 | [network]
6 | kademlia = false
7 | target_peer_count = 1
8 |
9 | # Note that this has to come last. The actual TOML file will have
10 | # the chain name appended.
11 | [chain]
12 | type = "devnet"
13 |
--------------------------------------------------------------------------------
/scripts/devnet-curio/lotus-miner.env:
--------------------------------------------------------------------------------
1 | LOTUS_PATH=/lotus_data/lotus-local-net
2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net
3 | LOTUS_SKIP_GENESIS_CHECK=_yes_
4 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http
5 |
--------------------------------------------------------------------------------
/scripts/devnet-curio/lotus.env:
--------------------------------------------------------------------------------
1 | LOTUS_PATH=/lotus_data/lotus-local-net
2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net
3 | LOTUS_SKIP_GENESIS_CHECK=_yes_
4 | LOTUS_API_LISTENADDRESS=/dns/lotus/tcp/1234/http
5 | LOTUS_LIBP2P_LISTENADDRESSES=/ip4/0.0.0.0/tcp/1235
6 |
--------------------------------------------------------------------------------
/scripts/devnet/.env:
--------------------------------------------------------------------------------
1 | LOTUS_IMAGE=ghcr.io/chainsafe/lotus-devnet:2025-04-04-824e369
2 | FOREST_DATA_DIR=/forest_data
3 | LOTUS_DATA_DIR=/lotus_data
4 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters
5 | MINER_ACTOR_ADDRESS=t01000
6 | LOTUS_RPC_PORT=1234
7 | LOTUS_P2P_PORT=1235
8 | MINER_RPC_PORT=2345
9 | FOREST_RPC_PORT=3456
10 | FOREST_OFFLINE_RPC_PORT=3457
11 | F3_RPC_PORT=23456
12 | F3_FINALITY=10
13 | GENESIS_NETWORK_VERSION=18
14 | SHARK_HEIGHT=-10
15 | HYGGE_HEIGHT=-9
16 | LIGHTNING_HEIGHT=3
17 | THUNDER_HEIGHT=6
18 | WATERMELON_HEIGHT=9
19 | DRAGON_HEIGHT=12
20 | WAFFLE_HEIGHT=18
21 | TUKTUK_HEIGHT=20
22 | TEEP_HEIGHT=22
23 | TOCK_HEIGHT=24
24 | TOCK_FIX_HEIGHT=26
25 | TARGET_HEIGHT=30
26 |
--------------------------------------------------------------------------------
/scripts/devnet/forest_ci.dockerfile:
--------------------------------------------------------------------------------
1 | # The version has to match Github CI runner images
2 | FROM ubuntu:24.04
3 |
4 | ENV DEBIAN_FRONTEND=noninteractive
5 |
6 | RUN apt-get update && apt-get install --no-install-recommends -y \
7 | ca-certificates \
8 | && rm -rf /var/lib/apt/lists/*
9 |
10 | COPY forest* /usr/local/bin/
11 | RUN chmod +x /usr/local/bin/forest*
12 |
13 | # Roughly verify that the binaries work.
14 | # This should ensure that all dynamically-linked libraries are present.
15 | RUN forest -V && forest-cli -V
16 |
--------------------------------------------------------------------------------
/scripts/devnet/forest_config.toml.tpl:
--------------------------------------------------------------------------------
1 | [client]
2 | encrypt_keystore = false
3 | data_dir = "/forest_data"
4 |
5 | [network]
6 | kademlia = false
7 | target_peer_count = 1
8 |
9 | # Note that this has to come last. The actual TOML file will have
10 | # the chain name appended.
11 | [chain]
12 | type = "devnet"
13 |
--------------------------------------------------------------------------------
/scripts/devnet/lotus-miner.env:
--------------------------------------------------------------------------------
1 | LOTUS_PATH=/lotus_data/lotus-local-net
2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net
3 | LOTUS_SKIP_GENESIS_CHECK=_yes_
4 | # NOTE(review): duplicate LOTUS_API_LISTENADDRESS removed (was /dns/lotus/tcp/2345/http); the lotus-miner value on the next line is the effective one, since the last assignment wins in docker env-files.
5 | LOTUS_API_LISTENADDRESS=/dns/lotus-miner/tcp/2345/http
6 |
--------------------------------------------------------------------------------
/scripts/devnet/lotus.env:
--------------------------------------------------------------------------------
1 | LOTUS_PATH=/lotus_data/lotus-local-net
2 | LOTUS_MINER_PATH=/lotus_data/lotus-miner-local-net
3 | LOTUS_SKIP_GENESIS_CHECK=_yes_
4 | LOTUS_API_LISTENADDRESS=/dns/lotus/tcp/1234/http
5 | LOTUS_LIBP2P_LISTENADDRESSES=/ip4/0.0.0.0/tcp/1235
6 |
--------------------------------------------------------------------------------
/scripts/devnet/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# This script is used to set up the CI environment for the
# local devnet tests.

set -euxo pipefail

# Path to the directory containing this script.
PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
pushd "${PARENT_PATH}"
source .env

# This should not be needed in GH. It is useful for running locally.
docker compose down --remove-orphans
docker compose rm -f
# Cleanup data volumes
docker volume rm -f devnet_lotus-data
docker volume rm -f devnet_forest-data

# Run it in the background so we can perform checks on it.
# Ideally, we could use `--wait` and `--wait-timeout` to wait for services
# to be up. However, `compose` does not distinguish between services and
# init containers. See more: https://github.com/docker/compose/issues/10596
docker compose up --build --force-recreate --detach

# Wait for Forest to be ready. We can assume that it is ready when the
# RPC server is up. This checks if Forest's RPC endpoint is up.
# `ChainHead` takes no arguments, hence the empty JSON-RPC `params` array
# (the request previously used a non-standard `"param":"null"` member).
function call_forest_chain_head {
  curl --silent -X POST -H "Content-Type: application/json" \
    --data '{"jsonrpc":"2.0","id":2,"method":"Filecoin.ChainHead","params":[]}' \
    "http://127.0.0.1:${FOREST_RPC_PORT}/rpc/v1"
}

until call_forest_chain_head; do
  echo "Forest is unavailable - sleeping for 1s"
  sleep 1
done

popd
39 |
--------------------------------------------------------------------------------
/scripts/linters/find_unused_deps.rb:
--------------------------------------------------------------------------------
1 | # frozen_string_literal: true
2 |
3 | require 'toml-rb'
4 | require 'set'
5 |
6 | exit_code = 0
7 |
# Builds a regex matching a `use`-import or a path reference of the crate.
def get_pattern(crate_raw)
  crate = crate_raw.tr('-', '_')
  /(\buse\s#{crate}\b)|(\b#{crate}::)/
end
12 |
# Special cases to suppress false positives.
def excluded?(crates, crate)
  # `quickcheck_macros` pulls in `quickcheck` implicitly, so a seemingly
  # unused `quickcheck` is fine whenever the macros crate is also declared.
  crates.include?('quickcheck_macros') && crate == 'quickcheck'
end
18 |
Dir.glob('**/*.toml').each do |file|
  crate_dir = File.dirname(file)
  manifest = TomlRB.load_file(file)

  # Gather every crate named in the regular, dev and workspace
  # dependency tables of this manifest.
  declared = Set.new
  %w[dependencies dev-dependencies].each do |section|
    manifest[section]&.each_key { |name| declared.add name }
  end
  manifest.dig('workspace', 'dependencies')&.each_key do |name|
    declared.add name
  end

  # Read all Rust sources under the crate once, up front,
  # to improve performance.
  sources = Dir.glob("#{crate_dir}/**/*.rs").map { |path| File.read(path) }

  declared.each do |crate|
    next if excluded?(declared, crate)
    next if sources.any? { |text| text.match?(get_pattern(crate)) }

    puts "Potentially unused: #{crate} in #{crate_dir}"
    exit_code = 1
  end
end

exit exit_code
48 |
--------------------------------------------------------------------------------
/scripts/s3/requirement.txt:
--------------------------------------------------------------------------------
1 | boto3==1.35.27
2 |
--------------------------------------------------------------------------------
/scripts/s3/set_sccache_do_bucket_lifecycle.py:
--------------------------------------------------------------------------------
1 | #
2 | # This script sets the bucket lifecycle policy of the sccache digitalocean
3 | # space backend to automatically delete records after a period of time.
4 | #
5 | # to run this script, DO space key and secret need to be configured with
6 | # environment variables `DO_SPACE_KEY` and `DO_SPACE_SECRET` respectively.
7 | #
8 |
9 | import boto3
10 | import json
11 | import os
12 |
13 |
def main():
    """Apply a 30-day expiration lifecycle rule to the sccache Space.

    Credentials are read from the ``DO_SPACE_KEY`` and ``DO_SPACE_SECRET``
    environment variables (see the file header comment).
    """
    # Name of the DigitalOcean Space (S3-compatible bucket) holding the cache.
    bucket = "forest-sccache-us-west"
    # Expire every object (the empty Prefix matches all keys) 30 days
    # after creation.
    lifecycle_config = {
        "Rules": [
            {
                "Expiration": {
                    "Days": 30,
                },
                "ID": "cache-retention",
                "Prefix": "",
                "Status": "Enabled",
            },
        ]
    }
    # DigitalOcean Spaces speaks the S3 API, so boto3's S3 client works
    # when pointed at the region-specific Spaces endpoint.
    s3 = boto3.client(
        "s3",
        region_name="sfo3",
        endpoint_url="https://sfo3.digitaloceanspaces.com",
        aws_access_key_id=os.getenv("DO_SPACE_KEY"),
        aws_secret_access_key=os.getenv("DO_SPACE_SECRET"),
    )
    s3.put_bucket_lifecycle_configuration(
        Bucket=bucket, LifecycleConfiguration=lifecycle_config
    )
    # Read the policy back and print it, so logs show what was applied.
    result = s3.get_bucket_lifecycle_configuration(Bucket=bucket)
    print(json.dumps(result))


if __name__ == "__main__":
    main()
44 |
--------------------------------------------------------------------------------
/scripts/tests/api_compare/.env:
--------------------------------------------------------------------------------
1 | # Note: this should be a `fat` image so that it contains the pre-downloaded filecoin proof parameters
2 | FOREST_IMAGE=ghcr.io/chainsafe/forest:edge-fat
3 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet
4 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters
5 | LOTUS_RPC_PORT=1234
6 | FOREST_RPC_PORT=2345
7 | FOREST_OFFLINE_RPC_PORT=3456
8 | FOREST_HEALTHZ_RPC_PORT=2346
9 | CHAIN=calibnet
10 |
11 | # This is a pre-generated miner generated from Lotus
12 | # The process is too lengthy to create the miner on the fly (needs to send FIL to the miner, wait for confirmations, etc)
13 | # It's fine to use this miner for testing purposes, e.g., signing messages in tests.
14 | MINER_ADDRESS=t0111551 # t2nfplhzpyeck5dcc4fokj5ar6nbs3mhbdmq6xu3q
15 | MINER_WORKER_ADDRESS=t3sw466j35hqjbch5x7tcr7ona6idsgzypoturfci2ajqsfrrwhp7ty3ythtd7x646adaidnvxpdr5b2ftcciq
16 | MINER_WORKER_KEY=7b2254797065223a22626c73222c22507269766174654b6579223a225a6c4c784f55666d666f44332b577a2f386175482f6b2f456f4b674443365365584256563447714c4c6d6b3d227d
17 |
--------------------------------------------------------------------------------
/scripts/tests/api_compare/filter-list:
--------------------------------------------------------------------------------
1 | # This list contains potentially broken methods (or tests) that are ignored.
2 | # They should be considered bugged, and not used until the root cause is resolved.
3 | # Disable until next Lotus release with go-f3 0.8.0
4 | !Filecoin.F3GetManifest
5 |
--------------------------------------------------------------------------------
/scripts/tests/api_compare/filter-list-offline:
--------------------------------------------------------------------------------
1 | # This list contains potentially broken methods (or tests) that are ignored.
2 | # They should be considered bugged, and not used until the root cause is resolved.
3 | !Filecoin.EthSyncing
4 | !eth_syncing
5 | !Filecoin.NetAddrsListen
6 | !Filecoin.NetAgentVersion
7 | !Filecoin.NetAutoNatStatus
8 | !Filecoin.NetPeers
9 | !Filecoin.NetFindPeer
10 | !Filecoin.NetProtectAdd
11 | !Filecoin.NetProtectRemove
12 | !Filecoin.NetProtectList
13 | # Most F3 methods are not available on offline Forest RPC server
14 | !Filecoin.F3GetManifest
15 | !Filecoin.F3GetLatestCertificate
16 | !Filecoin.F3ListParticipants
17 | !Filecoin.F3GetProgress
18 | !Filecoin.F3IsRunning
19 | !Filecoin.F3GetCertificate
20 | !Filecoin.F3GetOrRenewParticipationTicket
21 | !Filecoin.F3GetF3PowerTable
22 | # CustomCheckFailed in Forest: https://github.com/ChainSafe/forest/actions/runs/9593268587/job/26453560366
23 | !Filecoin.StateCall
24 | # These methods don't make sense in the context of an offline node
25 | !Filecoin.MinerCreateBlock
26 | # Offline server won't provide correct results for finality-related methods
27 | !Filecoin.EthGetBlockByNumber
28 | !eth_getBlockByNumber
29 | !Filecoin.ChainSetHead
30 |
--------------------------------------------------------------------------------
/scripts/tests/api_compare/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# This script is used to set up a clean environment for the
# API compare checks.

set -euxo pipefail

# Path to the directory containing this script.
PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
pushd "${PARENT_PATH}"
source .env

# This should not be needed in GH. It is useful for running locally.
docker compose --profile include-offline-rpc down --remove-orphans
docker compose rm -f
# Cleanup data volumes
# docker volume rm -f api_compare_node-data

# Run it in the background so we can perform checks on it.
# Ideally, we could use `--wait` and `--wait-timeout` to wait for services
# to be up. However, `compose` does not distinguish between services and
# init containers. See more: https://github.com/docker/compose/issues/10596
docker compose --profile include-offline-rpc up --build --force-recreate --detach --timestamps

popd
25 |
--------------------------------------------------------------------------------
/scripts/tests/bootstrapper/.env:
--------------------------------------------------------------------------------
1 | # Note: this should be a `fat` image so that it contains the pre-downloaded filecoin proof parameters
2 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet
3 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters
4 | LOTUS_RPC_PORT=1234
5 | FOREST_RPC_PORT=2345
6 | FOREST_P2P_PORT=12345
7 | # Pre-generated keypair for the forest node. This is required to easily connect to the forest node from the lotus node.
8 | FOREST_PEER_KEYPAIR=7PCBrDPUebd7Pj+DqhbzNuKBWmldP9r2K5eEnbYelUoK4xd+ng8c6C9gDa/q31/U5b6FIlNnHDQLQ4WSop1y6w==
9 | # The PeerID is derived from the `FOREST_PEER_KEYPAIR`.
10 | FOREST_BOOTSTRAPPER_ADDRESS=/dns/forest-bootstrapper/tcp/12345/p2p/12D3KooWAYs5zbzniHaL9RnnH2RKdNvibuj3BCS4b3bHtYvC81yL
11 | CHAIN=calibnet
12 |
--------------------------------------------------------------------------------
/scripts/tests/bootstrapper/README.md:
--------------------------------------------------------------------------------
1 | # Forest as a bootstrapper test
2 |
3 | The setup here creates a single Forest bootstrap peer with a well-known peer id
4 | and p2p listening port. Then, a secondary Forest or Lotus node is created and
5 | connected to that peer. The assertion succeeds if the secondary peer is able to
6 | sync the chain from the bootstrap peer and have multiple peers in their
7 | peerstores.
8 |
9 | Note that Lotus and Forest are checked separately, to limit the resources
10 | required for the test.
11 |
12 | This is illustrated in the following flowchart:
13 |
14 | ```mermaid
15 | flowchart TD
16 | A[Init] -->|Download proofs and snapshot| B(Start the Forest bootstrapper)
17 | B --> C(Start Forest/Lotus peer)
18 | C -->|Wait for sync| E(Assert peer store populated)
19 | E --> F(Finish)
20 | ```
21 |
22 | ## Usage
23 |
24 | ```bash
25 | ./test_bootstrapper.sh forest
26 | ./test_bootstrapper.sh lotus
27 | ```
28 |
29 | ## Teardown
30 |
31 | ```bash
32 | docker compose -f ./docker-compose-forest.yml down -v --rmi all
33 | docker compose -f ./docker-compose-lotus.yml down -v --rmi all
34 | ```
35 |
--------------------------------------------------------------------------------
/scripts/tests/bootstrapper/test_bootstrapper.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# This script is used to set up clean environment for the bootstrapper tests.

set -euxo pipefail

# Accepts one argument: `forest` or `lotus`.
# Fix: the usage messages previously printed `Usage: $0 ` with no
# argument placeholder.
if [ $# -ne 1 ]; then
  echo "Usage: $0 <forest|lotus>"
  exit 1
fi

# Select the compose stack matching the requested secondary node.
if [ "$1" == "forest" ]; then
  COMPOSE_FILE="docker-compose-forest.yml"
elif [ "$1" == "lotus" ]; then
  COMPOSE_FILE="docker-compose-lotus.yml"
else
  echo "Usage: $0 <forest|lotus>"
  exit 1
fi

# Path to the directory containing this script.
PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
pushd "${PARENT_PATH}"
source .env

# This should not be needed in GH. It is useful for running locally.
docker compose -f "$COMPOSE_FILE" down --remove-orphans
docker compose -f "$COMPOSE_FILE" rm -f

# Run it in the background so we can perform checks on it.
# Ideally, we could use `--wait` and `--wait-timeout` to wait for services
# to be up. However, `compose` does not distinguish between services and
# init containers. See more: https://github.com/docker/compose/issues/10596
docker compose -f "$COMPOSE_FILE" up --build --force-recreate --detach --timestamps

popd
37 |
--------------------------------------------------------------------------------
/scripts/tests/butterflynet_check.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -euxo pipefail

# This script tests Forest is able to catch up the butterflynet.

# harness.sh supplies $FOREST_PATH and the forest_wait_* helpers used
# below (presumably — confirm against scripts/tests/harness.sh).
source "$(dirname "$0")/harness.sh"

# Ensure the background node does not outlive the test run.
function shutdown {
  kill -KILL $FOREST_NODE_PID
}

trap shutdown EXIT

# Start a fresh butterflynet node in the background.
$FOREST_PATH --chain butterflynet --encrypt-keystore false &
FOREST_NODE_PID=$!

# Block until the RPC API answers, then until the chain reports synced.
forest_wait_api

forest_wait_for_sync
20 |
--------------------------------------------------------------------------------
/scripts/tests/calibnet_export_check.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
# This script is checking the correctness of
# the snapshot export feature.
# It requires both the `forest` and `forest-cli` binaries to be in the PATH.

# `-o pipefail` added for consistency with the sibling test scripts, so a
# failure on the left-hand side of a pipeline is not masked.
set -euo pipefail

source "$(dirname "$0")/harness.sh"

forest_init "$@"

echo "Cleaning up the initial snapshot"
rm --force --verbose ./*.{car,car.zst,sha256sum}

echo "Exporting zstd compressed snapshot"
$FOREST_CLI_PATH snapshot export

echo "Testing snapshot validity"
zstd --test ./*.car.zst

echo "Verifying snapshot checksum"
sha256sum --check ./*.sha256sum

echo "Validating CAR files"
zstd --decompress ./*.car.zst
for f in *.car; do
  echo "Validating CAR file $f"
  $FOREST_TOOL_PATH snapshot validate "$f"
done

echo "Exporting zstd compressed snapshot at genesis"
$FOREST_CLI_PATH snapshot export --tipset 0

echo "Testing genesis snapshot validity"
zstd --test forest_snapshot_calibnet_2022-11-01_height_0.forest.car.zst
36 |
--------------------------------------------------------------------------------
/scripts/tests/calibnet_migration_regression_tests.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash

set -e

# To test that migrations still work, we import a snapshot 100 epochs after the
# migration point and then we validate the last 200 tipsets. This triggers the
# migration logic without connecting to the real Filecoin network.

FOREST_PATH="forest"
# Shared command prefix; each invocation appends the snapshot URL. Word
# splitting of this unquoted string into arguments is intentional.
MIGRATION_TEST="$FOREST_PATH --chain calibnet --encrypt-keystore false --halt-after-import --height=-200 --no-gc --import-snapshot"

# NV17 - Shark (stale "uncomment when we support nv17" note removed: the
# test below is already enabled).
echo NV17 - Shark
$MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_16900.car.zst"

echo NV18 - Hygge
$MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_322454.car.zst"

echo NV19 - Lightning
$MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_489194.car.zst"

echo NV20 - Thunder # (no migration should happen in practice, it's a shadow upgrade). We test it anyway.
$MIGRATION_TEST "https://forest-snapshots.fra1.cdn.digitaloceanspaces.com/debug/filecoin_calibnet_height_492314.car.zst"
24 |
--------------------------------------------------------------------------------
/scripts/tests/calibnet_no_discovery_check.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env bash
set -euxo pipefail

# This script tests forest behaviours when discovery (mDNS and Kademlia) is disabled

source "$(dirname "$0")/harness.sh"

# Ensure the background node does not outlive the test run.
function shutdown {
  kill -KILL $FOREST_NODE_PID
}

trap shutdown EXIT

$FOREST_PATH --chain calibnet --encrypt-keystore false --mdns false --kademlia false --auto-download-snapshot --exit-after-init
$FOREST_PATH --chain calibnet --encrypt-keystore false --mdns false --kademlia false --auto-download-snapshot --log-dir "$LOG_DIRECTORY" &
FOREST_NODE_PID=$!

forest_wait_api

# Verify that one of the seed nodes has been connected to
until $FOREST_CLI_PATH net peers | grep "calib"; do
  sleep 1s;
done

# Verify F3 is getting certificates from the network
until [[ $($FOREST_CLI_PATH f3 certs get --output json | jq '.GPBFTInstance') -gt 100 ]]; do
  sleep 1s;
done

echo "Test subcommands: f3 status"
$FOREST_CLI_PATH f3 status
echo "Test subcommands: f3 manifest"
$FOREST_CLI_PATH f3 manifest
# Fix: the two labels below were swapped relative to the commands they
# announce ("certs get" was printed before `certs list` and vice versa).
echo "Test subcommands: f3 certs list"
$FOREST_CLI_PATH f3 certs list
echo "Test subcommands: f3 certs get"
$FOREST_CLI_PATH f3 certs get
38 |
--------------------------------------------------------------------------------
/scripts/tests/calibnet_stateless_mode_check.sh:
--------------------------------------------------------------------------------
#!/bin/bash
set -euxo pipefail

# This script tests the stateless mode of a forest node

# harness.sh supplies $FOREST_PATH, $FOREST_CLI_PATH, $LOG_DIRECTORY and
# forest_init_stateless (presumably — confirm against scripts/tests/harness.sh).
source "$(dirname "$0")/harness.sh"

forest_init_stateless

# Example format: /ip4/127.0.0.1/tcp/41937/p2p/12D3KooWAB9z7vZ1x1v9t4BViVkX1Hy1ScoRnWV2GgGy5ec6pfUZ
STATELESS_NODE_ADDRESS=$($FOREST_CLI_PATH net listen | tail -n 1)
echo "Stateless node address: $STATELESS_NODE_ADDRESS"
# Example format: 12D3KooWAB9z7vZ1x1v9t4BViVkX1Hy1ScoRnWV2GgGy5ec6pfUZ
# Field 7 of the "/"-delimited multiaddr above is the peer id.
# NOTE(review): `--zero-terminated` appears to be used to avoid a trailing
# newline in the captured value — confirm this is intentional.
STATELESS_NODE_PEER_ID=$(echo "$STATELESS_NODE_ADDRESS" | cut --delimiter="/" --fields=7 --zero-terminated)
echo "Stateless node peer id: $STATELESS_NODE_PEER_ID"

# Run a normal forest node that only connects to the stateless node
CONFIG_PATH="./forest_config.toml"
cat <<- EOF > $CONFIG_PATH
[network]
listening_multiaddrs = ["/ip4/127.0.0.1/tcp/0"]
bootstrap_peers = ["$STATELESS_NODE_ADDRESS"]
mdns = false
kademlia = false
EOF

# Disable discovery to not connect to more nodes
$FOREST_PATH --chain calibnet --encrypt-keystore false --auto-download-snapshot --config "$CONFIG_PATH" --rpc false --metrics-address 127.0.0.1:6117 --healthcheck-address 127.0.0.1:2347 &
FOREST_NODE_PID=$!
# Verify that the stateless node can respond to chain exchange requests
until curl http://127.0.0.1:6117/metrics | grep "chain_exchange_response_in"; do
  sleep 1s;
done
kill -KILL $FOREST_NODE_PID
35 |
--------------------------------------------------------------------------------
/scripts/tests/snapshot_parity/.env:
--------------------------------------------------------------------------------
1 | LOTUS_IMAGE=filecoin/lotus-all-in-one:v1.33.0-calibnet
2 | FIL_PROOFS_PARAMETER_CACHE=/var/tmp/filecoin-proof-parameters
3 | LOTUS_RPC_PORT=1234
4 | FOREST_RPC_PORT=2345
5 | CHAIN=calibnet
6 | EXPORT_EPOCHS=900
7 |
--------------------------------------------------------------------------------
/scripts/tests/snapshot_parity/setup.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# This script is used to set up a clean environment for the
# snapshot parity checks. (Header previously said "API compare checks" —
# a copy-paste leftover.)

set -euxo pipefail

# Path to the directory containing this script.
PARENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd -P )
pushd "${PARENT_PATH}"
source .env

# This should not be needed in GH. It is useful for running locally.
docker compose down --remove-orphans
docker compose rm -f
# Cleanup data volumes
# docker volume rm -f snapshot_parity_node-data

# Run it in the background so we can perform checks on it.
# Ideally, we could use `--wait` and `--wait-timeout` to wait for services
# to be up. However, `compose` does not distinguish between services and
# init containers. See more: https://github.com/docker/compose/issues/10596
docker compose up --build --force-recreate --detach --timestamps

popd
25 |
--------------------------------------------------------------------------------
/src/beacon/mod.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

//! Beacon support: beacon entries, the drand client, and the associated
//! signature types.

pub mod beacon_entries;
mod drand;
pub mod signatures;
pub use beacon_entries::*;
pub use drand::*;

// Deterministic stand-in beacon, compiled only for the test suite.
#[cfg(test)]
pub mod mock_beacon;
#[cfg(test)]
mod tests {
    mod drand;
}
16 |
--------------------------------------------------------------------------------
/src/beacon/signatures/signature_impls.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
6 | impl From for SignatureOnG1 {
7 | fn from(val: G1Projective) -> Self {
8 | SignatureOnG1(val.into())
9 | }
10 | }
11 | impl From for G1Projective {
12 | fn from(val: SignatureOnG1) -> Self {
13 | val.0.into()
14 | }
15 | }
16 |
17 | impl From for SignatureOnG1 {
18 | fn from(val: G1Affine) -> Self {
19 | SignatureOnG1(val)
20 | }
21 | }
22 |
23 | impl From for G1Affine {
24 | fn from(val: SignatureOnG1) -> Self {
25 | val.0
26 | }
27 | }
28 |
29 | fn g1_from_slice(raw: &[u8]) -> Result {
30 | const SIZE: usize = G1Affine::compressed_size();
31 |
32 | if raw.len() != SIZE {
33 | return Err(Error::SizeMismatch);
34 | }
35 |
36 | let mut res = [0u8; SIZE];
37 | res.copy_from_slice(raw);
38 |
39 | Option::from(G1Affine::from_compressed(&res)).ok_or(Error::GroupDecode)
40 | }
41 |
42 | impl SignatureOnG1 {
43 | pub fn from_bytes(raw: &[u8]) -> Result {
44 | let g1 = g1_from_slice(raw)?;
45 | Ok(g1.into())
46 | }
47 |
48 | pub fn as_bytes(&self) -> [u8; G1Affine::compressed_size()] {
49 | self.0.to_compressed()
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/src/bin/forest-cli.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Binary entry point: delegates to the shared `forest` library, which
/// owns argument parsing and command dispatch for the CLI.
fn main() -> anyhow::Result<()> {
    forest::forest_main(std::env::args_os())
}
7 |
--------------------------------------------------------------------------------
/src/bin/forest-tool.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Binary entry point: delegates to the shared `forest` library's
/// `forest-tool` implementation.
fn main() -> anyhow::Result<()> {
    forest::forest_tool_main(std::env::args_os())
}
7 |
--------------------------------------------------------------------------------
/src/bin/forest-wallet.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Binary entry point: delegates to the shared `forest` library's
/// `forest-wallet` implementation.
fn main() -> anyhow::Result<()> {
    forest::forest_wallet_main(std::env::args_os())
}
7 |
--------------------------------------------------------------------------------
/src/bin/forest.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Binary entry point for the Forest daemon: delegates to the shared
/// `forest` library.
fn main() -> anyhow::Result<()> {
    forest::forestd_main(std::env::args_os())
}
7 |
--------------------------------------------------------------------------------
/src/blocks/gossip_block.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use cid::Cid;
5 | use serde_tuple::{self, Deserialize_tuple, Serialize_tuple};
6 |
7 | use crate::blocks::CachingBlockHeader;
8 |
9 | /// Block message used as serialized `gossipsub` messages for blocks topic.
10 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary, Default))]
11 | #[derive(Clone, Debug, PartialEq, Serialize_tuple, Deserialize_tuple)]
12 | pub struct GossipBlock {
13 | pub header: CachingBlockHeader,
14 | pub bls_messages: Vec,
15 | pub secpk_messages: Vec,
16 | }
17 |
--------------------------------------------------------------------------------
/src/blocks/mod.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

use thiserror::Error;

mod block;
#[cfg(test)]
mod chain4u;
mod election_proof;
mod gossip_block;
mod header;
mod ticket;
// `tipset` is private in normal builds but public when building docs, so
// rustdoc can render its items without widening the real API surface.
#[cfg(not(doc))]
mod tipset;
#[cfg(doc)]
pub mod tipset;
mod vrf_proof;

pub use block::{BLOCK_MESSAGE_LIMIT, Block, TxMeta};
pub use election_proof::ElectionProof;
pub use gossip_block::GossipBlock;
pub use header::{CachingBlockHeader, RawBlockHeader};
pub use ticket::Ticket;
pub use tipset::{CreateTipsetError, FullTipset, Tipset, TipsetKey};
pub use vrf_proof::VRFProof;

/// Blockchain blocks error
#[derive(Debug, PartialEq, Eq, Error)]
pub enum Error {
    /// Invalid signature
    #[error("Invalid signature: {0}")]
    InvalidSignature(String),
    /// Error in validating arbitrary data
    #[error("Error validating data: {0}")]
    Validation(String),
}

// Test-only chain-building helpers, re-exported for sibling test modules.
#[cfg(test)]
pub(crate) use chain4u::{Chain4U, HeaderBuilder, chain4u};

#[cfg(any(test, doc))]
mod tests {

    mod serialization_vectors;
    mod ticket_test;
}
47 |
--------------------------------------------------------------------------------
/src/blocks/tests/calibnet/HEAD:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/blocks/tests/calibnet/HEAD
--------------------------------------------------------------------------------
/src/blocks/tests/serialization-vectors/README.md:
--------------------------------------------------------------------------------
1 | # The vectors are copied from the archived [serialization-vectors](https://github.com/filecoin-project/serialization-vectors) repository.
2 |
--------------------------------------------------------------------------------
/src/blocks/tests/ticket_test.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::blocks::*;
5 | use crate::test_utils::construct_ticket;
6 | use crate::utils::encoding::from_slice_with_fallback;
7 | use fvm_ipld_encoding::to_vec;
8 |
9 | // From Lotus
10 | const TICKET: [u8; 99] = [
11 | 0x81, 0x58, 0x60, 0x96, 0x64, 0x49, 0x2f, 0x30, 0xe9, 0xb9, 0x50, 0x3b, 0x71, 0x41, 0x0b, 0x1d,
12 | 0x38, 0x2e, 0x2b, 0xd4, 0x85, 0x7f, 0xe2, 0x15, 0x39, 0xac, 0x92, 0x1b, 0xcb, 0x7f, 0xd0, 0x86,
13 | 0xd5, 0x78, 0x71, 0xe6, 0xdd, 0x5c, 0x31, 0xcd, 0x23, 0x61, 0x8b, 0x52, 0x52, 0xb6, 0x2c, 0x7b,
14 | 0x44, 0x4c, 0x3a, 0x02, 0x9b, 0xba, 0xad, 0xc2, 0x50, 0x57, 0x56, 0x81, 0x06, 0x47, 0x77, 0xf6,
15 | 0x04, 0x06, 0xc4, 0xff, 0x00, 0x6f, 0x38, 0xfc, 0x61, 0x71, 0xfe, 0x45, 0xd4, 0x83, 0xe5, 0x15,
16 | 0x79, 0xd0, 0xe2, 0x47, 0x8b, 0x7e, 0x5f, 0xde, 0x2c, 0x51, 0xd2, 0xe8, 0x64, 0x63, 0xaf, 0x86,
17 | 0xd3, 0xcb, 0xd5,
18 | ];
19 |
#[test]
fn encode_ticket() {
    // Serializing the reference ticket must reproduce the Lotus-generated bytes.
    let encoded = to_vec(&construct_ticket()).unwrap();
    assert_eq!(encoded.as_slice(), &TICKET[..]);
}
27 |
#[test]
fn decode_ticket() {
    // Deserializing the Lotus-generated bytes must yield the reference ticket.
    let decoded: Ticket = from_slice_with_fallback(&TICKET).unwrap();
    assert_eq!(construct_ticket(), decoded);
}
35 |
--------------------------------------------------------------------------------
/src/blocks/ticket.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::blocks::VRFProof;
5 | use serde_tuple::{self, Deserialize_tuple, Serialize_tuple};
6 |
/// A Ticket is a marker of a tick of the blockchain's clock. It is the source
/// of randomness for proofs of storage and leader election. It is generated
/// by the miner of a block using a `VRF` and a `VDF`.
///
/// The `serde_tuple` derives encode this struct as a tuple (array) rather
/// than a map, matching the on-chain serialization.
#[derive(
    Clone, Debug, PartialEq, Eq, Default, Serialize_tuple, Deserialize_tuple, Hash, PartialOrd, Ord,
)]
pub struct Ticket {
    /// A proof output by running a `VRF` on the `VDFResult` of the parent
    /// ticket
    pub vrfproof: VRFProof,
}
18 |
19 | impl Ticket {
20 | /// Ticket constructor
21 | pub fn new(vrfproof: VRFProof) -> Self {
22 | Self { vrfproof }
23 | }
24 | }
25 |
#[cfg(test)]
impl quickcheck::Arbitrary for Ticket {
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        // Derive the proof bytes from an arbitrary integer so generated
        // tickets differ from one another.
        let seed = u64::arbitrary(g);
        Ticket::new(VRFProof::new(format!("===={seed}=====").into_bytes()))
    }
}
34 |
--------------------------------------------------------------------------------
/src/blocks/vrf_proof.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::utils::encoding::{blake2b_256, serde_byte_array};
5 | use serde::{Deserialize, Serialize};
6 |
7 | /// The output from running a VRF proof.
8 | #[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))]
9 | #[derive(Clone, Debug, PartialEq, Eq, Ord, PartialOrd, Default, Serialize, Deserialize, Hash)]
10 | pub struct VRFProof(#[serde(with = "serde_byte_array")] pub Vec);
11 |
12 | impl VRFProof {
13 | /// Creates a `VRFProof` from a raw vector.
14 | pub fn new(output: Vec) -> Self {
15 | Self(output)
16 | }
17 |
18 | /// Returns reference to underlying proof bytes.
19 | pub fn as_bytes(&self) -> &[u8] {
20 | &self.0
21 | }
22 |
23 | /// Compute the `BLAKE2b256` digest of the proof.
24 | pub fn digest(&self) -> [u8; 32] {
25 | blake2b_256(&self.0)
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/src/chain/store/mod.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

pub mod base_fee;
mod chain_store;
mod errors;
pub mod index;
mod tipset_tracker;

// Flatten the base-fee, chain-store and error items into this module;
// `index` and `tipset_tracker` are deliberately not re-exported wholesale.
pub use self::{base_fee::*, chain_store::*, errors::*};
11 |
--------------------------------------------------------------------------------
/src/chain/weight.rs:
--------------------------------------------------------------------------------
// Copyright 2019-2025 ChainSafe Systems
// SPDX-License-Identifier: Apache-2.0, MIT

/// Chain weight, represented as an arbitrary-precision integer.
pub type Weight = num::BigInt;
5 |
--------------------------------------------------------------------------------
/src/chain_sync/bad_block_cache.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use std::num::NonZeroUsize;
5 |
6 | use cid::Cid;
7 | use lru::LruCache;
8 | use nonzero_ext::nonzero;
9 | use parking_lot::Mutex;
10 |
11 | /// Thread-safe cache for tracking bad blocks.
12 | /// This cache is checked before validating a block, to ensure no duplicate
13 | /// work.
14 | #[derive(Debug)]
15 | pub struct BadBlockCache {
16 | cache: Mutex>,
17 | }
18 |
impl Default for BadBlockCache {
    fn default() -> Self {
        // Default capacity: 2^15 (32,768) entries.
        Self::new(nonzero!(1usize << 15))
    }
}
24 |
25 | impl BadBlockCache {
26 | pub fn new(cap: NonZeroUsize) -> Self {
27 | Self {
28 | cache: Mutex::new(LruCache::new(cap)),
29 | }
30 | }
31 |
32 | /// Puts a bad block `Cid` in the cache with a given reason.
33 | pub fn put(&self, c: Cid, reason: String) -> Option {
34 | self.cache.lock().put(c, reason)
35 | }
36 |
37 | /// Returns `Some` with the reason if the block CID is in bad block cache.
38 | /// This function does not update the head position of the `Cid` key.
39 | pub fn peek(&self, c: &Cid) -> Option {
40 | self.cache.lock().peek(c).cloned()
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/src/chain_sync/chain_muxer.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use serde::{Deserialize, Serialize};
5 |
/// Default number of recent state roots retained after sync.
const DEFAULT_RECENT_STATE_ROOTS: i64 = 2000;

/// Structure that defines syncing configuration options
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)]
#[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))]
pub struct SyncConfig {
    /// Number of recent state roots to keep in the database after `sync`
    /// and to include in the exported snapshot.
    pub recent_state_roots: i64,
}
16 |
impl Default for SyncConfig {
    fn default() -> Self {
        Self {
            // Keep the last 2000 state roots by default.
            recent_state_roots: DEFAULT_RECENT_STATE_ROOTS,
        }
    }
}
24 |
--------------------------------------------------------------------------------
/src/chain_sync/consensus.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use futures::{StreamExt, stream::FuturesUnordered};
5 | use nunny::Vec as NonEmpty;
6 |
7 | /// Helper function to collect errors from async validations.
8 | pub async fn collect_errs(
9 | mut handles: FuturesUnordered>>,
10 | ) -> Result<(), NonEmpty> {
11 | let mut errors = Vec::new();
12 |
13 | while let Some(result) = handles.next().await {
14 | if let Ok(Err(e)) = result {
15 | errors.push(e);
16 | }
17 | }
18 |
19 | match errors.try_into() {
20 | Ok(it) => Err(it),
21 | Err(_) => Ok(()),
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/src/chain_sync/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod bad_block_cache;
5 | mod chain_follower;
6 | mod chain_muxer;
7 | pub mod consensus;
8 | pub mod metrics;
9 | pub mod network_context;
10 | mod sync_status;
11 | mod tipset_syncer;
12 | mod validation;
13 |
14 | pub use self::{
15 | bad_block_cache::BadBlockCache,
16 | chain_follower::ChainFollower,
17 | chain_muxer::SyncConfig,
18 | consensus::collect_errs,
19 | sync_status::{ForkSyncInfo, ForkSyncStage, NodeSyncStatus, SyncStatusReport},
20 | validation::{TipsetValidationError, TipsetValidator},
21 | };
22 |
--------------------------------------------------------------------------------
/src/cli/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 | pub mod humantoken;
4 | pub mod main;
5 | pub mod subcommands;
6 |
--------------------------------------------------------------------------------
/src/cli/subcommands/chain_cmd/prune.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::rpc::{self, RpcMethodExt, chain::ChainPruneSnapshot};
5 | use clap::Subcommand;
6 | use std::time::Duration;
7 |
/// Prune chain database
#[derive(Debug, Subcommand)]
pub enum ChainPruneCommands {
    /// Run snapshot GC
    // Triggers the `ChainPruneSnapshot` RPC method on the node.
    Snap {
        /// Do not block until GC is completed
        #[arg(long)]
        no_wait: bool,
    },
}
18 |
19 | impl ChainPruneCommands {
20 | pub async fn run(self, client: rpc::Client) -> anyhow::Result<()> {
21 | match self {
22 | Self::Snap { no_wait } => {
23 | client
24 | .call(ChainPruneSnapshot::request((!no_wait,))?.with_timeout(Duration::MAX))
25 | .await?;
26 | }
27 | }
28 |
29 | Ok(())
30 | }
31 | }
32 |
--------------------------------------------------------------------------------
/src/cli/subcommands/config_cmd.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use std::io::Write;
5 |
6 | use anyhow::Context as _;
7 | use clap::Subcommand;
8 |
9 | use crate::cli::subcommands::Config;
10 |
/// Subcommands for inspecting the node configuration.
#[derive(Debug, Subcommand)]
pub enum ConfigCommands {
    /// Dump default configuration to standard output
    Dump,
}
16 |
17 | impl ConfigCommands {
18 | pub fn run(self, sink: &mut W) -> anyhow::Result<()> {
19 | match self {
20 | Self::Dump => writeln!(
21 | sink,
22 | "{}",
23 | toml::to_string(&Config::default())
24 | .context("Could not convert configuration to TOML format")?
25 | )
26 | .context("Failed to write the configuration"),
27 | }
28 | }
29 | }
30 |
#[cfg(test)]
mod tests {
    use super::*;

    // Round-trip check: dumping the default config as TOML and parsing it
    // back must yield an identical `Config`.
    #[tokio::test]
    async fn given_default_configuration_should_print_valid_toml() {
        let expected_config = Config::default();
        let mut sink = std::io::BufWriter::new(Vec::new());

        ConfigCommands::Dump.run(&mut sink).unwrap();

        let actual_config: Config = toml::from_str(std::str::from_utf8(sink.buffer()).unwrap())
            .expect("Invalid configuration!");

        assert_eq!(expected_config, actual_config);
    }
}
48 |
--------------------------------------------------------------------------------
/src/cli/subcommands/f3_cmd/certificate.tpl:
--------------------------------------------------------------------------------
1 | Instance: {{ GPBFTInstance }}
2 | Power Table:
3 | Next: {{ power_table_cid }}
4 | Delta: {{ power_table_delta_string }}
5 | Finalized Chain:
6 | Length: {{ ECChain | length }}
7 | Epochs: {{ epochs }}
8 | Chain:
9 | {% for line in chain_lines %}
10 | {{- line }}
11 | {% endfor %}
12 |
--------------------------------------------------------------------------------
/src/cli/subcommands/f3_cmd/progress.tpl:
--------------------------------------------------------------------------------
1 | Progress:
2 | Instance: {{ ID }}
3 | Round: {{ Round }}
4 | Phase: {{ phase_string }}
5 |
--------------------------------------------------------------------------------
/src/cli/subcommands/shutdown_cmd.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::cli::subcommands::prompt_confirm;
5 | use crate::rpc::{self, prelude::*};
6 |
/// CLI arguments for requesting a graceful node shutdown.
#[derive(Debug, clap::Args)]
pub struct ShutdownCommand {
    /// Assume "yes" as answer to shutdown prompt
    #[arg(long)]
    force: bool,
}
13 |
14 | impl ShutdownCommand {
15 | pub async fn run(self, client: rpc::Client) -> anyhow::Result<()> {
16 | println!("Shutting down Forest node");
17 | if !self.force && !prompt_confirm() {
18 | println!("Aborted.");
19 | return Ok(());
20 | }
21 | Shutdown::call(&client, ()).await?;
22 | Ok(())
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/src/db/migration/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod db_migration;
5 | mod migration_map;
6 | mod v0_22_1;
7 | mod v0_26_0;
8 | mod void_migration;
9 |
10 | pub use db_migration::DbMigration;
11 |
--------------------------------------------------------------------------------
/src/db/parity_db_config.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use serde::{Deserialize, Serialize};
5 |
/// `ParityDb` configuration exposed in Forest.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, Default)]
#[cfg_attr(test, derive(derive_quickcheck_arbitrary::Arbitrary))]
#[serde(default)]
pub struct ParityDbConfig {
    /// Whether to enable ParityDb's internal statistics collection.
    pub enable_statistics: bool,
}
13 |
--------------------------------------------------------------------------------
/src/db/tests/db_utils/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | pub(in crate::db) mod parity;
5 |
--------------------------------------------------------------------------------
/src/db/tests/db_utils/parity.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use std::ops::Deref;
5 |
6 | use crate::db::{parity_db::ParityDb, parity_db_config::ParityDbConfig};
7 |
8 | /// Temporary, self-cleaning ParityDB
9 | pub struct TempParityDB {
10 | pub db: Option,
11 | _dir: tempfile::TempDir, // kept for cleaning up during Drop
12 | }
13 |
14 | impl TempParityDB {
15 | /// Creates a new DB in a temporary path that gets wiped out when the
16 | /// variable gets out of scope.
17 | pub fn new() -> TempParityDB {
18 | let dir = tempfile::Builder::new()
19 | .tempdir()
20 | .expect("Failed to create temporary path for db.");
21 | let path = dir.path().join("paritydb");
22 | let config = ParityDbConfig::default();
23 |
24 | TempParityDB {
25 | db: Some(ParityDb::open(path, &config).unwrap()),
26 | _dir: dir,
27 | }
28 | }
29 | }
30 |
impl Deref for TempParityDB {
    type Target = ParityDb;

    fn deref(&self) -> &Self::Target {
        // Panics if the DB has been taken out of the `Option`.
        self.db.as_ref().unwrap()
    }
}
38 |
39 | impl AsRef for TempParityDB {
40 | fn as_ref(&self) -> &ParityDb {
41 | self.db.as_ref().unwrap()
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/src/db/tests/mem_test.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::subtests;
5 |
6 | use crate::db::MemoryDB;
7 |
// Smoke tests running the shared DB subtests against an in-memory store.

#[test]
fn mem_db_write() {
    subtests::write_bin(&MemoryDB::default());
}

#[test]
fn mem_db_read() {
    subtests::read_bin(&MemoryDB::default());
}

#[test]
fn mem_db_exists() {
    subtests::exists(&MemoryDB::default());
}

#[test]
fn mem_db_does_not_exist() {
    subtests::does_not_exist(&MemoryDB::default());
}

#[test]
fn mem_write_read_obj() {
    subtests::write_read_obj(&MemoryDB::default());
}
37 |
--------------------------------------------------------------------------------
/src/db/tests/parity_test.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::{db_utils::parity::TempParityDB, subtests};
5 |
// Smoke tests running the shared DB subtests against a temporary ParityDB.
// `&*` derefs `TempParityDB` down to `&ParityDb`; the temporary lives for
// the whole statement, so the DB stays open during each subtest.

#[test]
fn db_write() {
    subtests::write_bin(&*TempParityDB::new());
}

#[test]
fn db_read() {
    subtests::read_bin(&*TempParityDB::new());
}

#[test]
fn db_exists() {
    subtests::exists(&*TempParityDB::new());
}

#[test]
fn db_does_not_exist() {
    subtests::does_not_exist(&*TempParityDB::new());
}

#[test]
fn db_write_read_obj() {
    subtests::write_read_obj(&*TempParityDB::new());
}
35 |
--------------------------------------------------------------------------------
/src/f3/go_ffi.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
/// Auto-generated Go FFI bindings; contents are produced by the
/// `rust2go` macro, hence lints are silenced wholesale.
pub mod binding {
    #![allow(warnings)]
    #![allow(clippy::indexing_slicing)]
    rust2go::r2g_include_binding!();
}
9 |
/// FFI trait implemented on the Go side via `rust2go`; starts the F3
/// sidecar. Parameter semantics inferred from names — confirm against the
/// Go implementation.
#[rust2go::r2g]
pub trait GoF3Node {
    fn run(
        rpc_endpoint: String,
        jwt: String,
        f3_rpc_endpoint: String,
        initial_power_table: String,
        bootstrap_epoch: i64,
        finality: i64,
        f3_root: String,
    ) -> bool;
}
22 |
--------------------------------------------------------------------------------
/src/genesis/export40.car:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/genesis/export40.car
--------------------------------------------------------------------------------
/src/interpreter/errors.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::blocks;
5 | use thiserror::Error;
6 |
/// Interpreter error.
#[derive(Debug, Error)]
pub enum Error {
    /// State lookup against the database failed.
    #[error("failed to read state from the database: {0}")]
    Lookup(#[from] anyhow::Error),

    /// Block-level error (e.g. signature problems), forwarded verbatim.
    #[error(transparent)]
    Signature(#[from] blocks::Error),
}
16 |
--------------------------------------------------------------------------------
/src/interpreter/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod errors;
5 | mod fvm2;
6 | pub mod fvm3;
7 | mod fvm4;
8 | mod vm;
9 |
10 | use crate::shim::actors::AccountActorStateLoad as _;
11 | use crate::shim::actors::account;
12 | use crate::shim::{
13 | address::{Address, Protocol},
14 | state_tree::StateTree,
15 | };
16 | use fvm_ipld_blockstore::Blockstore;
17 |
18 | pub use self::vm::*;
19 |
20 | /// returns the public key type of address (`BLS`/`SECP256K1`) of an account
21 | /// actor identified by `addr`.
22 | pub fn resolve_to_key_addr(
23 | st: &StateTree,
24 | store: &BS,
25 | addr: &Address,
26 | ) -> Result
27 | where
28 | BS: Blockstore,
29 | S: Blockstore,
30 | {
31 | if addr.protocol() == Protocol::BLS
32 | || addr.protocol() == Protocol::Secp256k1
33 | || addr.protocol() == Protocol::Delegated
34 | {
35 | return Ok(*addr);
36 | }
37 |
38 | let act = st
39 | .get_actor(addr)?
40 | .ok_or_else(|| anyhow::anyhow!("Failed to retrieve actor: {}", addr))?;
41 |
42 | // If there _is_ an f4 address, return it as "key" address
43 | if let Some(address) = act.delegated_address {
44 | return Ok(address.into());
45 | }
46 |
47 | let acc_st = account::State::load(store, act.code, act.state)?;
48 |
49 | Ok(acc_st.pubkey_address().into())
50 | }
51 |
--------------------------------------------------------------------------------
/src/ipld/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | pub mod selector;
5 | pub mod util;
6 |
7 | pub use ipld_core::ipld::Ipld;
8 | pub use util::*;
9 |
10 | #[cfg(test)]
11 | mod tests {
12 | mod cbor_test;
13 | mod selector_explore;
14 | mod selector_gen_tests;
15 | }
16 |
--------------------------------------------------------------------------------
/src/ipld/selector/empty_map.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use serde::{Deserialize, Deserializer, Serialize, Serializer};
5 |
// This is only used as a utility because go impl serializes no data as an empty
// map

/// Zero-field struct that (de)serializes as the empty map `{}`.
#[derive(Serialize, Deserialize)]
struct EmptyMap {}
11 |
12 | pub fn serialize(serializer: S) -> Result
13 | where
14 | S: Serializer,
15 | {
16 | EmptyMap {}.serialize(serializer)
17 | }
18 |
19 | pub fn deserialize<'de, D>(deserializer: D) -> Result<(), D::Error>
20 | where
21 | D: Deserializer<'de>,
22 | {
23 | let EmptyMap {} = Deserialize::deserialize(deserializer)?;
24 | Ok(())
25 | }
26 |
--------------------------------------------------------------------------------
/src/key_management/errors.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use std::io;
5 |
6 | use thiserror::Error;
7 |
/// Errors raised by the key store and wallet layers.
#[derive(Debug, Error)]
pub enum Error {
    /// info that corresponds to key does not exist
    #[error("Key info not found")]
    KeyInfo,
    /// Key already exists in key store
    #[error("Key already exists")]
    KeyExists,
    /// Lookup of an expected key failed.
    #[error("Key does not exist")]
    KeyNotExists,
    /// No key matched the request.
    #[error("Key not found")]
    NoKey,
    /// Underlying I/O failure (e.g. reading the keystore file).
    #[error(transparent)]
    IO(#[from] io::Error),
    /// Catch-all error with a free-form message.
    #[error("{0}")]
    Other(String),
    /// Conversion from stored `KeyInfo` into a usable `Key` failed.
    #[error("Could not convert from KeyInfo to Key")]
    KeyInfoConversion,
}
27 |
--------------------------------------------------------------------------------
/src/key_management/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod errors;
5 | mod keystore;
6 | mod wallet;
7 | mod wallet_helpers;
8 |
9 | pub use errors::*;
10 | pub use keystore::*;
11 | pub use wallet::*;
12 | pub use wallet_helpers::*;
13 | #[cfg(test)]
14 | mod tests {}
15 |
--------------------------------------------------------------------------------
/src/key_management/tests/keystore_encrypted_old/keystore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChainSafe/forest/ea9d1601daa73b1dff98f60462e64194313aff59/src/key_management/tests/keystore_encrypted_old/keystore
--------------------------------------------------------------------------------
/src/libp2p/chain_exchange/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod behaviour;
5 | mod message;
6 | mod provider;
7 | pub use behaviour::*;
8 |
9 | pub use self::{message::*, provider::*};
10 | use super::rpc::CborRequestResponse;
11 |
/// Libp2p protocol name for `ChainExchange`.
pub const CHAIN_EXCHANGE_PROTOCOL_NAME: &str = "/fil/chain/xchg/0.0.1";

/// `ChainExchange` protocol codec to be used within the RPC service.
/// CBOR-encodes requests/responses over the protocol name above.
pub type ChainExchangeCodec =
    CborRequestResponse<&'static str, ChainExchangeRequest, ChainExchangeResponse>;
18 |
--------------------------------------------------------------------------------
/src/libp2p/hello/codec.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 | use crate::libp2p::rpc::CborRequestResponse;
6 |
/// Hello protocol codec to be used within the RPC service.
/// CBOR-encodes `HelloRequest`/`HelloResponse` message pairs.
pub type HelloCodec = CborRequestResponse<&'static str, HelloRequest, HelloResponse>;
9 |
--------------------------------------------------------------------------------
/src/libp2p/hello/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod message;
5 | pub use self::message::*;
6 | mod behaviour;
7 | pub use behaviour::*;
8 | mod codec;
9 | use codec::*;
10 |
/// Libp2p Hello protocol name.
/// Used to negotiate the Hello handshake stream with peers.
pub const HELLO_PROTOCOL_NAME: &str = "/fil/hello/1.0.0";
13 |
--------------------------------------------------------------------------------
/src/libp2p/metrics.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use once_cell::sync::Lazy;
5 | use prometheus_client::metrics::{counter::Counter, gauge::Gauge};
6 |
7 | pub static PEER_FAILURE_TOTAL: Lazy = Lazy::new(|| {
8 | let metric = Counter::default();
9 | crate::metrics::default_registry().register(
10 | "peer_failure_total",
11 | "Total number of failed peer requests",
12 | metric.clone(),
13 | );
14 | metric
15 | });
16 |
17 | pub static FULL_PEERS: Lazy = Lazy::new(|| {
18 | let metric = Gauge::default();
19 | crate::metrics::default_registry().register(
20 | "full_peers",
21 | "Number of healthy peers recognized by the node",
22 | metric.clone(),
23 | );
24 | metric
25 | });
26 |
27 | pub static BAD_PEERS: Lazy = Lazy::new(|| {
28 | let metric = Gauge::default();
29 | crate::metrics::default_registry().register(
30 | "bad_peers",
31 | "Number of bad peers recognized by the node",
32 | metric.clone(),
33 | );
34 | metric
35 | });
36 |
--------------------------------------------------------------------------------
/src/libp2p/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | mod behaviour;
5 | pub mod chain_exchange;
6 | mod config;
7 | pub mod discovery;
8 | mod gossip_params;
9 | pub mod hello;
10 | pub mod keypair;
11 | pub mod metrics;
12 | mod peer_manager;
13 | pub mod ping;
14 | pub mod rpc;
15 | mod service;
16 |
17 | // Re-export some libp2p types
18 | pub use cid::multihash::Multihash;
19 | pub use libp2p::{
20 | identity::{Keypair, ParseError, PeerId, ed25519},
21 | multiaddr::{Multiaddr, Protocol},
22 | };
23 |
24 | pub(in crate::libp2p) use self::behaviour::*;
25 | pub use self::{config::*, peer_manager::*, service::*};
26 | #[cfg(test)]
27 | mod tests {
28 | mod decode_test;
29 | }
30 |
--------------------------------------------------------------------------------
/src/libp2p_bitswap/internals/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | pub(in crate::libp2p_bitswap) mod codec;
5 | pub(in crate::libp2p_bitswap) mod event_handlers;
6 | pub(in crate::libp2p_bitswap) mod prefix;
7 |
--------------------------------------------------------------------------------
/src/lotus_json/actor_states/mod.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 | use super::*;
4 | mod account_state;
5 | mod cron_state;
6 | mod entry;
7 | mod evm_state;
8 | mod market_state;
9 | mod miner_state;
10 | mod system_state;
11 | mod vesting_funds;
12 |
--------------------------------------------------------------------------------
/src/lotus_json/address.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 | use crate::shim::address::Address;
6 |
/// Lotus-compatible JSON wrapper for [`Address`], encoded as its string form.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
#[schemars(rename = "Address")]
pub struct AddressLotusJson(
    #[schemars(with = "String")]
    #[serde(with = "crate::lotus_json::stringify")]
    Address,
);
14 |
impl HasLotusJson for Address {
    type LotusJson = AddressLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // Default address stringifies to "f00".
        vec![(json!("f00"), Address::default())]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        AddressLotusJson(self)
    }

    fn from_lotus_json(AddressLotusJson(address): Self::LotusJson) -> Self {
        address
    }
}
31 |
--------------------------------------------------------------------------------
/src/lotus_json/beacon_entry.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::beacon::BeaconEntry;
5 |
6 | use super::*;
7 |
8 | #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
9 | #[serde(rename_all = "PascalCase")]
10 | #[schemars(rename = "BeaconEntry")]
11 | pub struct BeaconEntryLotusJson {
12 | round: u64,
13 | #[schemars(with = "LotusJson>")]
14 | #[serde(with = "crate::lotus_json")]
15 | data: Vec,
16 | }
17 |
impl HasLotusJson for BeaconEntry {
    type LotusJson = BeaconEntryLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // Empty data serializes as `null` in the Lotus JSON encoding.
        vec![(json!({"Round": 0, "Data": null}), BeaconEntry::default())]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        let (round, data) = self.into_parts();
        Self::LotusJson { round, data }
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        let Self::LotusJson { round, data } = lotus_json;
        Self::new(round, data)
    }
}
36 |
--------------------------------------------------------------------------------
/src/lotus_json/big_int.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
6 | use num::BigInt;
7 |
/// Lotus-compatible JSON wrapper for [`BigInt`], encoded as a decimal string.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
#[schemars(rename = "BigInt")]
pub struct BigIntLotusJson(
    #[schemars(with = "String")]
    #[serde(with = "crate::lotus_json::stringify")]
    BigInt,
);
15 |
impl HasLotusJson for BigInt {
    type LotusJson = BigIntLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // Integers are represented as JSON strings, e.g. `"1"`.
        vec![(json!("1"), BigInt::from(1))]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        BigIntLotusJson(self)
    }

    fn from_lotus_json(BigIntLotusJson(big_int): Self::LotusJson) -> Self {
        big_int
    }
}
32 |
--------------------------------------------------------------------------------
/src/lotus_json/bit_field.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
6 | use fil_actors_shared::fvm_ipld_bitfield::{BitField, json::BitFieldJson};
7 |
8 | #[derive(Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
9 | #[schemars(rename = "BitField")]
10 | pub struct BitFieldLotusJson(#[schemars(with = "Option>")] pub BitFieldJson);
11 |
impl Clone for BitFieldLotusJson {
    fn clone(&self) -> Self {
        // Manual impl — presumably because `BitFieldJson` itself does not
        // derive `Clone`, so we clone the inner bitfield and re-wrap it.
        // TODO confirm against the actors crate.
        Self(BitFieldJson(self.0.0.clone()))
    }
}
17 |
impl HasLotusJson for BitField {
    type LotusJson = BitFieldLotusJson;
    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        vec![
            // Empty bitfield encodes as a single-element run `[0]`.
            (json!([0]), Self::new()),
            // A set bit at index 1 encodes as runs `[1, 1]`.
            (json!([1, 1]), {
                let mut it = Self::new();
                it.set(1);
                it
            }),
        ]
    }
    fn into_lotus_json(self) -> Self::LotusJson {
        BitFieldLotusJson(BitFieldJson(self))
    }
    fn from_lotus_json(BitFieldLotusJson(BitFieldJson(it)): Self::LotusJson) -> Self {
        it
    }
}
38 |
/// Verifies the JSON snapshots above round-trip for `BitField`.
// The turbofish type argument was garbled in transit; `BitField` is the
// only type whose snapshots this file defines.
#[test]
fn snapshots() {
    assert_all_snapshots::<BitField>();
}
43 |
--------------------------------------------------------------------------------
/src/lotus_json/bytecode_hash.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 | use fil_actor_evm_state::v16::BytecodeHash;
6 |
/// Lotus-compatible JSON wrapper for an EVM [`BytecodeHash`] (raw 32 bytes).
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "PascalCase")]
#[schemars(rename = "BytecodeHash")]
pub struct BytecodeHashLotusJson([u8; 32]);
11 |
impl HasLotusJson for BytecodeHash {
    type LotusJson = BytecodeHashLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // No snapshot fixtures for this type.
        vec![]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        BytecodeHashLotusJson(self.into())
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        Self::from(lotus_json.0)
    }
}
28 |
--------------------------------------------------------------------------------
/src/lotus_json/cid.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
/// Lotus-compatible JSON form of a CID: `{"/": "<cid-string>"}`.
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
#[schemars(rename = "Cid")]
pub struct CidLotusJson {
    #[schemars(with = "String")]
    #[serde(rename = "/", with = "crate::lotus_json::stringify")]
    slash: ::cid::Cid,
}
13 |
impl HasLotusJson for ::cid::Cid {
    type LotusJson = CidLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // Default CID stringifies to "baeaaaaa".
        vec![(json!({"/": "baeaaaaa"}), ::cid::Cid::default())]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        Self::LotusJson { slash: self }
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        let Self::LotusJson { slash } = lotus_json;
        slash
    }
}
31 |
--------------------------------------------------------------------------------
/src/lotus_json/duration.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 | use std::time::Duration;
6 |
// Durations are encoded as whole nanoseconds (see the 15 s == 15000000000
// snapshot below).
impl HasLotusJson for Duration {
    type LotusJson = u64;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        vec![(json!(15000000000_u64), Duration::from_secs(15))]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        // `as _` truncates the u128 nanosecond count to u64 — only an issue
        // for durations longer than ~584 years.
        self.as_nanos() as _
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        Self::from_nanos(lotus_json)
    }
}
23 |
--------------------------------------------------------------------------------
/src/lotus_json/election_proof.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use crate::blocks::{ElectionProof, VRFProof};
5 |
6 | use super::*;
7 |
8 | #[derive(Serialize, Deserialize, JsonSchema)]
9 | #[serde(rename_all = "PascalCase")]
10 | #[schemars(rename = "ElectionProof")]
11 | pub struct ElectionProofLotusJson {
12 | #[schemars(with = "LotusJson")]
13 | #[serde(with = "crate::lotus_json")]
14 | v_r_f_proof: VRFProof,
15 | win_count: i64,
16 | }
17 |
impl HasLotusJson for ElectionProof {
    type LotusJson = ElectionProofLotusJson;

    #[cfg(test)]
    fn snapshots() -> Vec<(serde_json::Value, Self)> {
        // Default proof: zero win count, empty (null) VRF proof.
        vec![(
            json!({
                "WinCount": 0,
                "VRFProof": null
            }),
            ElectionProof::default(),
        )]
    }

    fn into_lotus_json(self) -> Self::LotusJson {
        // Field names differ between the domain type (`vrfproof`) and the
        // Lotus JSON struct (`v_r_f_proof`, serialized as "VRFProof").
        let Self {
            win_count,
            vrfproof,
        } = self;
        Self::LotusJson {
            v_r_f_proof: vrfproof,
            win_count,
        }
    }

    fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
        let Self::LotusJson {
            v_r_f_proof,
            win_count,
        } = lotus_json;
        Self {
            win_count,
            vrfproof: v_r_f_proof,
        }
    }
}
54 |
--------------------------------------------------------------------------------
/src/lotus_json/hash_map.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 | use ahash::HashMap as AHashMap;
6 | use std::hash::Hash;
7 |
8 | impl HasLotusJson for AHashMap
9 | where
10 | K: Serialize + DeserializeOwned + Eq + Hash,
11 | V: HasLotusJson,
12 | {
13 | type LotusJson = AHashMap::LotusJson>;
14 |
15 | #[cfg(test)]
16 | fn snapshots() -> Vec<(serde_json::Value, Self)> {
17 | unimplemented!()
18 | }
19 |
20 | fn into_lotus_json(self) -> Self::LotusJson {
21 | self.into_iter()
22 | .map(|(k, v)| (k, v.into_lotus_json()))
23 | .collect()
24 | }
25 |
26 | fn from_lotus_json(value: Self::LotusJson) -> Self {
27 | value
28 | .into_iter()
29 | .map(|(k, v)| (k, V::from_lotus_json(v)))
30 | .collect()
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/src/lotus_json/nonempty.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
6 | impl HasLotusJson for nunny::Vec
7 | where
8 | T: HasLotusJson,
9 | {
10 | type LotusJson = nunny::Vec<::LotusJson>;
11 |
12 | #[cfg(test)]
13 | fn snapshots() -> Vec<(serde_json::Value, Self)> {
14 | unimplemented!("only NonEmpty is tested, below")
15 | }
16 |
17 | fn into_lotus_json(self) -> Self::LotusJson {
18 | self.into_iter_ne()
19 | .map(HasLotusJson::into_lotus_json)
20 | .collect_vec()
21 | }
22 |
23 | fn from_lotus_json(lotus_json: Self::LotusJson) -> Self {
24 | lotus_json
25 | .into_iter_ne()
26 | .map(HasLotusJson::from_lotus_json)
27 | .collect_vec()
28 | }
29 | }
30 |
#[cfg(test)]
mod tests {
    use super::*;
    use ::cid::Cid;
    use nunny::vec as nonempty;
    use quickcheck_macros::quickcheck;

    // Fixed: fn name typo ("shapshots" -> "snapshots"); no external callers.
    #[test]
    fn snapshots() {
        assert_one_snapshot(json!([{"/": "baeaaaaa"}]), nonempty![::cid::Cid::default()]);
    }

    // Element type restored (garbled in transit); `Cid` is the only type
    // this module imports for the purpose.
    #[quickcheck]
    fn assert_unchanged(it: nunny::Vec<Cid>) {
        assert_unchanged_via_json(it)
    }
}
48 |
--------------------------------------------------------------------------------
/src/lotus_json/opt.rs:
--------------------------------------------------------------------------------
1 | // Copyright 2019-2025 ChainSafe Systems
2 | // SPDX-License-Identifier: Apache-2.0, MIT
3 |
4 | use super::*;
5 |
6 | // TODO(forest): https://github.com/ChainSafe/forest/issues/4032
7 | // Remove this - users should use `Option>` instead
8 | // of LotusJson