├── doc
├── .nojekyll
├── dist
│ ├── logo.png
│ ├── index.css
│ └── swagger-initializer.js
├── message_queue.png
├── building_block_diagram.drawio.png
├── api.html
├── message_queue.puml
└── index.html
├── internal
├── p2p
│ ├── README.md
│ ├── utils.go
│ ├── wire.go
│ ├── p2p_mocks.go
│ ├── interface.go
│ ├── peer_manager_options.go
│ ├── peer_options.go
│ └── wire_reader.go
├── blocktx
│ ├── store
│ │ ├── postgresql
│ │ │ ├── migrations
│ │ │ │ ├── 000001_create_blocks.down.sql
│ │ │ │ ├── 000007_processd_by.down.sql
│ │ │ │ ├── 000011_create_blocktx_schema.down.sql
│ │ │ │ ├── 000015_height_index.down.sql
│ │ │ │ ├── 000003_create_transactions.down.sql
│ │ │ │ ├── 000006_create_primary_blocktx.down.sql
│ │ │ │ ├── 000011_create_blocktx_schema.up.sql
│ │ │ │ ├── 000002_create_block_transactions_map.down.sql
│ │ │ │ ├── 000008_add_is_registered.down.sql
│ │ │ │ ├── 000014_remove_index_on_block_height.up.sql
│ │ │ │ ├── 000015_height_index.up.sql
│ │ │ │ ├── 000023_unique_height_is_longest.down.sql
│ │ │ │ ├── 000026_timestamp.down.sql
│ │ │ │ ├── 000018_remove_orphanedyn_field.up.sql
│ │ │ │ ├── 000026_timestamp.up.sql
│ │ │ │ ├── 000024_merkle_tree_index.down.sql
│ │ │ │ ├── 000008_add_is_registered.up.sql
│ │ │ │ ├── 000013_add_block_status.down.sql
│ │ │ │ ├── 000018_remove_orphanedyn_field.down.sql
│ │ │ │ ├── 000006_create_primary_blocktx.up.sql
│ │ │ │ ├── 000024_merkle_tree_index.up.sql
│ │ │ │ ├── 000005_add_inserted_at.down.sql
│ │ │ │ ├── 000014_remove_index_on_block_height.down.sql
│ │ │ │ ├── 000022_block_processing.down.sql
│ │ │ │ ├── 000019_add_is_longest.down.sql
│ │ │ │ ├── 000004_create_functions.down.sql
│ │ │ │ ├── 000020_remove_unique_height_is_longest.up.sql
│ │ │ │ ├── 000010_blocks_drop_inserted_at_num.up.sql
│ │ │ │ ├── 000022_block_processing.up.sql
│ │ │ │ ├── 000002_create_block_transactions_map.up.sql
│ │ │ │ ├── 000025_add_indices.down.sql
│ │ │ │ ├── 000007_processd_by.up.sql
│ │ │ │ ├── 000016_remove_unused_values.up.sql
│ │ │ │ ├── 000013_add_block_status.up.sql
│ │ │ │ ├── 000003_create_transactions.up.sql
│ │ │ │ ├── 000023_unique_height_is_longest.up.sql
│ │ │ │ ├── 000020_remove_unique_height_is_longest.down.sql
│ │ │ │ ├── 000016_remove_unused_values.down.sql
│ │ │ │ ├── 000010_blocks_drop_inserted_at_num.down.sql
│ │ │ │ ├── 000025_add_indices.up.sql
│ │ │ │ ├── 000012_move_tables_to_blocktx_schema.down.sql
│ │ │ │ ├── 000012_move_tables_to_blocktx_schema.up.sql
│ │ │ │ ├── 000017_move_merklepath_to_block_txs_map.down.sql
│ │ │ │ ├── 000017_move_merklepath_to_block_txs_map.up.sql
│ │ │ │ ├── 000001_create_blocks.up.sql
│ │ │ │ ├── 000019_add_is_longest.up.sql
│ │ │ │ ├── 000004_create_functions.up.sql
│ │ │ │ ├── 000005_add_inserted_at.up.sql
│ │ │ │ ├── 000009_inserted_at_timestampz.up.sql
│ │ │ │ ├── 000021_block_transactions.down.sql
│ │ │ │ ├── 000021_block_transactions.up.sql
│ │ │ │ └── 000009_inserted_at_timestampz.down.sql
│ │ │ ├── fixtures
│ │ │ │ ├── mark_block_as_done
│ │ │ │ │ └── blocktx.blocks.yaml
│ │ │ │ ├── get_transactions
│ │ │ │ │ ├── blocktx.registered_transactions.yaml
│ │ │ │ │ ├── blocktx.blocks.yaml
│ │ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ │ ├── clear_data
│ │ │ │ │ └── blocktx.block_processing.yaml
│ │ │ │ ├── block_processing
│ │ │ │ │ ├── blocktx.block_processing.yaml
│ │ │ │ │ └── blocktx.blocks.yaml
│ │ │ │ ├── clear_blocks
│ │ │ │ │ ├── blocktx.blocks.yaml
│ │ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ │ ├── insert_block_transactions
│ │ │ │ │ ├── blocktx.blocks.yaml
│ │ │ │ │ ├── blocktx.registered_transactions.yaml
│ │ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ │ ├── register_transactions
│ │ │ │ │ └── blocktx.registered_transactions.yaml
│ │ │ │ ├── get_block_transactions
│ │ │ │ │ ├── blocktx.blocks.yaml
│ │ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ │ └── insert_block
│ │ │ │ │ └── blocktx.blocks.yaml
│ │ │ ├── get_longest_chain.go
│ │ │ ├── mark_block_as_done.go
│ │ │ ├── register_transactions.go
│ │ │ ├── postgres_helpers.go
│ │ │ ├── get_block_transactions_hashes.go
│ │ │ ├── get_latest_blocks.go
│ │ │ ├── clear_data.go
│ │ │ └── update_block_statuses.go
│ │ ├── store_mocks.go
│ │ └── model.go
│ ├── integration_test
│ │ ├── fixtures
│ │ │ ├── stale_block
│ │ │ │ ├── blocktx.block_transactions.yaml
│ │ │ │ └── blocktx.blocks.yaml
│ │ │ ├── reorg
│ │ │ │ ├── blocktx.registered_transactions.yaml
│ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ ├── merkle_paths
│ │ │ │ ├── blocktx.blocks.yaml
│ │ │ │ └── blocktx.block_transactions.yaml
│ │ │ └── reorg_orphans
│ │ │ │ ├── blocktx.registered_transactions.yaml
│ │ │ │ └── blocktx.block_transactions.yaml
│ │ ├── setup_test.go
│ │ └── merkle_paths_test.go
│ ├── blocktx_mocks.go
│ ├── bcnet
│ │ ├── block_message.go
│ │ └── blocktx_p2p
│ │ │ └── hybrid_message_handler.go
│ └── publish_adapter.go
├── version
│ └── version.go
├── metamorph
│ ├── store
│ │ ├── postgresql
│ │ │ ├── migrations
│ │ │ │ ├── 000002_create_blocks.down.sql
│ │ │ │ ├── 000001_create_transactions.down.sql
│ │ │ │ ├── 000011_response_map.down.sql
│ │ │ │ ├── 000021_mined_at.down.sql
│ │ │ │ ├── 000009_drop_merkle_proof.up.sql
│ │ │ │ ├── 000015_colliding_txs.down.sql
│ │ │ │ ├── 000005_status_locked_by_index.down.sql
│ │ │ │ ├── 000011_response_map.up.sql
│ │ │ │ ├── 000021_mined_at.up.sql
│ │ │ │ ├── 000009_drop_merkle_proof.down.sql
│ │ │ │ ├── 000015_colliding_txs.up.sql
│ │ │ │ ├── 000007_add_full_statuses_updates.down.sql
│ │ │ │ ├── 000008_transaction_status_default.down.sql
│ │ │ │ ├── 000008_transaction_status_default.up.sql
│ │ │ │ ├── 000007_add_full_statuses_updates.up.sql
│ │ │ │ ├── 000005_status_locked_by_index.up.sql
│ │ │ │ ├── 000006_drop_timestamps.up.sql
│ │ │ │ ├── 000010_merkle_path.up.sql
│ │ │ │ ├── 000004_create_timestamps.down.sql
│ │ │ │ ├── 000003_create_functions.down.sql
│ │ │ │ ├── 000004_create_timestamps.up.sql
│ │ │ │ ├── 000006_drop_timestamps.down.sql
│ │ │ │ ├── 000020_requested_transactions.down.sql
│ │ │ │ ├── 000017_competing_txs_default.up.sql
│ │ │ │ ├── 000017_competing_txs_default.down.sql
│ │ │ │ ├── 000020_requested_transactions.up.sql
│ │ │ │ ├── 000002_create_blocks.up.sql
│ │ │ │ ├── 000013_null_adjustment.down.sql
│ │ │ │ ├── 000019_add_status_history_default_constraint.up.sql
│ │ │ │ ├── 000019_add_status_history_default_constraint.down.sql
│ │ │ │ ├── 000018_status_history.down.sql
│ │ │ │ ├── 000018_status_history.up.sql
│ │ │ │ ├── 000012_last_submitted_at_timestampz.up.sql
│ │ │ │ ├── 000010_merkle_path.down.sql
│ │ │ │ ├── 000012_last_submitted_at_timestampz.down.sql
│ │ │ │ ├── 000016_multiple_callbacks.down.sql
│ │ │ │ ├── 000016_multiple_callbacks.up.sql
│ │ │ │ ├── 000013_null_adjustment.up.sql
│ │ │ │ ├── 000003_create_functions.up.sql
│ │ │ │ ├── 000001_create_transactions.up.sql
│ │ │ │ ├── 000014_rearrange_statuses.up.sql
│ │ │ │ └── 000014_rearrange_statuses.down.sql
│ │ │ └── fixtures
│ │ │ │ ├── set_bulk
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── mark_confirmed_requested
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── set_requested
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── set_locked
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── get_double_spends
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── update_mined_double_spend_attempted
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── set_unlocked_by_name
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ ├── update_mined
│ │ │ │ └── metamorph.transactions.yaml
│ │ │ │ └── update_double_spend
│ │ │ │ └── metamorph.transactions.yaml
│ │ └── store_mocks.go
│ ├── integration_test
│ │ └── fixtures
│ │ │ ├── double_spend_detection
│ │ │ └── metamorph.transactions.yaml
│ │ │ └── reannounce_seen
│ │ │ └── metamorph.transactions.yaml
│ ├── types.go
│ ├── metamorph_mocks.go
│ ├── cache.go
│ ├── processor_routines_test.go
│ └── collector_processor.go
├── cache
│ ├── cache_mocks.go
│ └── cache.go
├── callbacker
│ ├── store
│ │ ├── postgresql
│ │ │ ├── migrations
│ │ │ │ ├── 000004_url_mapping.down.sql
│ │ │ │ ├── 000006_tx_callbacks.down.sql
│ │ │ │ ├── 000003_add_allow_batch.down.sql
│ │ │ │ ├── 000003_add_allow_batch.up.sql
│ │ │ │ ├── 000001_create_callbacks.down.sql
│ │ │ │ ├── 000004_url_mapping.up.sql
│ │ │ │ ├── 000002_add_postponed_until.down.sql
│ │ │ │ ├── 000005_drop_postponed_until.up.sql
│ │ │ │ ├── 000008_tx_callbacks_hash.down.sql
│ │ │ │ ├── 000009_retries.down.sql
│ │ │ │ ├── 000007_create_functions.down.sql
│ │ │ │ ├── 000002_add_postponed_until.up.sql
│ │ │ │ ├── 000005_drop_postponed_until.down.sql
│ │ │ │ ├── 000009_retries.up.sql
│ │ │ │ ├── 000008_tx_callbacks_hash.up.sql
│ │ │ │ ├── 000001_create_callbacks.up.sql
│ │ │ │ ├── 000007_create_functions.up.sql
│ │ │ │ └── 000006_tx_callbacks.up.sql
│ │ │ └── fixtures
│ │ │ │ ├── insert
│ │ │ │ └── callbacker.transaction_callbacks.yaml
│ │ │ │ ├── set_sent
│ │ │ │ └── callbacker.transaction_callbacks.yaml
│ │ │ │ └── unset_pending
│ │ │ │ └── callbacker.transaction_callbacks.yaml
│ │ └── store.go
│ ├── callbacker_mocks.go
│ ├── callbacker.go
│ └── callbacker_api
│ │ └── callbacker_api.proto
├── mq
│ └── mq_mocks.go
├── grpc_utils
│ ├── common_api
│ │ └── interface.go
│ ├── connection.go
│ └── server.go
├── validator
│ ├── validator_mocks.go
│ ├── defaultvalidator
│ │ └── testdata
│ │ │ └── 1.bin
│ ├── beef
│ │ └── beef_mocks.go
│ ├── validator.go
│ ├── error.go
│ └── helpers.go
├── k8s_watcher
│ ├── watcher_mocks.go
│ └── k8s_client
│ │ └── client.go
├── multicast
│ ├── multicast_mocks.go
│ ├── ipv6conn_adapter.go
│ └── group_test.go
├── api
│ ├── handler
│ │ ├── merkle_verifier
│ │ │ ├── merkle_verifier_mocks.go
│ │ │ └── merkle_verifier_adapter.go
│ │ ├── handler_mocks.go
│ │ ├── stats_test.go
│ │ ├── server.go
│ │ ├── testdata
│ │ │ └── config.yaml
│ │ ├── helpers.go
│ │ └── stats.go
│ ├── merkle_roots_verifier
│ │ └── allow_all.go
│ ├── arc_mocks.go
│ ├── helpers.go
│ ├── publish_adapter.go
│ └── transaction_handler
│ │ └── bitcoin_node_test.go
├── tx_finder
│ └── tx_finder_mocks.go
├── global
│ ├── global_mocks.go
│ ├── types_test.go
│ └── mocks
│ │ └── stoppable_mock.go
├── broadcaster
│ ├── broadcaster_mocks.go
│ ├── arc_client.go
│ ├── mutli_utxo_consolidator_test.go
│ ├── mutli_utxo_consolidator.go
│ ├── multi_utxo_creator.go
│ └── multi_utxo_creator_test.go
├── beef
│ ├── beef.go
│ └── bump.go
├── node_client
│ └── rpc_client_test.go
└── varintutils
│ └── varintutils.go
├── staticcheck.conf
├── deployments
└── passwd
├── assets
└── logo.png
├── .env.dev
├── pkg
├── api
│ └── config.yaml
├── message_queue
│ └── nats
│ │ └── client
│ │ ├── nats_core
│ │ └── nats_core_mocks.go
│ │ └── test_api
│ │ └── test_api.proto
├── tracing
│ └── helper.go
└── keyset
│ └── key_set_test.go
├── config
├── test_files
│ └── config.yaml
├── load_test.go
└── utils.go
├── .gitignore
├── test
├── config
│ ├── nats-server-host-1.conf
│ ├── nats-server-host-2.conf
│ └── config_common.yaml
├── e2e_globals.go
└── init_test.go
├── ROADMAP.md
├── cmd
├── broadcaster-cli
│ ├── main.go
│ ├── app
│ │ └── keyset
│ │ │ ├── keyset.go
│ │ │ ├── topup
│ │ │ └── topup.go
│ │ │ ├── address
│ │ │ └── address.go
│ │ │ ├── balance
│ │ │ └── balance.go
│ │ │ └── new
│ │ │ └── new.go
│ └── broadcaster-cli-example.yaml
└── services
│ └── k8s_watcher.go
├── .github
├── pull_request_template.md
├── ISSUE_TEMPLATE
│ ├── discussion.md
│ └── bug_report.md
└── workflows
│ ├── e2e.yaml
│ ├── pr-title-check.yml
│ ├── static.yaml
│ ├── go.yaml
│ ├── release.yaml
│ └── static-analysis.yaml
├── scripts
├── generate_docs.sh
└── compare_yamls.go
├── sonar-project.properties
├── .pre-commit-config.yaml
├── CODE_OF_CONDUCT.md
└── .goreleaser.yaml
/doc/.nojekyll:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/internal/p2p/README.md:
--------------------------------------------------------------------------------
1 | Work in progress - DO NOT USE IT
--------------------------------------------------------------------------------
/staticcheck.conf:
--------------------------------------------------------------------------------
1 | checks = ["all", "-SA1019", "-ST1000", "-ST1003"]
2 |
--------------------------------------------------------------------------------
/deployments/passwd:
--------------------------------------------------------------------------------
1 | nobody:*:65534:65534:nobody:/_nonexistent:/bin/false
2 |
--------------------------------------------------------------------------------
/assets/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bitcoin-sv/arc/HEAD/assets/logo.png
--------------------------------------------------------------------------------
/doc/dist/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bitcoin-sv/arc/HEAD/doc/dist/logo.png
--------------------------------------------------------------------------------
/doc/message_queue.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bitcoin-sv/arc/HEAD/doc/message_queue.png
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000001_create_blocks.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE blocks;
2 |
--------------------------------------------------------------------------------
/.env.dev:
--------------------------------------------------------------------------------
1 | # .env
2 |
3 | PG_USERNAME="arcuser"
4 | PG_PASSWORD="arcpass"
5 | PG_DATABASE="main"
6 |
--------------------------------------------------------------------------------
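The .env.dev file above only supplies local Postgres credentials. A minimal sketch, not part of the repository, of how these variables could be assembled into a connection string; the host, port, and sslmode are assumptions for a local development database:

    // Sketch only: builds a Postgres DSN from the .env.dev variables.
    // localhost:5432 and sslmode=disable are assumptions, not taken from the dump.
    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        dsn := fmt.Sprintf("postgres://%s:%s@localhost:5432/%s?sslmode=disable",
            os.Getenv("PG_USERNAME"),
            os.Getenv("PG_PASSWORD"),
            os.Getenv("PG_DATABASE"),
        )
        fmt.Println(dsn)
    }
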
/internal/blocktx/store/postgresql/migrations/000007_processd_by.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE block_processing;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000011_create_blocktx_schema.down.sql:
--------------------------------------------------------------------------------
1 | DROP SCHEMA blocktx;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000015_height_index.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX ix_block_height;
2 |
--------------------------------------------------------------------------------
/internal/version/version.go:
--------------------------------------------------------------------------------
1 | package version
2 |
3 | var (
4 | Version string
5 | Commit string
6 | )
7 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000003_create_transactions.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE transactions;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000002_create_blocks.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE metamorph.blocks;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000006_create_primary_blocktx.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE primary_blocktx;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000011_create_blocktx_schema.up.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA IF NOT EXISTS blocktx;
2 |
--------------------------------------------------------------------------------
/internal/cache/cache_mocks.go:
--------------------------------------------------------------------------------
1 | package cache
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/cache_mock.go . Store
4 |
--------------------------------------------------------------------------------
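This file, like the other *_mocks.go files in the dump, only carries a `//go:generate moq ...` directive for the github.com/matryer/moq tool; running `go generate ./...` regenerates the mocks. A sketch of the usage pattern such generated mocks follow; the cache Store interface is not reproduced in this dump, so the Get signature below is an assumption:

    // Hypothetical shape of a moq-generated mock: one <Method>Func field per
    // interface method, with the interface method delegating to it.
    package main

    import "fmt"

    type StoreMock struct {
        GetFunc func(key string) ([]byte, error)
    }

    func (m *StoreMock) Get(key string) ([]byte, error) { return m.GetFunc(key) }

    func main() {
        m := &StoreMock{
            GetFunc: func(key string) ([]byte, error) { return []byte("cached"), nil },
        }
        v, _ := m.Get("tx:abc")
        fmt.Println(string(v)) // cached
    }
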
/internal/metamorph/store/postgresql/migrations/000001_create_transactions.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE metamorph.transactions;
2 |
--------------------------------------------------------------------------------
/doc/building_block_diagram.drawio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bitcoin-sv/arc/HEAD/doc/building_block_diagram.drawio.png
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000004_url_mapping.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS callbacker.url_mapping;
2 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000006_tx_callbacks.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE callbacker.transaction_callbacks;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000002_create_block_transactions_map.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE block_transactions_map;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000008_add_is_registered.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE transactions DROP COLUMN is_registered
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000014_remove_index_on_block_height.up.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX blocktx.pux_blocks_height;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000015_height_index.up.sql:
--------------------------------------------------------------------------------
1 | CREATE INDEX ix_block_height ON blocktx.blocks(height);
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000023_unique_height_is_longest.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX blocktx.pux_height_is_longest;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000026_timestamp.down.sql:
--------------------------------------------------------------------------------
1 |
2 | ALTER TABLE blocktx.blocks DROP COLUMN timestamp;
3 |
--------------------------------------------------------------------------------
/internal/mq/mq_mocks.go:
--------------------------------------------------------------------------------
1 | package mq
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/message_queue_mock.go . MessageQueueClient
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.blocks DROP COLUMN orphanedyn;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000026_timestamp.up.sql:
--------------------------------------------------------------------------------
1 |
2 | ALTER TABLE blocktx.blocks ADD COLUMN timestamp TIMESTAMPTZ;
3 |
--------------------------------------------------------------------------------
/internal/grpc_utils/common_api/interface.go:
--------------------------------------------------------------------------------
1 | package common_api
2 |
3 | type UnaryEvent interface {
4 | GetEventId() string
5 | }
6 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000011_response_map.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions DROP COLUMN retries;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000021_mined_at.down.sql:
--------------------------------------------------------------------------------
1 |
2 | ALTER TABLE metamorph.transactions DROP COLUMN mined_at;
3 |
--------------------------------------------------------------------------------
/pkg/api/config.yaml:
--------------------------------------------------------------------------------
1 | package: api
2 | generate:
3 | echo-server: true
4 | client: true
5 | models: true
6 | embedded-spec: true
7 |
--------------------------------------------------------------------------------
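This config.yaml appears to follow the oapi-codegen configuration format (a target package plus the echo-server, client, models, and embedded-spec generators). A hedged sketch of the kind of go:generate directive that would consume it; the OpenAPI spec path is a placeholder, not taken from this dump:

    // Placeholder spec path; passing a config file via -config is standard
    // oapi-codegen usage.
    //go:generate oapi-codegen -config config.yaml openapi.yaml
    package api
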
/internal/blocktx/store/store_mocks.go:
--------------------------------------------------------------------------------
1 | package store
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/blocktx_store_mock.go . BlocktxStore
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000003_add_allow_batch.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.callbacks DROP COLUMN allow_batch;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000009_drop_merkle_proof.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions DROP COLUMN merkle_proof;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000015_colliding_txs.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions DROP COLUMN competing_txs;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/store_mocks.go:
--------------------------------------------------------------------------------
1 | package store
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/store_mock.go . MetamorphStore
4 |
--------------------------------------------------------------------------------
/internal/validator/validator_mocks.go:
--------------------------------------------------------------------------------
1 | package validator
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/tx_finder_mock.go . TxFinderI
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000024_merkle_tree_index.down.sql:
--------------------------------------------------------------------------------
1 |
2 | DROP INDEX blocktx.ix_block_transactions_hash_merkle_tree;
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000005_status_locked_by_index.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX idx_metamorph_transactions_locked_by_status;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000011_response_map.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN retries INT DEFAULT 0;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000021_mined_at.up.sql:
--------------------------------------------------------------------------------
1 |
2 | ALTER TABLE metamorph.transactions ADD COLUMN mined_at TIMESTAMPTZ;
3 |
--------------------------------------------------------------------------------
/internal/k8s_watcher/watcher_mocks.go:
--------------------------------------------------------------------------------
1 | package k8s_watcher
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/k8s_client_client_mock.go . K8sClient
4 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000009_drop_merkle_proof.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN merkle_proof TEXT;
2 |
--------------------------------------------------------------------------------
/internal/multicast/multicast_mocks.go:
--------------------------------------------------------------------------------
1 | package multicast
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/message_handler_mock.go ./ MessageHandlerI
4 |
--------------------------------------------------------------------------------
/internal/validator/defaultvalidator/testdata/1.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bitcoin-sv/arc/HEAD/internal/validator/defaultvalidator/testdata/1.bin
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000015_colliding_txs.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN competing_txs TEXT DEFAULT '';
2 |
--------------------------------------------------------------------------------
/internal/validator/beef/beef_mocks.go:
--------------------------------------------------------------------------------
1 | package beef
2 |
3 | //go:generate moq -pkg mocks -skip-ensure -out ./mocks/chaintracker_mock.go . ChainTracker
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000008_add_is_registered.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE transactions ADD COLUMN is_registered BOOLEAN DEFAULT(FALSE) NOT NULL
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000013_add_block_status.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.blocks
2 | DROP COLUMN status,
3 | DROP COLUMN chainwork;
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000003_add_allow_batch.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.callbacks ADD COLUMN allow_batch BOOLEAN DEFAULT false;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000007_add_full_statuses_updates.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions DROP column full_status_updates;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000008_transaction_status_default.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ALTER COLUMN status DROP DEFAULT;
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000008_transaction_status_default.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ALTER COLUMN status SET DEFAULT 3;
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000018_remove_orphanedyn_field.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.block
2 | ADD COLUMN orphanedyn BOOLEAN NOT NULL DEFAULT FALSE;
3 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000001_create_callbacks.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS callbacker.callbacks;
2 |
3 | DROP SCHEMA IF EXISTS callbacker;
4 |
--------------------------------------------------------------------------------
/pkg/message_queue/nats/client/nats_core/nats_core_mocks.go:
--------------------------------------------------------------------------------
1 | package nats_core
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/nats_connection_mock.go . NatsConnection
4 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000007_add_full_statuses_updates.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD column full_status_updates boolean default(false);
2 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000006_create_primary_blocktx.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE primary_blocktx (
2 | host_name TEXT PRIMARY KEY,
3 | primary_until TIMESTAMP
4 | );
5 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000024_merkle_tree_index.up.sql:
--------------------------------------------------------------------------------
1 |
2 | CREATE INDEX ix_block_transactions_hash_merkle_tree ON blocktx.block_transactions(hash, merkle_tree_index);
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000005_status_locked_by_index.up.sql:
--------------------------------------------------------------------------------
1 | CREATE INDEX idx_metamorph_transactions_locked_by_status ON metamorph.transactions(locked_by, status);
2 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000006_drop_timestamps.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.blocks DROP COLUMN inserted_at;
2 | ALTER TABLE metamorph.transactions DROP COLUMN inserted_at;
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000010_merkle_path.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN merkle_path TEXT DEFAULT '' :: TEXT;
2 |
3 | DROP TABLE metamorph.blocks;
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000005_add_inserted_at.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE transactions DROP COLUMN inserted_at;
2 |
3 | ALTER TABLE block_transactions_map DROP COLUMN inserted_at;
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000014_remove_index_on_block_height.down.sql:
--------------------------------------------------------------------------------
1 | CREATE UNIQUE INDEX blocktx.pux_blocks_height ON blocktx.blocks(height)
2 | WHERE
3 | orphanedyn = FALSE;
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000022_block_processing.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.block_processing ADD PRIMARY KEY (block_hash);
2 |
3 | DROP INDEX ix_block_processing_inserted_at;
4 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000004_create_timestamps.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.blocks DROP COLUMN inserted_at;
2 | ALTER TABLE metamorph.transactions DROP COLUMN inserted_at;
3 |
--------------------------------------------------------------------------------
/internal/api/handler/merkle_verifier/merkle_verifier_mocks.go:
--------------------------------------------------------------------------------
1 | package merkle_verifier
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/merkle_roots_verifier_mock.go ../../../global MerkleRootsVerifier
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.blocks
2 | DROP INDEX pux_height_is_longest,
3 | DROP INDEX ix_block_is_longest,
4 | DROP COLUMN is_longest;
5 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000004_url_mapping.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE callbacker.url_mapping (
2 | url TEXT NOT NULL,
3 | instance TEXT NOT NULL,
4 | PRIMARY KEY (url)
5 | );
6 |
--------------------------------------------------------------------------------
/config/test_files/config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | common:
3 | logLevel: INFO
4 | logFormat: text
5 | network: mainnet
6 | tracing:
7 | dialAddr: http://tracing:1234
8 | sample: 10 # sampling in percentage
9 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/stale_block/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1002
2 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
3 | merkle_tree_index: 3
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000004_create_functions.down.sql:
--------------------------------------------------------------------------------
1 | DROP FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int);
2 | DROP FUNCTION reverse_bytes(bytes bytea);
3 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000002_add_postponed_until.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX callbacker.ix_callbacks_postponed_until;
2 |
3 | ALTER TABLE callbacker.callbacks DROP COLUMN postponed_until;
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000005_drop_postponed_until.up.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX callbacker.ix_callbacks_postponed_until;
2 |
3 | ALTER TABLE callbacker.callbacks DROP COLUMN postponed_until;
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000008_tx_callbacks_hash.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX callbacker.ix_transaction_callbacks_hash;
2 |
3 | ALTER TABLE callbacker.transaction_callbacks DROP COLUMN hash;
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000009_retries.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.transaction_callbacks DROP COLUMN retries;
2 | ALTER TABLE callbacker.transaction_callbacks DROP COLUMN disable;
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000003_create_functions.down.sql:
--------------------------------------------------------------------------------
1 | DROP FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int);
2 | DROP FUNCTION reverse_bytes(bytes bytea);
3 |
--------------------------------------------------------------------------------
/pkg/message_queue/nats/client/test_api/test_api.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | option go_package = ".;test_api";
4 |
5 | package test_api;
6 |
7 | message TestMessage {
8 | bool ok = 1;
9 | }
10 |
--------------------------------------------------------------------------------
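TestMessage is a single-field message used by the NATS client tests. A minimal sketch of marshalling it with the standard protobuf runtime; the import path of the generated package is assumed from the file's location in the tree:

    // Sketch: assumes protoc-generated Go code for test_api.proto exists at the
    // import path below (assumption based on the directory layout).
    package main

    import (
        "fmt"
        "log"

        "google.golang.org/protobuf/proto"

        "github.com/bitcoin-sv/arc/pkg/message_queue/nats/client/test_api"
    )

    func main() {
        msg := &test_api.TestMessage{Ok: true}
        b, err := proto.Marshal(msg) // wire-format bytes, e.g. a NATS payload
        if err != nil {
            log.Fatal(err)
        }

        var decoded test_api.TestMessage
        if err := proto.Unmarshal(b, &decoded); err != nil {
            log.Fatal(err)
        }
        fmt.Println(decoded.GetOk()) // true
    }
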
/internal/blocktx/store/postgresql/migrations/000020_remove_unique_height_is_longest.up.sql:
--------------------------------------------------------------------------------
1 | -- This is a temporary solution to allow multiple blocks at the same height
2 | DROP INDEX blocktx.pux_height_is_longest;
3 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000007_create_functions.down.sql:
--------------------------------------------------------------------------------
1 | DROP FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int);
2 | DROP FUNCTION reverse_bytes(bytes bytea);
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000004_create_timestamps.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN inserted_at TIMESTAMPTZ;
2 | ALTER TABLE metamorph.blocks ADD COLUMN inserted_at TIMESTAMPTZ;
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000006_drop_timestamps.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN inserted_at TIMESTAMPTZ;
2 | ALTER TABLE metamorph.blocks ADD COLUMN inserted_at TIMESTAMPTZ;
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000020_requested_transactions.down.sql:
--------------------------------------------------------------------------------
1 |
2 | ALTER TABLE metamorph.transactions DROP COLUMN requested_at;
3 | ALTER TABLE metamorph.transactions DROP COLUMN confirmed_at;
4 |
--------------------------------------------------------------------------------
/internal/tx_finder/tx_finder_mocks.go:
--------------------------------------------------------------------------------
1 | package txfinder
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/woc_client_mock.go . WocClient
4 |
5 | //go:generate moq -pkg mocks -out ./mocks/node_client_mock.go . NodeClient
6 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000017_competing_txs_default.up.sql:
--------------------------------------------------------------------------------
1 | -- Up Migration: Change the default value of 'competing_txs' to NULL
2 | ALTER TABLE metamorph.transactions ALTER COLUMN competing_txs SET DEFAULT NULL;
3 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000010_blocks_drop_inserted_at_num.up.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX ix_blocks_inserted_at;
2 | ALTER TABLE blocks DROP COLUMN inserted_at_num;
3 |
4 | CREATE INDEX ix_blocks_inserted_at ON blocks (inserted_at);
5 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000022_block_processing.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.block_processing DROP CONSTRAINT block_processing_pkey;
2 |
3 | CREATE INDEX ix_block_processing_inserted_at ON blocktx.block_processing (inserted_at);
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000002_add_postponed_until.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.callbacks ADD COLUMN postponed_until TIMESTAMPTZ;
2 |
3 | CREATE INDEX ix_callbacks_postponed_until ON callbacker.callbacks (postponed_until);
4 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000005_drop_postponed_until.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.callbacks ADD COLUMN postponed_until TIMESTAMPTZ;
2 |
3 | CREATE INDEX ix_callbacks_postponed_until ON callbacker.callbacks (postponed_until);
4 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000017_competing_txs_default.down.sql:
--------------------------------------------------------------------------------
1 | -- Down Migration: Revert the default value of 'competing_txs' to an empty string ''
2 | ALTER TABLE metamorph.transactions ALTER COLUMN competing_txs SET DEFAULT '';
3 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000020_requested_transactions.up.sql:
--------------------------------------------------------------------------------
1 |
2 |
3 | ALTER TABLE metamorph.transactions ADD COLUMN requested_at TIMESTAMPTZ NULL;
4 | ALTER TABLE metamorph.transactions ADD COLUMN confirmed_at TIMESTAMPTZ NULL;
5 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000002_create_block_transactions_map.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE block_transactions_map (
2 | blockid BIGINT NOT NULL,
3 | txid BIGINT NOT NULL,
4 | pos BIGINT NOT NULL,
5 | PRIMARY KEY (blockid, txid)
6 | );
7 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000025_add_indices.down.sql:
--------------------------------------------------------------------------------
1 |
2 | DROP INDEX IF EXISTS blocktx.ix_block_transactions_block_id;
3 | DROP INDEX IF EXISTS blocktx.ix_block_blocks_id;
4 |
5 | ALTER TABLE blocktx.block_transactions DROP COLUMN inserted_at;
6 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000009_retries.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.transaction_callbacks ADD COLUMN retries INTEGER NOT NULL DEFAULT 0;
2 | ALTER TABLE callbacker.transaction_callbacks ADD COLUMN disable BOOLEAN NOT NULL DEFAULT FALSE;
3 |
--------------------------------------------------------------------------------
/internal/api/handler/handler_mocks.go:
--------------------------------------------------------------------------------
1 | package handler
2 |
3 | //go:generate moq -pkg mocks -skip-ensure -out ./mocks/default_validator_mock.go . DefaultValidator
4 |
5 | //go:generate moq -pkg mocks -skip-ensure -out ./mocks/beef_validator_mock.go . BeefValidator
6 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000007_processd_by.up.sql:
--------------------------------------------------------------------------------
1 |
2 | CREATE TABLE block_processing (
3 | block_hash BYTEA PRIMARY KEY,
4 | processed_by TEXT DEFAULT '' NOT NULL,
5 | inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
6 | );
7 |
--------------------------------------------------------------------------------
/internal/p2p/utils.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "log/slog"
5 | "strings"
6 | )
7 |
8 | func slogUpperString(key, val string) slog.Attr {
9 | return slog.String(key, strings.ToUpper(val))
10 | }
11 |
12 | const slogLvlTrace slog.Level = slog.LevelDebug - 4
13 |
--------------------------------------------------------------------------------
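slogLvlTrace sits one step below slog.LevelDebug (debug is -4, so trace is -8). A small sketch of how these package-internal helpers could be exercised when constructing a logger; the function name, handler choice, and message are illustrative only:

    // Sketch in the same package as utils.go, since both identifiers are unexported.
    package p2p

    import (
        "context"
        "log/slog"
        "os"
    )

    func exampleTraceLogging() {
        // Let the custom trace level through the handler.
        logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{
            Level: slogLvlTrace,
        }))

        // Emit a trace-level record with an upper-cased attribute value.
        logger.Log(context.Background(), slogLvlTrace, "peer connected",
            slogUpperString("network", "mainnet")) // network=MAINNET
    }
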
/internal/metamorph/store/postgresql/fixtures/set_bulk/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | locked_by: metamorph-3
3 | status: 60
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | .vscode/
3 | build/
4 | results/
5 | *.key
6 | .DS_Store
7 | cov.out
8 | cov_short.out
9 | gosec-report.json
10 | .scannerwork
11 | gotest.out
12 | *.env
13 | report.xml
14 | broadcaster-cli.yaml
15 | coverage_report.html
16 | coverage_report_short.html
17 |
--------------------------------------------------------------------------------
/doc/dist/index.css:
--------------------------------------------------------------------------------
1 | html {
2 | box-sizing: border-box;
3 | overflow: -moz-scrollbars-vertical;
4 | overflow-y: scroll;
5 | }
6 |
7 | *,
8 | *:before,
9 | *:after {
10 | box-sizing: inherit;
11 | }
12 |
13 | body {
14 | margin: 0;
15 | background: #fafafa;
16 | }
17 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000016_remove_unused_values.up.sql:
--------------------------------------------------------------------------------
1 | -- Remove unused fields
2 | ALTER TABLE blocktx.block_transactions_map DROP COLUMN pos;
3 | ALTER TABLE blocktx.transactions DROP COLUMN source;
4 |
5 | -- Remove unused table
6 | DROP TABLE blocktx.primary_blocktx;
7 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000013_add_block_status.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.blocks
2 | ADD COLUMN status INTEGER NOT NULL DEFAULT 10, -- 10 is equal to status LONGEST
3 | ADD COLUMN chainwork TEXT NOT NULL DEFAULT '0'; -- chainwork is of type *big.Int, stored as TEXT for simplicity
4 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000003_create_transactions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE transactions (
2 | id BIGSERIAL PRIMARY KEY,
3 | hash BYTEA NOT NULL,
4 | source TEXT,
5 | merkle_path TEXT DEFAULT('')
6 | );
7 |
8 | CREATE UNIQUE INDEX ux_transactions_hash ON transactions (hash);
9 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000023_unique_height_is_longest.up.sql:
--------------------------------------------------------------------------------
1 | -- This will make sure that there can only be ONE block at any
2 | -- given height that is considered part of the LONGEST chain.
3 | CREATE UNIQUE INDEX pux_height_is_longest ON blocktx.blocks(height)
4 | WHERE is_longest;
5 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000020_remove_unique_height_is_longest.down.sql:
--------------------------------------------------------------------------------
1 | -- This will make sure that there can only be ONE block at any
2 | -- given height that is considered part of the LONGEST chain.
3 | CREATE UNIQUE INDEX pux_height_is_longest ON blocktx.blocks(height)
4 | WHERE is_longest;
5 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000016_remove_unused_values.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.block_transactions_map ADD COLUMN pos BIGINT NOT NULL;
2 | ALTER TABLE blocktx.transactions ADD COLUMN source TEXT;
3 |
4 | CREATE TABLE primary_blocktx (
5 | host_name TEXT PRIMARY KEY,
6 | primary_until TIMESTAMP
7 | );
8 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/mark_confirmed_requested/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x3296b4cca1c8b1de10b7d4a259963450bf0ed8b481f1fc79e2fb956cfe42242f
2 | locked_by: metamorph-1
3 | status: 90
4 | stored_at: 2023-10-01 14:04:00
5 | last_submitted_at: 2023-10-01 13:30:00
6 | requested_at: 2023-10-01 13:30:00
7 |
--------------------------------------------------------------------------------
/internal/p2p/wire.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "io"
5 |
6 | "github.com/libsv/go-p2p/wire"
7 | )
8 |
9 | type Message interface {
10 | Bsvdecode(io.Reader, uint32, wire.MessageEncoding) error
11 | BsvEncode(io.Writer, uint32, wire.MessageEncoding) error
12 | Command() string
13 | MaxPayloadLength(uint32) uint64
14 | }
15 |
--------------------------------------------------------------------------------
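The Message interface above mirrors the wire message contract from github.com/libsv/go-p2p. A hedged stub, not from the repository, showing the method set a custom message would need in order to satisfy it; the command string and payload limit are arbitrary:

    // Stub only: a no-op message type that satisfies p2p.Message.
    package p2p

    import (
        "io"

        "github.com/libsv/go-p2p/wire"
    )

    type noopMsg struct{}

    func (noopMsg) Bsvdecode(_ io.Reader, _ uint32, _ wire.MessageEncoding) error { return nil }
    func (noopMsg) BsvEncode(_ io.Writer, _ uint32, _ wire.MessageEncoding) error { return nil }
    func (noopMsg) Command() string                                               { return "noop" }
    func (noopMsg) MaxPayloadLength(_ uint32) uint64                              { return 0 }

    // Compile-time assertion that noopMsg implements Message.
    var _ Message = noopMsg{}
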
/internal/blocktx/store/postgresql/migrations/000010_blocks_drop_inserted_at_num.down.sql:
--------------------------------------------------------------------------------
1 | DROP INDEX ix_blocks_inserted_at;
2 |
3 | ALTER TABLE blocks
4 | ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
5 |
6 | CREATE INDEX ix_blocks_inserted_at ON blocks (inserted_at_num);
7 |
--------------------------------------------------------------------------------
/test/config/nats-server-host-1.conf:
--------------------------------------------------------------------------------
1 | server_name=N1
2 | listen=4222
3 | http=8222
4 |
5 | jetstream={
6 | store_dir=/data/jetstream
7 | max_mem=2G
8 | max_file=5G
9 | }
10 |
11 | cluster={
12 | name=JSC
13 | listen=0.0.0.0:4245
14 |
15 | routes=[
16 | nats://nats-server-1:4245
17 | nats://nats-server-2:4245
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
/test/config/nats-server-host-2.conf:
--------------------------------------------------------------------------------
1 | server_name=N2
2 | listen=4222
3 | http=8222
4 |
5 | jetstream={
6 | store_dir=/data/jetstream
7 | max_mem=2G
8 | max_file=5G
9 | }
10 |
11 | cluster={
12 | name=JSC
13 | listen=0.0.0.0:4245
14 |
15 | routes=[
16 | nats://nats-server-1:4245
17 | nats://nats-server-2:4245
18 | ]
19 | }
20 |
--------------------------------------------------------------------------------
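Both NATS servers listen on client port 4222 and form a two-node JetStream cluster over port 4245. A minimal sketch of connecting with a fallback across the two hosts using the github.com/nats-io/nats.go client; the subject name is illustrative, and the hostnames are taken from the cluster routes above:

    // Sketch: connects to whichever cluster node is reachable.
    package main

    import (
        "log"

        "github.com/nats-io/nats.go"
    )

    func main() {
        nc, err := nats.Connect("nats://nats-server-1:4222,nats://nats-server-2:4222")
        if err != nil {
            log.Fatal(err)
        }
        defer nc.Close()

        if err := nc.Publish("test.subject", []byte("hello")); err != nil {
            log.Fatal(err)
        }
    }
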
/ROADMAP.md:
--------------------------------------------------------------------------------
1 | # ROADMAP
2 |
3 | ## Update of transactions in case of block reorgs
4 |
5 | ARC updates the statuses of transactions in case of block reorgs. Transactions which are not in the block of the longest chain will be updated to `UNKNOWN` status and re-broadcasted. Transactions which are included in the block of the longest chain are updated to `MINED` status.
6 |
--------------------------------------------------------------------------------
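A compact sketch of the reorg rule described above, using hypothetical types rather than ARC's actual implementation: transactions found in the new longest-chain block go to MINED, the rest fall back to UNKNOWN and are queued for re-broadcast:

    // Hypothetical status model; ARC's real status handling lives in metamorph.
    package main

    import "fmt"

    type Status string

    const (
        StatusUnknown Status = "UNKNOWN"
        StatusMined   Status = "MINED"
    )

    // applyReorg returns the new status per transaction ID and the IDs that
    // should be re-broadcast, given the set of txs in the new longest block.
    func applyReorg(affected []string, inLongestBlock map[string]bool) (map[string]Status, []string) {
        statuses := make(map[string]Status, len(affected))
        var rebroadcast []string
        for _, id := range affected {
            if inLongestBlock[id] {
                statuses[id] = StatusMined
            } else {
                statuses[id] = StatusUnknown
                rebroadcast = append(rebroadcast, id)
            }
        }
        return statuses, rebroadcast
    }

    func main() {
        statuses, rebroadcast := applyReorg(
            []string{"tx1", "tx2"},
            map[string]bool{"tx1": true},
        )
        fmt.Println(statuses, rebroadcast) // map[tx1:MINED tx2:UNKNOWN] [tx2]
    }
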
/internal/blocktx/store/postgresql/migrations/000025_add_indices.up.sql:
--------------------------------------------------------------------------------
1 |
2 | CREATE INDEX IF NOT EXISTS ix_block_transactions_block_id ON blocktx.block_transactions(block_id);
3 | CREATE INDEX IF NOT EXISTS ix_block_blocks_id ON blocktx.blocks(id);
4 |
5 | ALTER TABLE blocktx.block_transactions ADD COLUMN inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP;
6 |
--------------------------------------------------------------------------------
/internal/p2p/p2p_mocks.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/peer_mock.go . PeerI
4 |
5 | //go:generate moq -pkg mocks -out ./mocks/message_handler_mock.go . MessageHandlerI
6 |
7 | //go:generate moq -pkg mocks -out ./mocks/wire_msg_mock.go . Message
8 |
9 | //go:generate moq -pkg mocks -out ./mocks/dialer_mock.go . Dialer
10 |
--------------------------------------------------------------------------------
/internal/validator/validator.go:
--------------------------------------------------------------------------------
1 | package validator
2 |
3 | type FeeValidation byte
4 |
5 | const (
6 | NoneFeeValidation FeeValidation = iota
7 | StandardFeeValidation
8 | CumulativeFeeValidation
9 | )
10 |
11 | const DustLimit = 1
12 |
13 | type ScriptValidation byte
14 |
15 | const (
16 | NoneScriptValidation ScriptValidation = iota
17 | StandardScriptValidation
18 | )
19 |
--------------------------------------------------------------------------------
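FeeValidation and ScriptValidation are small enum-like types. A sketch of caller code selecting a fee-validation mode; the describe helper is hypothetical, and since the package is internal the import only works from within the arc module:

    // Sketch: switches over the exported FeeValidation constants.
    package main

    import (
        "fmt"

        "github.com/bitcoin-sv/arc/internal/validator"
    )

    func describe(fv validator.FeeValidation) string {
        switch fv {
        case validator.NoneFeeValidation:
            return "no fee validation"
        case validator.StandardFeeValidation:
            return "standard fee validation"
        case validator.CumulativeFeeValidation:
            return "cumulative fee validation"
        default:
            return "unknown"
        }
    }

    func main() {
        fmt.Println(describe(validator.CumulativeFeeValidation))
    }
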
/internal/metamorph/store/postgresql/migrations/000002_create_blocks.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE metamorph.blocks (
2 | hash BYTEA PRIMARY KEY,
3 | processed_at TIMESTAMPTZ,
4 | inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL
5 | );
6 |
7 | CREATE INDEX ix_metamorph_blocks_inserted_at_num ON metamorph.blocks (inserted_at_num);
8 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000012_move_tables_to_blocktx_schema.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.block_processing SET SCHEMA public;
2 | ALTER TABLE blocktx.block_transactions_map SET SCHEMA public;
3 | ALTER TABLE blocktx.blocks SET SCHEMA public;
4 | ALTER TABLE blocktx.primary_blocktx SET SCHEMA public;
5 | ALTER TABLE blocktx.transactions SET SCHEMA public;
6 | SET search_path TO public;
7 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000012_move_tables_to_blocktx_schema.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE public.block_processing SET SCHEMA blocktx;
2 | ALTER TABLE public.block_transactions_map SET SCHEMA blocktx;
3 | ALTER TABLE public.blocks SET SCHEMA blocktx;
4 | ALTER TABLE public.primary_blocktx SET SCHEMA blocktx;
5 | ALTER TABLE public.transactions SET SCHEMA blocktx;
6 | SET search_path TO blocktx;
7 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000017_move_merklepath_to_block_txs_map.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocktx.transactions
2 | ADD COLUMN merkle_path TEXT DEFAULT ('');
3 |
4 | UPDATE blocktx.transactions AS t
5 | SET merkle_path = btm.merkle_path
6 | FROM blocktx.block_transactions_map btm
7 | WHERE t.id = btm.txid;
8 |
9 | ALTER TABLE blocktx.block_transactions_map DROP COLUMN merkle_path;
10 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000013_null_adjustment.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ALTER COLUMN stored_at DROP NOT NULL;
2 |
3 | ALTER TABLE metamorph.transactions ALTER COLUMN last_submitted_at DROP NOT NULL;
4 |
5 | ALTER TABLE metamorph.transactions ALTER COLUMN locked_by DROP NOT NULL;
6 |
7 | ALTER TABLE metamorph.transactions ALTER COLUMN full_status_updates DROP NOT NULL;
8 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "log"
5 |
6 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app"
7 | )
8 |
9 | func main() {
10 | err := run()
11 | if err != nil {
12 | log.Fatalf("failed to run broadcaster-cli: %v", err)
13 | }
14 | }
15 |
16 | func run() error {
17 | err := app.Execute()
18 | if err != nil {
19 | return err
20 | }
21 |
22 | return nil
23 | }
24 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000019_add_status_history_default_constraint.up.sql:
--------------------------------------------------------------------------------
1 | -- Up Migration: Add default constraint to 'status_history' column in 'metamorph.transactions' table
2 | -- This migration sets the default value of the 'status_history' column to an empty JSON array ([]) for future records.
3 |
4 | ALTER TABLE metamorph.transactions
5 | ALTER COLUMN status_history SET DEFAULT '[]'::JSONB;
6 |
--------------------------------------------------------------------------------
/internal/api/merkle_roots_verifier/allow_all.go:
--------------------------------------------------------------------------------
1 | package merklerootsverifier
2 |
3 | import (
4 | "github.com/bitcoin-sv/arc/internal/global"
5 | "github.com/bitcoin-sv/arc/internal/global/mocks"
6 | )
7 |
8 | // NewAllowAllVerifier returns a MerkleRootsVerifier that accepts all merkle roots.
9 | // For test purposes only!
10 | func NewAllowAllVerifier() global.BlocktxClient {
11 | return &mocks.BlocktxClientMock{}
12 | }
13 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/mark_block_as_done/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-15 14:00:00
2 | id: 1
3 | hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
4 | prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000
5 | merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410
6 | height: 822012
7 | status: 10
8 | chainwork: '1234'
9 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000008_tx_callbacks_hash.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE callbacker.transaction_callbacks ADD COLUMN hash BYTEA;
2 |
3 | UPDATE callbacker.transaction_callbacks SET hash = reverse_bytes(decode(tx_id, 'hex')) WHERE hash IS NULL;
4 |
5 | CREATE INDEX ix_transaction_callbacks_hash ON callbacker.transaction_callbacks (hash);
6 |
7 | ALTER TABLE callbacker.transaction_callbacks ALTER COLUMN hash SET NOT NULL;
8 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000019_add_status_history_default_constraint.down.sql:
--------------------------------------------------------------------------------
1 | -- Down Migration: Remove default constraint from 'status_history' column in 'metamorph.transactions' table
2 | -- This migration removes the default value constraint from the 'status_history' column, so new records will require an explicit value or default to NULL.
3 |
4 | ALTER TABLE metamorph.transactions
5 | ALTER COLUMN status_history DROP DEFAULT;
6 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/fixtures/insert/callbacker.transaction_callbacks.yaml:
--------------------------------------------------------------------------------
1 | - url: https://arc-callback-1/callback
2 | token: token
3 | tx_id: 96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28
4 | tx_status: "MINED"
5 | timestamp: 2024-09-01T12:25:00+00:00
6 | block_hash: 0000000000000000072be13e375ffd673b1f37b0ec5ecde7b7e15b01f5685d07
7 | block_height: 4524235
8 | hash: 0x285ccf9955a9dcef803dcc766fbcb257bfedd134ce19cc6ead3bdc96baf8cb96
9 |
--------------------------------------------------------------------------------
/internal/callbacker/callbacker_mocks.go:
--------------------------------------------------------------------------------
1 | package callbacker
2 |
3 | //go:generate moq -skip-ensure -pkg mocks -out ./mocks/sender_mock.go ./ SenderI
4 |
5 | //go:generate moq -pkg mocks -out ./mocks/callbacker_api_client_mock.go ./callbacker_api/ CallbackerAPIClient
6 |
7 | //go:generate moq -pkg mocks -out ./mocks/processor_store_mock.go ./store/ ProcessorStore
8 |
9 | //go:generate moq -skip-ensure -pkg mocks -out ./mocks/health_watch_server_mock.go ./ HealthWatchServer
10 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000001_create_callbacks.up.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA callbacker;
2 |
3 | CREATE TABLE callbacker.callbacks (
4 | id BIGSERIAL PRIMARY KEY,
5 | url TEXT NOT NULL,
6 | token TEXT NOT NULL,
7 | tx_id TEXT NOT NULL,
8 | tx_status TEXT NOT NULL,
9 | extra_info TEXT,
10 | merkle_path TEXT,
11 | block_hash TEXT,
12 | block_height BIGINT,
13 | competing_txs TEXT,
14 | timestamp TIMESTAMPTZ NOT NULL
15 | );
16 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000018_status_history.down.sql:
--------------------------------------------------------------------------------
1 | -- Down Migration: Remove columns 'status_history' and 'last_modified',
2 | -- and re-add columns 'mined_at' and 'announced_at' to 'metamorph.transactions' table
3 |
4 | ALTER TABLE metamorph.transactions
5 | DROP COLUMN status_history,
6 | DROP COLUMN last_modified;
7 |
8 | ALTER TABLE metamorph.transactions
9 | ADD COLUMN mined_at TIMESTAMPTZ,
10 | ADD COLUMN announced_at TIMESTAMPTZ;
11 |
--------------------------------------------------------------------------------
/internal/api/arc_mocks.go:
--------------------------------------------------------------------------------
1 | package api
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/client_interface_mock.go ../../pkg/api ClientInterface
4 |
5 | //go:generate moq -pkg mocks -out ./mocks/script_verifier_mock.go ../../internal/api ScriptVerifier
6 |
7 | //go:generate moq -pkg mocks -out ./mocks/default_server_health_mock.go ../../internal/api ArcDefaultHandlerHealth
8 |
9 | //go:generate moq -pkg mocks -out ./mocks/chaintracker_mock.go ../../internal/validator/beef ChainTracker
10 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## Description of Changes
2 |
3 | Provide a brief description of the changes you've made.
4 |
5 | ## Testing Procedure
6 |
7 | - [ ] I have added new unit tests
8 | - [ ] All tests pass locally
9 | - [ ] I have tested manually in my local environment
10 |
11 | ## Checklist:
12 |
13 | - [ ] I have performed a self-review of my own code
14 | - [ ] I have made corresponding changes to the documentation
15 | - [ ] I have updated `CHANGELOG.md` with my changes
16 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000018_status_history.up.sql:
--------------------------------------------------------------------------------
1 | -- Up Migration: Add columns 'status_history' (JSONB) and 'last_modified' (TIMESTAMPTZ),
2 | -- and remove columns 'mined_at' and 'announced_at' from 'metamorph.transactions' table
3 |
4 | ALTER TABLE metamorph.transactions
5 | ADD COLUMN status_history JSONB,
6 | ADD COLUMN last_modified TIMESTAMPTZ;
7 |
8 | ALTER TABLE metamorph.transactions
9 | DROP COLUMN mined_at,
10 | DROP COLUMN announced_at;
11 |
--------------------------------------------------------------------------------
/internal/blocktx/blocktx_mocks.go:
--------------------------------------------------------------------------------
1 | package blocktx
2 |
3 | // from ./blocktx_api
4 | //go:generate moq -pkg mocks -out ./mocks/blocktx_api_mock.go ./blocktx_api BlockTxAPIClient
5 |
6 | // from health_check.go
7 | //go:generate moq -pkg mocks -out ./mocks/health_watch_server_mock.go . HealthWatchServer
8 |
9 | // from server.go
10 | //go:generate moq -pkg mocks -out ./mocks/blocktx_processor_mock.go . ProcessorI
11 | //go:generate moq -pkg mocks -out ./mocks/blocktx_peer_manager_mock.go . PeerManager
12 |
--------------------------------------------------------------------------------
/internal/metamorph/integration_test/fixtures/double_spend_detection/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x887edceb52a70bc9b5e43b234eb7cd200d667dc5c3544a04438a6df810ae758e
2 | locked_by: metamorph-3
3 | status: 90
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | - hash: 0xb6b7a037d5162b3cf3862558a0412a8006bf25494975f4bdc65d106bcedf4ad6
7 | locked_by: metamorph-1
8 | status: 60
9 | stored_at: 2023-10-01 14:00:00
10 | last_submitted_at: 2023-10-01 14:00:00
11 |
--------------------------------------------------------------------------------
/internal/validator/error.go:
--------------------------------------------------------------------------------
1 | package validator
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/bitcoin-sv/arc/pkg/api"
7 | )
8 |
9 | type Error struct {
10 | Err error
11 | ArcErrorStatus api.StatusCode
12 | }
13 |
14 | func NewError(err error, status api.StatusCode) *Error {
15 | return &Error{
16 | Err: err,
17 | ArcErrorStatus: status,
18 | }
19 | }
20 |
21 | func (e *Error) Error() string {
22 | return fmt.Sprintf("arc error %d: %s", e.ArcErrorStatus, e.Err.Error())
23 | }
24 |
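
A minimal caller-side sketch (the handler package name and the statusFromErr helper are hypothetical, not taken from this repo) showing how the ARC status carried by *Error can be recovered with errors.As:

package handler // hypothetical caller package

import (
	"errors"

	"github.com/bitcoin-sv/arc/internal/validator"
	"github.com/bitcoin-sv/arc/pkg/api"
)

// statusFromErr is a hypothetical helper: it extracts the ARC status carried by a
// *validator.Error and falls back to a generic status for any other error.
func statusFromErr(err error, fallback api.StatusCode) api.StatusCode {
	var valErr *validator.Error
	if errors.As(err, &valErr) {
		return valErr.ArcErrorStatus
	}
	return fallback
}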
--------------------------------------------------------------------------------
/internal/api/handler/stats_test.go:
--------------------------------------------------------------------------------
1 | package handler
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/prometheus/client_golang/prometheus/testutil"
7 | "github.com/stretchr/testify/require"
8 | )
9 |
10 | func TestNewStats(t *testing.T) {
11 | t.Run("register, add, unregister stats", func(t *testing.T) {
12 | sut, err := NewStats()
13 | require.NoError(t, err)
14 |
15 | sut.Add(5)
16 |
17 | require.Equal(t, 5.0, testutil.ToFloat64(sut.apiTxSubmissions))
18 |
19 | sut.UnregisterStats()
20 | })
21 | }
22 |
--------------------------------------------------------------------------------
/internal/global/global_mocks.go:
--------------------------------------------------------------------------------
1 | package global
2 |
3 | //go:generate moq -pkg mocks -out ./mocks/transaction_handler_mock.go . TransactionHandler
4 |
5 | //go:generate moq -pkg mocks -out ./mocks/blocktx_client_mock.go . BlocktxClient
6 |
7 | //go:generate moq -pkg mocks -out ./mocks/stoppable_mock.go . Stoppable
8 |
9 | //go:generate moq -pkg mocks -out ./mocks/stoppable_with_error_mock.go . StoppableWithError
10 |
11 | //go:generate moq -pkg mocks -out ./mocks/stoppable_with_context_mock.go . StoppableWithContext
12 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/reorg/blocktx.registered_transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | inserted_at: 2023-12-15 14:00:00
3 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
4 | inserted_at: 2023-12-15 14:00:00
5 | - hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f
6 | inserted_at: 2023-12-15 14:00:00
7 | - hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
8 | inserted_at: 2023-12-15 14:00:00
9 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000012_last_submitted_at_timestampz.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN last_submitted_at TIMESTAMPTZ;
2 |
3 | UPDATE metamorph.transactions
4 | SET last_submitted_at = TO_TIMESTAMP(inserted_at_num::text, 'YYYYMMDDHH24');
5 |
6 | CREATE INDEX ix_metamorph_transactions_last_submitted_at ON metamorph.transactions (last_submitted_at);
7 |
8 | DROP INDEX metamorph.ix_metamorph_transactions_inserted_at_num;
9 | ALTER TABLE metamorph.transactions DROP COLUMN inserted_at_num;
10 |
--------------------------------------------------------------------------------
/scripts/generate_docs.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if ! [ -x "$(command -v widdershins)" ]; then
4 | npm install -g widdershins
5 | fi
6 |
7 | if ! [ -x "$(command -v swagger-cli)" ]; then
8 | npm install -g swagger-cli
9 | fi
10 |
11 | swagger-cli bundle -o pkg/api/arc.json pkg/api/arc.yaml
12 | cp pkg/api/arc.json doc/
13 | cp web/logo.png doc/dist/
14 | widdershins --search false --language_tabs 'http:HTTP' 'javascript:JavaScript' 'java:Java' 'go:Go' 'ruby:Ruby' 'python:Python' 'shell:curl' --summary pkg/api/arc.json -o doc/api.md
15 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-15 14:00:00
2 | id: 1001
3 | hash: 0x44ef5f3881cc92e97e0c3dcfcfc6cfb9f3de26c80e664ad62f02860200000000
4 | prevhash: 0xf5763737f86cd3ec83a8474d02f627aae912b0fb1097a7e648c31b4400000000
5 | merkleroot: 0xc991fcf57466c387779b009b13c85a8ab31f2e24a3b04e97d2dcbda608f7f2d0
6 | height: 1661719
7 | processed_at: 2023-12-15 14:10:00
8 | size: 86840000
9 | tx_count: 9
10 | status: 10
11 | is_longest: true
12 | chainwork: '62209952899966'
13 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.registered_transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | inserted_at: 2023-12-15 14:00:00
3 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
4 | inserted_at: 2023-12-15 14:00:00
5 | - hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f
6 | inserted_at: 2023-12-15 14:00:00
7 | - hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
8 | inserted_at: 2023-12-15 14:00:00
9 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.registered_transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
2 | inserted_at: 2023-12-10 14:00:00
3 | - hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0
4 | inserted_at: 2023-12-15 14:00:00
5 | - hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357
6 | inserted_at: 2023-12-15 14:00:00
7 | - hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e
8 | inserted_at: 2023-12-15 14:00:00
9 |
--------------------------------------------------------------------------------
/internal/metamorph/types.go:
--------------------------------------------------------------------------------
1 | package metamorph
2 |
3 | import (
4 | "github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
5 | "github.com/bitcoin-sv/arc/internal/metamorph/store"
6 | "github.com/bsv-blockchain/go-bt/v2/chainhash"
7 | )
8 |
9 | type ProcessorRequest struct {
10 | Data *store.TransactionData
11 | ResponseChannel chan StatusAndError
12 | }
13 |
14 | type StatusAndError struct {
15 | Hash *chainhash.Hash
16 | Status metamorph_api.Status
17 | Err error
18 | CompetingTxs []string
19 | }
20 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000017_move_merklepath_to_block_txs_map.up.sql:
--------------------------------------------------------------------------------
1 | -- Move merkle_path to block_transactions_map because, when there are
2 | -- competing blocks, there may be multiple merkle paths for one transaction.
3 | ALTER TABLE blocktx.block_transactions_map
4 | ADD COLUMN merkle_path TEXT DEFAULT ('');
5 |
6 | UPDATE blocktx.block_transactions_map AS btm
7 | SET merkle_path = t.merkle_path
8 | FROM blocktx.transactions t
9 | WHERE btm.txid = t.id;
10 |
11 | ALTER TABLE blocktx.transactions DROP COLUMN merkle_path;
12 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000010_merkle_path.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions DROP COLUMN merkle_path;
2 |
3 | CREATE TABLE metamorph.blocks (
4 | hash BYTEA PRIMARY KEY,
5 | processed_at TIMESTAMPTZ,
6 | inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL
7 | );
8 |
9 | CREATE INDEX ix_metamorph_blocks_inserted_at_num ON metamorph.blocks (inserted_at_num);
10 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000012_last_submitted_at_timestampz.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE metamorph.transactions ADD COLUMN inserted_at_num INTEGER NOT NULL DEFAULT 0;
2 |
3 | UPDATE metamorph.transactions
4 | SET inserted_at_num = TO_NUMBER(TO_CHAR(last_submitted_at, 'yyyymmddhh24'),'9999999999');
5 |
6 | CREATE INDEX ix_metamorph_transactions_inserted_at_num ON metamorph.transactions (inserted_at_num);
7 |
8 | DROP INDEX metamorph.ix_metamorph_transactions_last_submitted_at;
9 | ALTER TABLE metamorph.transactions DROP COLUMN last_submitted_at;
10 |
--------------------------------------------------------------------------------
/test/e2e_globals.go:
--------------------------------------------------------------------------------
1 | //go:build e2e
2 |
3 | package test
4 |
5 | import (
6 | "github.com/ordishs/go-bitcoin"
7 | )
8 |
9 | var (
10 | nodeHost = "node1"
11 |
12 | arcEndpoint = "http://api:9090/" // nolint:revive // unsecure url scheme in test
13 | arcEndpointV1Tx = arcEndpoint + v1Tx
14 | arcEndpointV1Txs = arcEndpoint + v1Txs
15 | )
16 |
17 | const (
18 | nodePort = 18332
19 | nodeUser = "bitcoin"
20 | nodePassword = "bitcoin"
21 | )
22 |
23 | const (
24 | v1Tx = "v1/tx"
25 | v1Txs = "v1/txs"
26 | )
27 |
28 | var bitcoind *bitcoin.Bitcoind
29 |
--------------------------------------------------------------------------------
/internal/grpc_utils/connection.go:
--------------------------------------------------------------------------------
1 | package grpc_utils
2 |
3 | import (
4 | "google.golang.org/grpc"
5 |
6 | "github.com/bitcoin-sv/arc/config"
7 | )
8 |
9 | func DialGRPC(address string, prometheusEndpoint string, grpcMessageSize int, tracingConfig *config.TracingConfig) (*grpc.ClientConn, error) {
10 | dialOpts, err := GetGRPCClientOpts(prometheusEndpoint, grpcMessageSize, tracingConfig)
11 | if err != nil {
12 | return nil, err
13 | }
14 |
15 | conn, err := grpc.NewClient(address, dialOpts...)
16 | if err != nil {
17 | return nil, err
18 | }
19 |
20 | return conn, nil
21 | }
22 |
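
A usage sketch; the address and message size are illustrative values, and it is assumed (not verified here) that GetGRPCClientOpts accepts a nil *config.TracingConfig:

package main

import (
	"log"

	"github.com/bitcoin-sv/arc/internal/grpc_utils"
)

func main() {
	// Illustrative values only; real deployments take these from config.
	// The nil tracing config is an assumption, see the note above.
	conn, err := grpc_utils.DialGRPC("localhost:8011", "", 100000000, nil)
	if err != nil {
		log.Fatalf("failed to dial gRPC service: %v", err)
	}
	defer conn.Close()
	// conn can now back a generated gRPC client (e.g. a metamorph or blocktx API client).
}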
--------------------------------------------------------------------------------
/internal/multicast/ipv6conn_adapter.go:
--------------------------------------------------------------------------------
1 | package multicast
2 |
3 | import (
4 | "io"
5 | "net"
6 |
7 | "golang.org/x/net/ipv6"
8 | )
9 |
10 | var (
11 | _ io.Reader = (*ipv6ConnAdapter)(nil)
12 | _ io.Writer = (*ipv6ConnAdapter)(nil)
13 | )
14 |
15 | type ipv6ConnAdapter struct {
16 | Conn *ipv6.PacketConn
17 | dst *net.UDPAddr
18 | }
19 |
20 | func (c *ipv6ConnAdapter) Read(b []byte) (n int, err error) {
21 | n, _, _, err = c.Conn.ReadFrom(b)
22 | return
23 | }
24 |
25 | func (c *ipv6ConnAdapter) Write(b []byte) (n int, err error) {
26 | return c.Conn.WriteTo(b, nil, c.dst)
27 | }
28 |
--------------------------------------------------------------------------------
/internal/p2p/interface.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "github.com/libsv/go-p2p/wire"
5 | )
6 |
7 | type PeerI interface {
8 | Restart() (ok bool)
9 | Shutdown()
10 | Connected() bool
11 | IsUnhealthyCh() <-chan struct{}
12 | WriteMsg(msg wire.Message)
13 | Network() wire.BitcoinNet
14 | String() string
15 | }
16 |
17 | type MessageHandlerI interface {
18 | // OnReceive handles incoming messages depending on command type
19 | OnReceive(msg wire.Message, peer PeerI)
20 | // OnSend handles outgoing messages depending on command type
21 | OnSend(msg wire.Message, peer PeerI)
22 | }
23 |
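
A minimal sketch of a MessageHandlerI implementation; the loggingHandler type below is illustrative and not part of this package:

package p2p

import (
	"log/slog"

	"github.com/libsv/go-p2p/wire"
)

// loggingHandler is a hypothetical handler that only records message commands.
type loggingHandler struct {
	l *slog.Logger
}

func (h *loggingHandler) OnReceive(msg wire.Message, peer PeerI) {
	h.l.Debug("received", "cmd", msg.Command(), "peer", peer.String())
}

func (h *loggingHandler) OnSend(msg wire.Message, peer PeerI) {
	h.l.Debug("sent", "cmd", msg.Command(), "peer", peer.String())
}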
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/discussion.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Discussion
3 | about: Propose a discussion or seek clarification about a feature or topic.
4 | title: "[DISCUSSION]"
5 | labels: discussion
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Summary
11 |
12 | Briefly describe the topic you'd like to discuss.
13 |
14 | ## Motivation
15 |
16 | Why do you believe this to be important?
17 |
18 | ## Description
19 |
20 | Provide a detailed description or elaborate on your topic.
21 |
22 | ## Additional References
23 |
24 | Provide any additional articles, links, or context that would help facilitate the discussion.
25 |
--------------------------------------------------------------------------------
/internal/api/helpers.go:
--------------------------------------------------------------------------------
1 | package api
2 |
3 | import (
4 | "github.com/bitcoin-sv/bdk/module/gobdk/script"
5 | feemodel "github.com/bsv-blockchain/go-sdk/transaction/fee_model"
6 | )
7 |
8 | func FeesToFeeModel(minMiningFee float64) *feemodel.SatoshisPerKilobyte {
9 | satoshisPerKB := uint64(minMiningFee * 1e8)
10 | return &feemodel.SatoshisPerKilobyte{Satoshis: satoshisPerKB}
11 | }
12 |
13 | type ScriptVerifier interface {
14 | VerifyScript(extendedTX []byte, utxoHeights []int32, blockHeight int32, consensus bool) script.ScriptError
15 | }
16 |
17 | type ArcDefaultHandlerHealth interface {
18 | CurrentBlockHeight() int32
19 | }
20 |
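
A worked example, assuming minMiningFee is expressed in BSV per kilobyte (1 BSV = 1e8 satoshis), so a fee of 0.000001 BSV/kB maps to 100 satoshis/kB:

package main

import (
	"fmt"

	"github.com/bitcoin-sv/arc/internal/api"
)

func main() {
	// Illustrative fee value, not ARC's default: 0.000001 BSV/kB * 1e8 sat/BSV = 100 sat/kB.
	model := api.FeesToFeeModel(0.000001)
	fmt.Println(model.Satoshis) // 100
}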
--------------------------------------------------------------------------------
/doc/dist/swagger-initializer.js:
--------------------------------------------------------------------------------
1 | window.onload = function() {
2 | //
3 |
4 | // the following lines will be replaced by docker/configurator, when it runs in a docker-container
5 | window.ui = SwaggerUIBundle({
6 | url: "arc.json",
7 | dom_id: '#swagger-ui',
8 | deepLinking: true,
9 | presets: [
10 | SwaggerUIBundle.presets.apis,
11 | SwaggerUIStandalonePreset
12 | ],
13 | plugins: [
14 | SwaggerUIBundle.plugins.DownloadUrl
15 | ],
16 | layout: "BaseLayout",
17 | supportedSubmitMethods: []
18 | });
19 |
20 | //
21 | };
22 |
--------------------------------------------------------------------------------
/internal/p2p/peer_manager_options.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "time"
5 |
6 | "github.com/libsv/go-p2p/wire"
7 | )
8 |
9 | type PeerManagerOptions func(p *PeerManager)
10 |
11 | func WithRestartUnhealthyPeers() PeerManagerOptions {
12 | return func(p *PeerManager) {
13 | p.restartUnhealthyPeers = true
14 | }
15 | }
16 |
17 | // SetExcessiveBlockSize sets the global excessive block size limit
18 | func SetExcessiveBlockSize(ebs uint64) {
19 | wire.SetLimits(ebs)
20 | }
21 |
22 | func SetPeerCheckInterval(interval time.Duration) PeerManagerOptions {
23 | return func(p *PeerManager) {
24 | p.peerCheckInterval = interval
25 | }
26 | }
27 |
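
A sketch of how these functional options are typically combined; the example package, the 30-second interval and the idea of handing the slice to a PeerManager constructor are assumptions, since that constructor is not shown in this file:

package example // hypothetical wiring code

import (
	"time"

	"github.com/bitcoin-sv/arc/internal/p2p"
)

func buildOptions() []p2p.PeerManagerOptions {
	// Each option is just a func(*p2p.PeerManager); whatever constructor builds the
	// PeerManager is expected to apply them in turn.
	return []p2p.PeerManagerOptions{
		p2p.WithRestartUnhealthyPeers(),
		p2p.SetPeerCheckInterval(30 * time.Second),
	}
}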
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/reorg/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1002
2 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
3 | merkle_tree_index: 0
4 | - block_id: 1999 # the same tx also in stale block
5 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
6 | merkle_tree_index: 0
7 | - block_id: 1999 # the same tx also in stale block
8 | hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
9 | merkle_tree_index: 1
10 | - block_id: 1004
11 | hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
12 | merkle_tree_index: 0
13 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000001_create_blocks.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE blocks (
2 | inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
3 | id BIGSERIAL PRIMARY KEY,
4 | hash BYTEA NOT NULL,
5 | prevhash BYTEA NOT NULL,
6 | merkleroot BYTEA NOT NULL,
7 | height BIGINT NOT NULL,
8 | processed_at TIMESTAMPTZ,
9 | size BIGINT,
10 | tx_count BIGINT,
11 | orphanedyn BOOLEAN NOT NULL DEFAULT FALSE,
12 | merkle_path TEXT DEFAULT '' :: TEXT
13 | );
14 |
15 | CREATE UNIQUE INDEX ux_blocks_hash ON blocks (hash);
16 |
17 | CREATE UNIQUE INDEX pux_blocks_height ON blocks(height)
18 | WHERE
19 | orphanedyn = FALSE;
20 |
--------------------------------------------------------------------------------
/sonar-project.properties:
--------------------------------------------------------------------------------
1 | sonar.projectKey=bitcoin-sv_arc
2 | sonar.organization=bitcoin-sv
3 | sonar.host.url=https://sonarcloud.io
4 |
5 | # This is the name and version displayed in the SonarCloud UI.
6 | sonar.projectName=ARC
7 | sonar.projectVersion=0.1
8 |
9 | sonar.sources=.
10 | sonar.exclusions=**/*_test.go,doc/**/*,**/examples/**/*,**/testdata/**/*,**/test/**/*,**/tests/**/*,**/integration_test/**/*,pkg/api/arc.go,**/*mock.go,**/*.pb.go,cmd/**/*,**/test_utils/**/*,scripts/**/*,**/test_utils.go,internal/grpc_utils/**/*
11 |
12 | sonar.tests=.
13 | sonar.test.inclusions=**/*_test.go
14 | sonar.go.coverage.reportPaths=cov.out
15 | sonar.externalIssuesReportPaths=gosec-report.json
16 |
--------------------------------------------------------------------------------
/internal/metamorph/metamorph_mocks.go:
--------------------------------------------------------------------------------
1 | package metamorph
2 |
3 | // from ./metamorph_api/
4 | //go:generate moq -pkg mocks -out ./mocks/metamorph_api_mock.go ./metamorph_api MetaMorphAPIClient
5 |
6 | // from health_check.go
7 | //go:generate moq -pkg mocks -out ./mocks/health_watch_server_mock.go . HealthWatchServer
8 |
9 | // from processor.go
10 | //go:generate moq -pkg mocks -out ./mocks/mediator_mock.go . Mediator
11 |
12 | // from zmq.go
13 | //go:generate moq -pkg mocks -out ./mocks/zmq_mock.go . ZMQI
14 |
15 | // from server.go
16 | //go:generate moq -pkg mocks -out ./mocks/processor_mock.go . ProcessorI
17 | //go:generate moq -pkg mocks -out ./mocks/bitcoin_mock.go . BitcoinNode
18 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/set_requested/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
2 | locked_by: metamorph-1
3 | status: 90
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | - hash: 0x3296b4cca1c8b1de10b7d4a259963450bf0ed8b481f1fc79e2fb956cfe42242f
7 | locked_by: metamorph-1
8 | status: 90
9 | stored_at: 2023-10-01 14:04:00
10 | last_submitted_at: 2023-10-01 13:30:00
11 | - hash: 0x319b5eb9d99084b72002640d1445f49b8c83539260a7e5b2cbb16c1d2954a743
12 | locked_by: NONE
13 | status: 90
14 | stored_at: 2023-10-01 14:00:00
15 | last_submitted_at: 2023-10-01 14:00:00
16 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000016_multiple_callbacks.down.sql:
--------------------------------------------------------------------------------
1 | -- Step 1: Add the old 'callback_url' and 'callback_token' columns back
2 | ALTER TABLE metamorph.transactions
3 | ADD COLUMN callback_url TEXT,
4 | ADD COLUMN callback_token TEXT;
5 |
6 | -- Step 2: Populate 'callback_url' and 'callback_token' with the first object in the 'callbacks' JSON array
7 | UPDATE metamorph.transactions
8 | SET callback_url = (callbacks->0->>'callback_url'),
9 | callback_token = (callbacks->0->>'callback_token')
10 | WHERE jsonb_array_length(callbacks) > 0;
11 |
12 | -- Step 3: Drop the new 'callbacks' column
13 | ALTER TABLE metamorph.transactions
14 | DROP COLUMN callbacks;
15 |
--------------------------------------------------------------------------------
/test/config/config_common.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | common:
3 | logLevel: INFO
4 | logFormat: tint
5 | profilerAddr: localhost:9999
6 | prometheus:
7 | enabled: true
8 | endpoint: /metrics
9 | addr: :2112
10 | grpcMessageSize: 100000000
11 | network: regtest
12 | messageQueue:
13 | streaming:
14 | enabled: true
15 | fileStorage: false
16 | URL: nats://nats-1:4222,nats://nats-2:4223
17 | user:
18 | password:
19 | reBroadcastExpiration: 24h
20 | tracing:
21 | enabled: false
22 | dialAddr: http://jaeger:4317
23 | sample: 100
24 | cache:
25 | engine: redis
26 | redis:
27 | addr: cache:6379
28 | password: ""
29 | db: 1
30 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/reorg_orphans/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1002
2 | hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
3 | merkle_tree_index: 0
4 | - block_id: 1002
5 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
6 | merkle_tree_index: 1
7 | - block_id: 1004
8 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
9 | merkle_tree_index: 0
10 | - block_id: 1003
11 | hash: 0x2ff4430eb883c6f6c0640a5d716b2d107bbc0efa5aeaa237aec796d4686b0a8f
12 | merkle_tree_index: 0
13 | - block_id: 1006
14 | hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
15 | merkle_tree_index: 0
16 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000016_multiple_callbacks.up.sql:
--------------------------------------------------------------------------------
1 | -- Step 1: Add the new 'callbacks' column
2 | ALTER TABLE metamorph.transactions
3 | ADD COLUMN callbacks JSONB;
4 |
5 | -- Step 2: Populate the 'callbacks' column with data from 'callback_url' and 'callback_token'
6 | UPDATE metamorph.transactions
7 | SET callbacks = json_build_array(
8 | json_build_object(
9 | 'callback_url', callback_url,
10 | 'callback_token', callback_token
11 | )
12 | ) WHERE LENGTH(callback_url) > 0 OR LENGTH(callback_token) > 0;
13 |
14 | -- Step 3: Drop the old 'callback_url' and 'callback_token' columns
15 | ALTER TABLE metamorph.transactions
16 | DROP COLUMN callback_url,
17 | DROP COLUMN callback_token;
18 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000019_add_is_longest.up.sql:
--------------------------------------------------------------------------------
1 | -- field `is_longest` is an implementation detail that will help to
2 | -- make sure that there is only one longest chain at any given height
3 | -- and is also used as a helper when querying for longest chain
4 | ALTER TABLE blocktx.blocks
5 | ADD COLUMN is_longest BOOLEAN NOT NULL DEFAULT TRUE;
6 |
7 | -- This will make it faster to search for blocks WHERE is_longest = true
8 | CREATE INDEX ix_block_is_longest ON blocktx.blocks(is_longest);
9 |
10 | -- This will make sure that there can only be ONE block at any
11 | -- given height that is considered part of the LONGEST chain.
12 | CREATE UNIQUE INDEX pux_height_is_longest ON blocktx.blocks(height)
13 | WHERE is_longest;
14 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/app/keyset/keyset.go:
--------------------------------------------------------------------------------
1 | package keyset
2 |
3 | import (
4 | "github.com/spf13/cobra"
5 |
6 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app/keyset/address"
7 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app/keyset/balance"
8 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app/keyset/new"
9 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app/keyset/topup"
10 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/app/keyset/utxos"
11 | )
12 |
13 | var Cmd = &cobra.Command{
14 | Use: "keyset",
15 | Short: "Function set for the keyset",
16 | }
17 |
18 | func init() {
19 | Cmd.AddCommand(address.Cmd)
20 | Cmd.AddCommand(balance.Cmd)
21 | Cmd.AddCommand(new.Cmd)
22 | Cmd.AddCommand(topup.Cmd)
23 | Cmd.AddCommand(utxos.Cmd)
24 | }
25 |
--------------------------------------------------------------------------------
/internal/broadcaster/broadcaster_mocks.go:
--------------------------------------------------------------------------------
1 | package broadcaster
2 |
3 | // from arc_client.go
4 | //go:generate moq -pkg mocks -out ./mocks/arc_client_mock.go . ArcClient
5 |
6 | // from broadcaster.go
7 | //go:generate moq -pkg mocks -out ./mocks/utxo_client_mock.go . UtxoClient
8 |
9 | // from mutli_utxo_consolidator.go
10 | //go:generate moq -pkg mocks -out ./mocks/consolidator_mock.go . Consolidator
11 |
12 | // from multi_rate_broadcaster.go
13 | //go:generate moq -pkg mocks -out ./mocks/rate_broadcaster_mock.go . RateBroadcaster
14 |
15 | // from multi_utxo_creator.go
16 | //go:generate moq -pkg mocks -out ./mocks/utxo_creator_mock.go . Creator
17 |
18 | // from rate_broadcaster.go
19 | //go:generate moq -pkg mocks -out ./mocks/ticker_mock.go . Ticker
20 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000013_null_adjustment.up.sql:
--------------------------------------------------------------------------------
1 | -- make full_status_updates not null
2 | UPDATE metamorph.transactions
3 | SET full_status_updates = false
4 | WHERE full_status_updates IS NULL;
5 |
6 | ALTER TABLE metamorph.transactions ALTER COLUMN full_status_updates SET NOT NULL;
7 |
8 | -- make locked_by not null
9 | UPDATE metamorph.transactions
10 | SET locked_by = 'NONE'
11 | WHERE locked_by IS NULL;
12 |
13 | ALTER TABLE metamorph.transactions ALTER COLUMN locked_by SET NOT NULL;
14 |
15 | -- make last_submitted_at not null
16 | ALTER TABLE metamorph.transactions ALTER COLUMN last_submitted_at SET NOT NULL;
17 |
18 | -- make stored_at not null
19 | ALTER TABLE metamorph.transactions ALTER COLUMN stored_at SET NOT NULL;
20 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v3.2.0
4 | hooks:
5 | - id: check-merge-conflict
6 | - id: trailing-whitespace
7 | - id: end-of-file-fixer
8 | - id: check-added-large-files
9 | - repo: https://github.com/dtaivpp/commit-msg-regex-hook
10 | rev: v0.2.2
11 | hooks:
12 | - id: commit-msg-regex-hook
13 | args: [ "--pattern='^(feat|chore|deps|sec|fix|refactor|docs|build|ci|test)(\\([A-Z]{3,6}-[0-9]{3,6}\\))?: .+$'",
14 | "--failure_message='Commits should match the pattern: type(CODE-XXX)?: Message, where type is one of feat, chore, deps, sec, fix, refactor, docs, build, ci or test, and (CODE-XXX) is optional.'" ]
15 | stages: [ commit-msg ]
16 |
--------------------------------------------------------------------------------
/.github/workflows/e2e.yaml:
--------------------------------------------------------------------------------
1 | name: E2E-tests
2 |
3 | on:
4 | pull_request:
5 | branches: ["**"]
6 | paths:
7 | - 'cmd/arc/**'
8 | - 'internal/**/*.go'
9 | - 'pkg/**/*.go'
10 | - 'go.mod'
11 | - 'go.sum'
12 | - 'Dockerfile'
13 | - 'test/**'
14 | - 'docker-compose.yaml'
15 |
16 | jobs:
17 | e2e:
18 | name: e2e
19 | runs-on: ubuntu-latest
20 | steps:
21 | - name: Check out the repo
22 | uses: actions/checkout@v4
23 |
24 | - name: e2e test
25 | run: |
26 | docker build . -t test-arc
27 | docker compose --env-file ./.env.dev up blocktx callbacker metamorph api tests --scale blocktx=2 --scale metamorph=2 --exit-code-from tests
28 | docker compose down
29 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/get_longest_chain.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 |
6 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
7 | )
8 |
9 | func (p *PostgreSQL) GetLongestChainFromHeight(ctx context.Context, height uint64) ([]*blocktx_api.Block, error) {
10 | q := `
11 | SELECT
12 | hash
13 | ,prevhash
14 | ,merkleroot
15 | ,height
16 | ,processed_at
17 | ,status
18 | ,chainwork
19 | ,timestamp
20 | FROM blocktx.blocks
WHERE height >= $1 AND is_longest = true AND processed_at IS NOT NULL ORDER BY id ASC
22 | `
23 |
24 | rows, err := p.db.QueryContext(ctx, q, height)
25 | if err != nil {
26 | return nil, err
27 | }
28 | defer rows.Close()
29 |
30 | return p.parseBlocks(rows)
31 | }
32 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000004_create_functions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int)
2 | RETURNS bytea AS
3 | $$
4 | SELECT CASE WHEN index >= midpoint THEN bytes ELSE
5 | reverse_bytes_iter(
6 | set_byte(
7 | set_byte(bytes, index, get_byte(bytes, length-index)),
8 | length-index, get_byte(bytes, index)
9 | ),
10 | length, midpoint, index + 1
11 | )
12 | END;
13 | $$ LANGUAGE SQL IMMUTABLE;
14 |
15 | CREATE
16 | OR REPLACE FUNCTION reverse_bytes(bytes bytea) RETURNS bytea AS 'SELECT reverse_bytes_iter(bytes, octet_length(bytes)-1, octet_length(bytes)/2, 0)' LANGUAGE SQL IMMUTABLE;
17 |
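
For readers less familiar with recursive SQL, a Go sketch of the equivalent operation; this mirrors how reverse_bytes is used elsewhere in this dump (e.g. hash = reverse_bytes(decode(tx_id, 'hex'))) to turn display-order hex hashes into the byte order stored in BYTEA columns:

package example // illustrative equivalent of the reverse_bytes SQL function

import "encoding/hex"

// reverseBytes returns a copy of b with its byte order reversed, mirroring
// what reverse_bytes(bytea) computes.
func reverseBytes(b []byte) []byte {
	out := make([]byte, len(b))
	for i, v := range b {
		out[len(b)-1-i] = v
	}
	return out
}

// txidToStoredBytes decodes a display-order txid hex string and reverses it,
// yielding the byte order used for the BYTEA hash columns in the fixtures.
func txidToStoredBytes(txid string) ([]byte, error) {
	raw, err := hex.DecodeString(txid)
	if err != nil {
		return nil, err
	}
	return reverseBytes(raw), nil
}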
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000007_create_functions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int)
2 | RETURNS bytea AS
3 | $$
4 | SELECT CASE WHEN index >= midpoint THEN bytes ELSE
5 | reverse_bytes_iter(
6 | set_byte(
7 | set_byte(bytes, index, get_byte(bytes, length-index)),
8 | length-index, get_byte(bytes, index)
9 | ),
10 | length, midpoint, index + 1
11 | )
12 | END;
13 | $$ LANGUAGE SQL IMMUTABLE;
14 |
15 | CREATE
16 | OR REPLACE FUNCTION reverse_bytes(bytes bytea) RETURNS bytea AS 'SELECT reverse_bytes_iter(bytes, octet_length(bytes)-1, octet_length(bytes)/2, 0)' LANGUAGE SQL IMMUTABLE;
17 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000003_create_functions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE OR REPLACE FUNCTION reverse_bytes_iter(bytes bytea, length int, midpoint int, index int)
2 | RETURNS bytea AS
3 | $$
4 | SELECT CASE WHEN index >= midpoint THEN bytes ELSE
5 | reverse_bytes_iter(
6 | set_byte(
7 | set_byte(bytes, index, get_byte(bytes, length-index)),
8 | length-index, get_byte(bytes, index)
9 | ),
10 | length, midpoint, index + 1
11 | )
12 | END;
13 | $$ LANGUAGE SQL IMMUTABLE;
14 |
15 | CREATE
16 | OR REPLACE FUNCTION reverse_bytes(bytes bytea) RETURNS bytea AS 'SELECT reverse_bytes_iter(bytes, octet_length(bytes)-1, octet_length(bytes)/2, 0)' LANGUAGE SQL IMMUTABLE;
17 |
--------------------------------------------------------------------------------
/internal/broadcaster/arc_client.go:
--------------------------------------------------------------------------------
1 | package broadcaster
2 |
3 | import (
4 | "context"
5 |
6 | "github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
7 |
8 | sdkTx "github.com/bsv-blockchain/go-sdk/transaction"
9 | )
10 |
11 | type ArcClient interface {
12 | BroadcastTransaction(ctx context.Context, tx *sdkTx.Transaction, waitForStatus metamorph_api.Status, callbackURL string) (*metamorph_api.TransactionStatus, error)
13 | BroadcastTransactions(ctx context.Context, txs sdkTx.Transactions, waitForStatus metamorph_api.Status, callbackURL string, callbackToken string, fullStatusUpdates bool, skipFeeValidation bool) ([]*metamorph_api.TransactionStatus, error)
14 | GetTransactionStatus(ctx context.Context, txID string) (*metamorph_api.TransactionStatus, error)
15 | }
16 |
--------------------------------------------------------------------------------
/.github/workflows/pr-title-check.yml:
--------------------------------------------------------------------------------
1 | name: Enforce Commit Message Format
2 |
3 | on:
4 | pull_request:
5 | types: [opened, reopened, edited, synchronize]
6 |
7 | jobs:
8 | check_commit_message:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Check commit message format
12 | env:
13 | PR_TITLE: ${{ github.event.pull_request.title }}
14 | run: |
15 | if [[ ! "$PR_TITLE" =~ ^(feat|chore|deps|sec|fix|refactor|docs|build|ci|test)(\([A-Z]{3,6}-[0-9]{3,6}\))?:\ .+$ ]]; then
16 | echo "The PR title, which is used as the merge commit message, does not follow the Conventional Commits format."
17 | echo "Please use the format type(scope): subject, as described in CONTRIBUTING.md"
18 | exit 1
19 | fi
20 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/clear_data/blocktx.block_processing.yaml:
--------------------------------------------------------------------------------
1 | # completed
2 | - block_hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
3 | processed_by: pod-2
4 | inserted_at: 2023-12-11T11:40:00+00:00
5 |
6 | # in progress
7 | - block_hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000
8 | processed_by: pod-2
9 | inserted_at: 2023-12-11T11:50:00+00:00
10 |
11 | # in progress
12 | - block_hash: 0xc20b4d510e1a7a4ab3da30e55676de0884b4cb79139ccc0a0000000000000000
13 | processed_by: pod-2
14 | inserted_at: 2023-12-11T12:10:00+00:00
15 |
16 | # in progress
17 | - block_hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000
18 | processed_by: pod-3
19 | inserted_at: 2023-12-11T12:10:00+00:00
20 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.block_processing.yaml:
--------------------------------------------------------------------------------
1 | # completed
2 | - block_hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
3 | processed_by: pod-2
4 | inserted_at: 2023-12-22T11:40:00+00:00
5 |
6 | # in progress
7 | - block_hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000
8 | processed_by: pod-2
9 | inserted_at: 2023-12-22T11:50:00+00:00
10 |
11 | # in progress
12 | - block_hash: 0xc20b4d510e1a7a4ab3da30e55676de0884b4cb79139ccc0a0000000000000000
13 | processed_by: pod-2
14 | inserted_at: 2023-12-22T12:10:00+00:00
15 |
16 | # in progress
17 | - block_hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000
18 | processed_by: pod-3
19 | inserted_at: 2023-12-22T12:10:00+00:00
20 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000005_add_inserted_at.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE blocks
2 | ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
3 |
4 | ALTER TABLE transactions
5 | ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
6 |
7 | ALTER TABLE block_transactions_map
8 | ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
9 |
10 | CREATE INDEX ix_blocks_inserted_at ON blocks (inserted_at_num);
11 | CREATE INDEX ix_transactions_inserted_at ON transactions (inserted_at_num);
12 | CREATE INDEX ix_block_transactions_map_inserted_at ON block_transactions_map (inserted_at_num);
13 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/mark_block_as_done.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 |
6 | "github.com/libsv/go-p2p/chaincfg/chainhash"
7 |
8 | "github.com/bitcoin-sv/arc/pkg/tracing"
9 | )
10 |
11 | func (p *PostgreSQL) MarkBlockAsDone(ctx context.Context, hash *chainhash.Hash, size uint64, txCount uint64) (err error) {
12 | ctx, span := tracing.StartTracing(ctx, "MarkBlockAsDone", p.tracingEnabled, p.tracingAttributes...)
13 | defer func() {
14 | tracing.EndTracing(span, err)
15 | }()
16 |
17 | q := `
18 | UPDATE blocktx.blocks
19 | SET processed_at = $4,
20 | size = $1,
21 | tx_count = $2
22 | WHERE hash = $3
23 | `
24 |
25 | if _, err = p.db.ExecContext(ctx, q, size, txCount, hash[:], p.now()); err != nil {
26 | return err
27 | }
28 |
29 | return nil
30 | }
31 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/register_transactions.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 | "errors"
6 |
7 | "github.com/lib/pq"
8 |
9 | "github.com/bitcoin-sv/arc/internal/blocktx/store"
10 | )
11 |
12 | func (p *PostgreSQL) RegisterTransactions(ctx context.Context, txHashes [][]byte) (int64, error) {
13 | const q = `
14 | INSERT INTO blocktx.registered_transactions (hash)
15 | SELECT hash
16 | FROM UNNEST ($1::BYTEA[]) as hash
17 | ON CONFLICT (hash) DO NOTHING
18 | `
19 |
20 | res, err := p.db.ExecContext(ctx, q, pq.Array(txHashes))
21 | if err != nil {
22 | return 0, errors.Join(store.ErrFailedToInsertTransactions, err)
23 | }
24 |
25 | rowsAffected, err := res.RowsAffected()
26 | if err != nil {
27 | return 0, err
28 | }
29 |
30 | return rowsAffected, nil
31 | }
32 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000001_create_transactions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA metamorph;
2 | CREATE TABLE metamorph.transactions (
3 | hash BYTEA PRIMARY KEY,
4 | stored_at TIMESTAMPTZ,
5 | announced_at TIMESTAMPTZ,
6 | mined_at TIMESTAMPTZ,
7 | status INTEGER,
8 | block_height BIGINT,
9 | block_hash BYTEA,
10 | callback_url TEXT,
11 | callback_token TEXT,
12 | merkle_proof TEXT,
13 | reject_reason TEXT,
14 | raw_tx BYTEA,
15 | locked_by TEXT,
16 | inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL
17 | );
18 |
19 | CREATE INDEX ix_metamorph_transactions_locked_by ON metamorph.transactions (locked_by);
20 | CREATE INDEX ix_metamorph_transactions_inserted_at_num ON metamorph.transactions (inserted_at_num);
21 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug Report
3 | about: Report a bug or an issue you've found with `@bitcoin-sv/arc`.
4 | title: "[BUG]"
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | ## Bug Description
11 |
12 | Briefly describe the bug/issue you've encountered.
13 |
14 | ## Steps to Reproduce
15 |
16 | 1. Step 1
17 | 2. Step 2
18 | 3. ...
19 |
20 | ## Expected Behavior
21 |
22 | What should have happened if the bug hadn't occurred?
23 |
24 | ## Actual Behavior
25 |
26 | What actually happened?
27 |
28 | ## Stack Traces or Screenshots
29 |
30 | If applicable, add screenshots or stack traces to help explain the issue.
31 |
32 | ## Environment
33 |
34 | - OS: [e.g. MacOS, Windows]
35 | - ARC version: [e.g. 1.1.79]
36 |
37 | ## Additional Information
38 |
39 | Provide any additional context or information about the bug.
40 |
--------------------------------------------------------------------------------
/internal/callbacker/callbacker.go:
--------------------------------------------------------------------------------
1 | package callbacker
2 |
3 | import (
4 | "time"
5 | )
6 |
7 | type SenderI interface {
8 | Send(url, token string, callback *Callback) (success, retry bool)
9 | SendBatch(url, token string, callbacks []*Callback) (success, retry bool)
10 | }
11 |
12 | type Callback struct {
13 | Timestamp time.Time `json:"timestamp"`
14 |
15 | CompetingTxs []string `json:"competingTxs,omitempty"`
16 |
17 | TxID string `json:"txid"`
18 | TxStatus string `json:"txStatus"`
19 | ExtraInfo *string `json:"extraInfo,omitempty"`
20 | MerklePath *string `json:"merklePath,omitempty"`
21 |
22 | BlockHash *string `json:"blockHash,omitempty"`
23 | BlockHeight *uint64 `json:"blockHeight,omitempty"`
24 | }
25 |
26 | type BatchCallback struct {
27 | Count int `json:"count"`
28 | Callbacks []*Callback `json:"callbacks,omitempty"`
29 | }
30 |
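
A sketch of the JSON payload shape implied by the struct tags above; the txid and status values are copied from the fixtures earlier in this dump, everything else is a placeholder:

package example

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/bitcoin-sv/arc/internal/callbacker"
)

func printExampleCallback() {
	// Fields with omitempty that stay nil are dropped from the output.
	cb := callbacker.Callback{
		Timestamp: time.Date(2024, 9, 1, 12, 25, 0, 0, time.UTC),
		TxID:      "96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28",
		TxStatus:  "MINED",
	}
	b, _ := json.Marshal(cb)
	fmt.Println(string(b))
	// {"timestamp":"2024-09-01T12:25:00Z","txid":"96cb...5c28","txStatus":"MINED"}
}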
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/set_locked/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x319b5eb9d99084b72002640d1445f49b8c83539260a7e5b2cbb16c1d2954a743
2 | locked_by: NONE
3 | status: 90
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | - hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357
7 | locked_by: NONE
8 | status: 40
9 | stored_at: 2023-10-01 14:00:00
10 | last_submitted_at: 2023-10-01 14:00:00
11 | - hash: 0x104d50aba0aa4b7568fcd03c510a8c8e2362c6136768f08a546cb9bb11cf947c
12 | locked_by: NONE
13 | status: 100
14 | stored_at: 2023-10-01 14:00:00
15 | last_submitted_at: 2023-10-01 14:00:00
16 | - hash: 0x78d66c8391ff5e4a65b494e39645facb420b744f77f3f3b83a3aa8573282176e
17 | locked_by: NONE
18 | status: 50
19 | stored_at: 2023-10-01 14:00:00
20 | last_submitted_at: 2023-10-01 14:00:00
21 |
--------------------------------------------------------------------------------
/doc/api.html:
--------------------------------------------------------------------------------
1 | <!-- Swagger UI page; HTML markup not preserved in this dump -->
--------------------------------------------------------------------------------
/internal/metamorph/cache.go:
--------------------------------------------------------------------------------
1 | package metamorph
2 |
3 | import (
4 | "context"
5 | "errors"
6 |
7 | "github.com/go-redis/redis/v8"
8 |
9 | "github.com/bitcoin-sv/arc/config"
10 | "github.com/bitcoin-sv/arc/internal/cache"
11 | )
12 |
13 | var ErrCacheUnknownType = errors.New("unknown cache type")
14 |
15 | // NewCacheStore creates a new CacheStore based on the provided configuration.
16 | func NewCacheStore(cacheConfig *config.CacheConfig) (cache.Store, error) {
17 | switch cacheConfig.Engine {
18 | case config.InMemory:
19 | return cache.NewMemoryStore(), nil
20 | case config.Redis:
21 | c := redis.NewClient(&redis.Options{
22 | Addr: cacheConfig.Redis.Addr,
23 | Password: cacheConfig.Redis.Password,
24 | DB: cacheConfig.Redis.DB,
25 | })
26 | return cache.NewRedisStore(context.Background(), c), nil
27 | default:
28 | return nil, ErrCacheUnknownType
29 | }
30 | }
31 |
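
A usage sketch, assuming config.CacheConfig exposes the Engine and Redis fields read by the switch above (the in-memory branch needs only Engine):

package example

import (
	"log"

	"github.com/bitcoin-sv/arc/config"
	"github.com/bitcoin-sv/arc/internal/metamorph"
)

func newInMemoryCache() {
	// The in-memory engine needs no further settings; the redis branch would also
	// require cacheConfig.Redis to be populated (addr, password, db).
	store, err := metamorph.NewCacheStore(&config.CacheConfig{Engine: config.InMemory})
	if err != nil {
		log.Fatalf("failed to create cache store: %v", err)
	}
	_ = store
}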
--------------------------------------------------------------------------------
/.github/workflows/static.yaml:
--------------------------------------------------------------------------------
1 | # Simple workflow for deploying static content to GitHub Pages
2 | name: Deploy static content to Pages
3 |
4 | on:
5 | # Runs on pushes targeting the default branch
6 | push:
7 | branches: ["main"]
8 |
9 | # Allows you to run this workflow manually from the Actions tab
10 | workflow_dispatch:
11 |
12 | # Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
13 | permissions:
14 | contents: write
15 | pages: write
16 | id-token: write
17 |
18 | # Allow one concurrent deployment
19 | concurrency:
20 | group: "pages"
21 | cancel-in-progress: true
22 |
23 | jobs:
24 | build:
25 | runs-on: [ubuntu-latest]
26 | steps:
27 | - name: Check out the repo
28 | uses: actions/checkout@v4
29 |
30 | - name: Deploy Files
31 | run: |
32 | git push --force origin `git subtree split --prefix doc main`:gh-pages
33 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/get_double_spends/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | locked_by: metamorph-1
3 | status: 100
4 | stored_at: 2023-10-01T4:00:00+00:00
5 | last_submitted_at: 2023-10-01T14:00:00+00:00
6 | last_modified: 2023-10-01T14:00:00+00:00
7 | competing_txs: '4e6b3dd04f51ac6ce3d051f80d819bed366a4ff29143bb58c01154cb322d1321,30f409d6951483e4d65a586205f373c2f72431ade49abb6f143e82fc53ea6cb1'
8 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
9 | locked_by: metamorph-1
10 | status: 100
11 | stored_at: 2023-10-01T14:00:00+00:00
12 | last_submitted_at: 2023-10-01T16:00:00+00:00
13 | last_modified: 2023-10-01T16:00:00+00:00
14 | competing_txs: '4e6b3dd04f51ac6ce3d051f80d819bed366a4ff29143bb58c01154cb322d1321,538808e847d0add40ed9622fff53954c79e1f52db7c47ea0b6cdc0df972f3dcd'
15 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/clear_blocks/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-10 14:00:00
2 | id: 1
3 | hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9
4 | prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
5 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
6 | height: 822013
7 | processed_at: 2023-12-10 14:10:00
8 | size: 86840000
9 | tx_count: 23477
10 | timestamp: 2023-12-10 14:00:00
11 | - inserted_at: 2023-12-15 14:00:00
12 | id: 2
13 | hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
14 | prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a
15 | merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410
16 | height: 822012
17 | processed_at: 2023-12-15 14:10:00
18 | size: 3030000
19 | tx_count: 856
20 | timestamp: 2023-12-15 14:00:00
21 |
--------------------------------------------------------------------------------
/internal/callbacker/store/store.go:
--------------------------------------------------------------------------------
1 | package store
2 |
3 | import (
4 | "context"
5 | "time"
6 | )
7 |
8 | type CallbackData struct {
9 | ID int64
10 | URL string
11 | Token string
12 | Timestamp time.Time
13 | CompetingTxs []string
14 | TxID string
15 | TxStatus string
16 | ExtraInfo *string
17 | MerklePath *string
18 | BlockHash *string
19 | BlockHeight *uint64
20 | AllowBatch bool
21 | }
22 |
23 | type ProcessorStore interface {
24 | Clear(ctx context.Context, t time.Time) error
25 | Insert(ctx context.Context, data []*CallbackData) (int64, error)
26 | GetUnsent(ctx context.Context, limit int, expiration time.Duration, batch bool, maxRetries int) ([]*CallbackData, error)
27 | SetSent(ctx context.Context, ids []int64) error
28 | UnsetPending(ctx context.Context, ids []int64) error
29 | UnsetPendingDisable(ctx context.Context, ids []int64) error
30 | Ping() error
31 | }
32 |
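
A minimal sketch of how the ProcessorStore interface above might be exercised; the function name `enqueue` and the field values are illustrative, and `db` stands for any implementation (such as the PostgreSQL store elsewhere in this repository):

package example

import (
	"context"
	"time"

	"github.com/bitcoin-sv/arc/internal/callbacker/store"
)

// enqueue persists a single callback through any ProcessorStore implementation.
func enqueue(ctx context.Context, db store.ProcessorStore, txID, url string) error {
	cb := &store.CallbackData{
		URL:       url,
		Token:     "token",
		Timestamp: time.Now(),
		TxID:      txID,
		TxStatus:  "SEEN_ON_NETWORK",
	}

	// Insert accepts a batch; the int64 return is presumably the number of rows written.
	_, err := db.Insert(ctx, []*store.CallbackData{cb})
	return err
}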
--------------------------------------------------------------------------------
/internal/blocktx/bcnet/block_message.go:
--------------------------------------------------------------------------------
1 | package bcnet
2 |
3 | import (
4 | "io"
5 |
6 | "github.com/libsv/go-p2p/chaincfg/chainhash"
7 | "github.com/libsv/go-p2p/wire"
8 | )
9 |
10 | // BlockMessage only stores the transaction IDs of the block, not the full transactions
11 | type BlockMessage struct {
12 | Hash *chainhash.Hash
13 | Header *wire.BlockHeader
14 | Height uint64
15 | TransactionHashes []*chainhash.Hash
16 | Size uint64
17 | }
18 |
19 | func (bm *BlockMessage) Bsvdecode(io.Reader, uint32, wire.MessageEncoding) error {
20 | return nil
21 | }
22 | func (bm *BlockMessage) BsvEncode(io.Writer, uint32, wire.MessageEncoding) error {
23 | return nil
24 | }
25 | func (bm *BlockMessage) Command() string {
26 | return "block"
27 | }
28 | func (bm *BlockMessage) MaxPayloadLength(uint32) uint64 {
29 | return wire.MaxExtMsgPayload
30 | }
31 |
32 | type BlockMessagePeer struct {
33 | BlockMessage
34 | Peer string
35 | }
36 |
--------------------------------------------------------------------------------
/pkg/tracing/helper.go:
--------------------------------------------------------------------------------
1 | package tracing
2 |
3 | import (
4 | "context"
5 |
6 | "go.opentelemetry.io/otel"
7 | "go.opentelemetry.io/otel/attribute"
8 | "go.opentelemetry.io/otel/codes"
9 | "go.opentelemetry.io/otel/trace"
10 | )
11 |
12 | func StartTracing(ctx context.Context, spanName string, tracingEnabled bool, attributes ...attribute.KeyValue) (context.Context, trace.Span) {
13 | if !tracingEnabled {
14 | return ctx, nil
15 | }
16 |
17 | var span trace.Span
18 | tracer := otel.Tracer("")
19 | if tracer == nil {
20 | return ctx, nil
21 | }
22 |
23 | if len(attributes) > 0 {
24 | ctx, span = tracer.Start(ctx, spanName, trace.WithAttributes(attributes...))
25 | return ctx, span
26 | }
27 |
28 | ctx, span = tracer.Start(ctx, spanName)
29 | return ctx, span
30 | }
31 |
32 | func EndTracing(span trace.Span, err error) {
33 | if span != nil {
34 | if err != nil {
35 | span.RecordError(err)
36 | span.SetStatus(codes.Error, err.Error())
37 | }
38 | span.End()
39 | }
40 | }
41 |
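
The helpers above are used throughout the repository in a start/defer-end pair (see for example GetBlockTransactionsHashes later in this dump). A minimal sketch of that pattern, with `doWork` as an illustrative caller:

package example

import (
	"context"

	"github.com/bitcoin-sv/arc/pkg/tracing"
)

// doWork starts a span (or a nil span when tracing is disabled) and closes it on return,
// recording any error via the named return value.
func doWork(ctx context.Context, tracingEnabled bool) (err error) {
	ctx, span := tracing.StartTracing(ctx, "doWork", tracingEnabled)
	defer func() {
		tracing.EndTracing(span, err)
	}()

	_ = ctx // real work would use ctx here
	return nil
}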
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2024-01-10 13:06:03.375
2 | id: 9736
3 | hash: 0x6258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000
4 | prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313
5 | merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb
6 | height: 826481
7 | status: 10 # LONGEST
8 | is_longest: true
9 | processed_at: 2024-01-10 13:06:06.122
10 | size: 108689370
11 | tx_count: 799
12 | - inserted_at: 2024-01-10 13:06:03.375
13 | id: 9737
14 | hash: 0x7258b02da70a3e367e4c993b049fa9b76ef8f090ef9fd2010000000000000000
15 | prevhash: 0x000000000000000001a7aa3999410ca53fb645851531ec0a7a5cb9ce2d4ae313
16 | merkleroot: 0x0d72bf92e7862df18d1935c171ca4dbb70d268b0f025e46716e913bc7e4f2bdb
17 | height: 826481
18 | status: 20 # STALE
19 | is_longest: false
20 | processed_at: 2024-01-10 13:06:06.122
21 | size: 108689370
22 | tx_count: 799
23 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/stale_block/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-15 14:00:00
2 | id: 1001
3 | hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000
4 | prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
5 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
6 | height: 822014
7 | processed_at: 2023-12-15 14:10:00
8 | size: 86840000
9 | tx_count: 23477
10 | status: 10
11 | is_longest: true
12 | chainwork: '62209952899966'
13 | - inserted_at: 2023-12-15 14:30:00
14 | id: 1002
15 | hash: 0xc9b4e1e4dcf9188416027511671b9346be8ef93c0ddf59060000000000000000
16 | prevhash: 0x67708796ef57464ed9eaf2a663d3da32372e4c2fb65558020000000000000000
17 | merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257
18 | height: 822015
19 | processed_at: 2023-12-15 14:30:00
20 | size: 20160000
21 | tx_count: 6523
22 | status: 10
23 | is_longest: true
24 | chainwork: '62209952899966'
25 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/register_transactions/blocktx.registered_transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b
2 | inserted_at: 2023-12-15 14:00:00
3 | - hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0
4 | inserted_at: 2023-12-15 14:00:00
5 | - hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743
6 | inserted_at: 2023-12-15 14:00:00
7 | - hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669
8 | inserted_at: 2023-12-15 14:00:00
9 | - hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb
10 | inserted_at: 2023-12-15 14:00:00
11 | - hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608
12 | inserted_at: 2023-12-15 14:00:00
13 | - hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313
14 | inserted_at: 2023-12-15 14:00:00
15 | - hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b
16 | inserted_at: 2023-12-15 14:00:00
17 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.registered_transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b
2 | inserted_at: 2023-12-15 14:00:00
3 | - hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0
4 | inserted_at: 2023-12-15 14:00:00
5 | - hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743
6 | inserted_at: 2023-12-15 14:00:00
7 | - hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669
8 | inserted_at: 2023-12-15 14:00:00
9 | - hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb
10 | inserted_at: 2023-12-15 14:00:00
11 | - hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608
12 | inserted_at: 2023-12-15 14:00:00
13 | - hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313
14 | inserted_at: 2023-12-15 14:00:00
15 | - hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b
16 | inserted_at: 2023-12-15 14:00:00
17 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000009_inserted_at_timestampz.up.sql:
--------------------------------------------------------------------------------
1 | -- add inserted_at timestampz to transactions table
2 | ALTER TABLE transactions
3 | ADD COLUMN inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP;
4 |
5 | UPDATE transactions
6 | SET inserted_at = TO_TIMESTAMP(inserted_at_num::text, 'YYYYMMDDHH24');
7 |
8 | DROP INDEX ix_transactions_inserted_at;
9 | ALTER TABLE transactions DROP COLUMN inserted_at_num;
10 |
11 | CREATE INDEX ix_transactions_inserted_at ON transactions (inserted_at);
12 |
13 | -- add inserted_at timestampz to block_transactions_map table
14 | ALTER TABLE block_transactions_map
15 | ADD COLUMN inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP;
16 |
17 | UPDATE block_transactions_map
18 | SET inserted_at = TO_TIMESTAMP(inserted_at_num::text, 'YYYYMMDDHH24');
19 |
20 | DROP INDEX ix_block_transactions_map_inserted_at;
21 | ALTER TABLE block_transactions_map DROP COLUMN inserted_at_num;
22 |
23 | CREATE INDEX ix_block_transactions_map_inserted_at ON block_transactions_map (inserted_at);
24 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000014_rearrange_statuses.up.sql:
--------------------------------------------------------------------------------
1 | -- QUEUED
2 | UPDATE metamorph.transactions SET status = 10 WHERE status = 1;
3 | -- RECEIVED
4 | UPDATE metamorph.transactions SET status = 20 WHERE status = 2;
5 | -- STORED
6 | UPDATE metamorph.transactions SET status = 30 WHERE status = 3;
7 | -- ANNOUNCED_TO_NETWORK
8 | UPDATE metamorph.transactions SET status = 40 WHERE status = 4;
9 | -- REQUESTED_BY_NETWORK
10 | UPDATE metamorph.transactions SET status = 50 WHERE status = 5;
11 | -- SENT_TO_NETWORK
12 | UPDATE metamorph.transactions SET status = 60 WHERE status = 6;
13 | -- ACCEPTED_BY_NETWORK
14 | UPDATE metamorph.transactions SET status = 70 WHERE status = 7;
15 |
16 | -- SEEN_IN_ORPHAN_MEMPOOL
17 | UPDATE metamorph.transactions SET status = 80 WHERE status = 10;
18 | -- SEEN_ON_NETWORK
19 | UPDATE metamorph.transactions SET status = 90 WHERE status = 8;
20 | -- REJECTED
21 | UPDATE metamorph.transactions SET status = 110 WHERE status = 109;
22 | -- MINED
23 | UPDATE metamorph.transactions SET status = 120 WHERE status = 9;
24 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/migrations/000006_tx_callbacks.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE callbacker.transaction_callbacks
2 | (
3 | id BIGSERIAL NOT NULL,
4 | url TEXT NOT NULL,
5 | "token" TEXT NOT NULL,
6 | tx_id TEXT NOT NULL,
7 | tx_status TEXT NOT NULL,
8 | extra_info TEXT NULL,
9 | merkle_path TEXT NULL,
10 | block_hash TEXT NULL,
11 | block_height int8 NULL,
12 | competing_txs TEXT NULL,
13 | "timestamp" timestamptz NOT NULL,
14 | allow_batch bool DEFAULT false NULL,
15 | sent_at timestamptz NULL,
16 | pending timestamptz NULL,
17 | CONSTRAINT transaction_callbacks_pkey PRIMARY KEY (id),
18 | CONSTRAINT unique_url_tx_id_status_block_hash UNIQUE (url, tx_id, tx_status, block_hash)
19 | );
20 | CREATE INDEX ix_callbacks_sent_at ON callbacker.transaction_callbacks USING btree (sent_at);
21 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/migrations/000014_rearrange_statuses.down.sql:
--------------------------------------------------------------------------------
1 | -- QUEUED
2 | UPDATE metamorph.transactions SET status = 1 WHERE status = 10;
3 | -- RECEIVED
4 | UPDATE metamorph.transactions SET status = 2 WHERE status = 20;
5 | -- STORED
6 | UPDATE metamorph.transactions SET status = 3 WHERE status = 30;
7 | -- ANNOUNCED_TO_NETWORK
8 | UPDATE metamorph.transactions SET status = 4 WHERE status = 40;
9 | -- REQUESTED_BY_NETWORK
10 | UPDATE metamorph.transactions SET status = 5 WHERE status = 50;
11 | -- SENT_TO_NETWORK
12 | UPDATE metamorph.transactions SET status = 6 WHERE status = 60;
13 | -- ACCEPTED_BY_NETWORK
14 | UPDATE metamorph.transactions SET status = 7 WHERE status = 70;
15 |
16 | -- SEEN_IN_ORPHAN_MEMPOOL
17 | UPDATE metamorph.transactions SET status = 10 WHERE status = 80;
18 | -- SEEN_ON_NETWORK
19 | UPDATE metamorph.transactions SET status = 8 WHERE status = 90;
20 | -- REJECTED
21 | UPDATE metamorph.transactions SET status = 109 WHERE status = 110;
22 | -- MINED
23 | UPDATE metamorph.transactions SET status = 9 WHERE status = 120;
24 |
--------------------------------------------------------------------------------
/internal/beef/beef.go:
--------------------------------------------------------------------------------
1 | package beef
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 |
7 | sdkTx "github.com/bsv-blockchain/go-sdk/transaction"
8 | )
9 |
10 | const (
11 | beefVersionBytesCount = 4
12 | )
13 |
14 | const (
15 | beefMarkerPart1 = 0xBE
16 | beefMarkerPart2 = 0xEF
17 | )
18 |
19 | var (
20 | ErrBEEFPanic = errors.New("panic while parsing beef")
21 | ErrBEEFParse = errors.New("failed to parse beef")
22 | )
23 |
24 | func CheckBeefFormat(txHex []byte) bool {
25 | if len(txHex) < beefVersionBytesCount {
26 | return false
27 | }
28 |
29 | if txHex[2] != beefMarkerPart1 || txHex[3] != beefMarkerPart2 {
30 | return false
31 | }
32 |
33 | return true
34 | }
35 |
36 | func DecodeBEEF(beefHex []byte) (tx *sdkTx.Beef, txID string, err error) {
37 | defer func() {
38 | if r := recover(); r != nil {
39 | err = errors.Join(ErrBEEFPanic, fmt.Errorf("%v", r))
40 | }
41 | }()
42 |
43 | beef, _, txHash, err := sdkTx.ParseBeef(beefHex)
44 | if err != nil {
45 | return nil, "", errors.Join(ErrBEEFParse, err)
46 | }
47 |
48 | return beef, txHash.String(), nil
49 | }
50 |
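
A short usage sketch for the two helpers above; the function name and the handling of the decoded value are illustrative, and the input is assumed to be a hex-encoded BEEF payload:

package example

import (
	"encoding/hex"
	"log"

	"github.com/bitcoin-sv/arc/internal/beef"
)

// parseBEEF checks the marker bytes first, then decodes the payload.
func parseBEEF(beefHexStr string) {
	raw, err := hex.DecodeString(beefHexStr)
	if err != nil {
		log.Println("invalid hex:", err)
		return
	}

	if !beef.CheckBeefFormat(raw) {
		log.Println("not a BEEF payload")
		return
	}

	tx, txID, err := beef.DecodeBEEF(raw)
	if err != nil {
		log.Println("decode failed:", err)
		return
	}

	_ = tx // the parsed sdkTx.Beef would be validated or broadcast here
	log.Println("parsed BEEF, txID:", txID)
}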
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/insert_block_transactions/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 9736
2 | hash: 0x76732b80598326a18d3bf0a86518adbdf95d0ddc6ff6693004440f4776168c3b
3 | merkle_tree_index: 1
4 | - block_id: 9736
5 | hash: 0x164e85a5d5bc2b2372e8feaa266e5e4b7d0808f8d2b784fb1f7349c4726392b0
6 | merkle_tree_index: 2
7 | - block_id: 9736
8 | hash: 0xb4201cc6fc5768abff14adf75042ace6061da9176ee5bb943291b9ba7d7f5743
9 | merkle_tree_index: 3
10 | - block_id: 9736
11 | hash: 0x37bd6c87927e75faeb3b3c939f64721cda48e1bb98742676eebe83aceee1a669
12 | merkle_tree_index: 4
13 | - block_id: 9736
14 | hash: 0x952f80e20a0330f3b9c2dfd1586960064e797218b5c5df665cada221452c17eb
15 | merkle_tree_index: 5
16 | - block_id: 9736
17 | hash: 0x861a281b27de016e50887288de87eab5ca56a1bb172cdff6dba965474ce0f608
18 | merkle_tree_index: 6
19 | - block_id: 9736
20 | hash: 0x9421cc760c5405af950a76dc3e4345eaefd4e7322f172a3aee5e0ddc7b4f8313
21 | merkle_tree_index: 7
22 | - block_id: 9736
23 | hash: 0x8b7d038db4518ac4c665abfc5aeaacbd2124ad8ca70daa8465ed2c4427c41b9b
24 | merkle_tree_index: 8
25 |
--------------------------------------------------------------------------------
/internal/broadcaster/mutli_utxo_consolidator_test.go:
--------------------------------------------------------------------------------
1 | package broadcaster_test
2 |
3 | import (
4 | "errors"
5 | "log/slog"
6 | "os"
7 | "testing"
8 | "time"
9 |
10 | "github.com/bitcoin-sv/arc/internal/broadcaster"
11 | "github.com/bitcoin-sv/arc/internal/broadcaster/mocks"
12 | )
13 |
14 | func TestMultiKeyUtxoConsolidatorStart(t *testing.T) {
15 | t.Run("start and shutdown", func(t *testing.T) {
16 | t.Parallel()
17 | logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug}))
18 |
19 | cs := []broadcaster.Consolidator{
20 | &mocks.ConsolidatorMock{
21 | StartFunc: func(_ int) error { return nil },
22 | WaitFunc: func() {},
23 | ShutdownFunc: func() {},
24 | },
25 | &mocks.ConsolidatorMock{
26 | StartFunc: func(_ int) error { return errors.New("failed to start") },
27 | WaitFunc: func() {},
28 | ShutdownFunc: func() {},
29 | },
30 | }
31 |
32 | sut := broadcaster.NewMultiKeyUtxoConsolidator(logger, cs)
33 |
34 | sut.Start()
35 | time.Sleep(50 * time.Millisecond)
36 | sut.Shutdown()
37 | })
38 | }
39 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/postgres_helpers.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "database/sql"
5 |
6 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
7 | "google.golang.org/protobuf/types/known/timestamppb"
8 | )
9 |
10 | func (p *PostgreSQL) parseBlocks(rows *sql.Rows) ([]*blocktx_api.Block, error) {
11 | blocks := make([]*blocktx_api.Block, 0)
12 |
13 | for rows.Next() {
14 | var block blocktx_api.Block
15 | var processedAt sql.NullTime
16 | var timestamp sql.NullTime
17 |
18 | err := rows.Scan(
19 | &block.Hash,
20 | &block.PreviousHash,
21 | &block.MerkleRoot,
22 | &block.Height,
23 | &processedAt,
24 | &block.Status,
25 | &block.Chainwork,
26 | ×tamp,
27 | )
28 | if err != nil {
29 | return nil, err
30 | }
31 |
32 | block.Processed = processedAt.Valid
33 | if processedAt.Valid {
34 | block.ProcessedAt = timestamppb.New(processedAt.Time)
35 | }
36 |
37 | if timestamp.Valid {
38 | block.Timestamp = timestamppb.New(timestamp.Time)
39 | }
40 |
41 | blocks = append(blocks, &block)
42 | }
43 |
44 | return blocks, nil
45 | }
46 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000021_block_transactions.down.sql:
--------------------------------------------------------------------------------
1 |
2 | DROP INDEX IF EXISTS blocktx.ix_registered_transactions_inserted_at;
3 |
4 | DROP TABLE blocktx.block_transactions;
5 | DROP TABLE blocktx.registered_transactions;
6 |
7 | CREATE TABLE blocktx.block_transactions_map (
8 | blockid int8 NOT NULL,
9 | txid int8 NOT NULL,
10 | inserted_at timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
11 | merkle_path text DEFAULT ''::text NULL,
12 | CONSTRAINT block_transactions_map_pkey PRIMARY KEY (blockid, txid)
13 | );
14 |
15 | CREATE INDEX ix_block_transactions_map_inserted_at ON blocktx.block_transactions_map USING btree (inserted_at);
16 |
17 | CREATE TABLE blocktx.transactions (
18 | id bigserial NOT NULL,
19 | hash bytea NOT NULL,
20 | is_registered bool DEFAULT false NOT NULL,
21 | inserted_at timestamptz DEFAULT CURRENT_TIMESTAMP NOT NULL,
22 | CONSTRAINT transactions_pkey PRIMARY KEY (id)
23 | );
24 | CREATE INDEX ix_transactions_inserted_at ON blocktx.transactions USING btree (inserted_at);
25 | CREATE UNIQUE INDEX ux_transactions_hash ON blocktx.transactions USING btree (hash);
26 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000021_block_transactions.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE IF NOT EXISTS blocktx.block_transactions (
2 | block_id BIGINT,
3 | hash BYTEA NOT NULL,
4 | merkle_tree_index BIGINT DEFAULT -1, -- this means no merkle_tree_index
5 | PRIMARY KEY (hash, block_id),
6 | CONSTRAINT fk_block
7 | FOREIGN KEY(block_id)
8 | REFERENCES blocktx.blocks(id)
9 | ON DELETE CASCADE
10 | );
11 |
12 | CREATE TABLE IF NOT EXISTS blocktx.registered_transactions (
13 | hash BYTEA PRIMARY KEY,
14 | inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP
15 | );
16 |
17 | CREATE INDEX IF NOT EXISTS ix_registered_transactions_inserted_at ON blocktx.registered_transactions USING btree (inserted_at);
18 |
19 | INSERT INTO blocktx.registered_transactions
20 | SELECT t.hash AS hash FROM blocktx.transactions t WHERE t.is_registered;
21 |
22 | DROP INDEX blocktx.ix_block_transactions_map_inserted_at;
23 | DROP TABLE blocktx.block_transactions_map;
24 | DROP INDEX blocktx.ix_transactions_inserted_at;
25 | DROP INDEX blocktx.ux_transactions_hash;
26 | DROP TABLE blocktx.transactions;
27 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/fixtures/set_sent/callbacker.transaction_callbacks.yaml:
--------------------------------------------------------------------------------
1 | - url: https://arc-callback-1/callback
2 | id: 1
3 | token: token
4 | tx_id: 96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28
5 | tx_status: "SEEN_ON_NETWORK"
6 | timestamp: 2024-09-01 12:00:00
7 | hash: 0x285ccf9955a9dcef803dcc766fbcb257bfedd134ce19cc6ead3bdc96baf8cb96
8 |
9 | - url: https://arc-callback-1/callback
10 | id: 2
11 | token: token
12 | tx_id: 96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28
13 | tx_status: "MINED"
14 | timestamp: 2024-09-01 12:01:00
15 | block_hash: 0000000000000000086527da012efb2d45e00fba9f31e84c35dce998abb409ad
16 | block_height: 860339
17 | pending: 2024-09-01 12:02:00
18 | hash: 0x285ccf9955a9dcef803dcc766fbcb257bfedd134ce19cc6ead3bdc96baf8cb96
19 |
20 | - url: https://arc-callback-1/callback
21 | id: 3
22 | token: token
23 | tx_id: 3413cc9b40d48661c7f36bee88ebb39fca1d593f9672f840afdf07b018e73bb7
24 | tx_status: "SEEN_ON_NETWORK"
25 | timestamp: 2024-09-01 12:00:01
26 | hash: 0xb73be718b007dfaf40f872963f591dca9fb3eb88ee6bf3c76186d4409bcc1334
27 |
--------------------------------------------------------------------------------
/internal/api/handler/server.go:
--------------------------------------------------------------------------------
1 | package handler
2 |
3 | import (
4 | "log/slog"
5 |
6 | "google.golang.org/grpc/health/grpc_health_v1"
7 | "google.golang.org/grpc/reflection"
8 |
9 | "github.com/bitcoin-sv/arc/internal/api"
10 | "github.com/bitcoin-sv/arc/internal/grpc_utils"
11 | )
12 |
13 | // Server type carries the logger within it.
14 | type Server struct {
15 | grpc_utils.GrpcServer
16 |
17 | handler api.ArcDefaultHandlerHealth
18 | logger *slog.Logger
19 | }
20 |
21 | // NewServer will return a server instance with the logger stored within it.
22 | func NewServer(logger *slog.Logger, handler api.ArcDefaultHandlerHealth, cfg grpc_utils.ServerConfig) (*Server, error) {
23 | logger = logger.With(slog.String("module", "server"))
24 |
25 | grpcServer, err := grpc_utils.NewGrpcServer(logger, cfg)
26 | if err != nil {
27 | return nil, err
28 | }
29 |
30 | s := &Server{
31 | GrpcServer: grpcServer,
32 | handler: handler,
33 | logger: logger,
34 | }
35 |
36 | // register health server endpoint
37 | grpc_health_v1.RegisterHealthServer(grpcServer.Srv, s)
38 | reflection.Register(s.GrpcServer.Srv)
39 | return s, nil
40 | }
41 |
--------------------------------------------------------------------------------
/internal/broadcaster/mutli_utxo_consolidator.go:
--------------------------------------------------------------------------------
1 | package broadcaster
2 |
3 | import (
4 | "log/slog"
5 | )
6 |
7 | const (
8 | broadcastRateTxsPerMinute = 50 * 60
9 | )
10 |
11 | type MultiKeyUtxoConsolidator struct {
12 | cs []Consolidator
13 | logger *slog.Logger
14 | }
15 |
16 | type Consolidator interface {
17 | Start(txsRateTxsPerSecond int) error
18 | Wait()
19 | Shutdown()
20 | }
21 |
22 | func NewMultiKeyUtxoConsolidator(logger *slog.Logger, cs []Consolidator) *MultiKeyUtxoConsolidator {
23 | mrb := &MultiKeyUtxoConsolidator{
24 | cs: cs,
25 | logger: logger,
26 | }
27 |
28 | return mrb
29 | }
30 |
31 | func (mrb *MultiKeyUtxoConsolidator) Start() {
32 | txsPerMinutePerBroadcaster := broadcastRateTxsPerMinute / len(mrb.cs)
33 | for _, c := range mrb.cs {
34 | err := c.Start(txsPerMinutePerBroadcaster)
35 | if err != nil {
36 | mrb.logger.Error("failed to start consolidator", slog.String("err", err.Error()))
37 | }
38 | }
39 |
40 | for _, c := range mrb.cs {
41 | c.Wait()
42 | }
43 | }
44 |
45 | func (mrb *MultiKeyUtxoConsolidator) Shutdown() {
46 | for _, c := range mrb.cs {
47 | c.Shutdown()
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/internal/cache/cache.go:
--------------------------------------------------------------------------------
1 | package cache
2 |
3 | import (
4 | "errors"
5 | "time"
6 | )
7 |
8 | var (
9 | ErrCacheNotFound = errors.New("key not found in cache")
10 | ErrCacheFailedToSet = errors.New("failed to set value in cache")
11 | ErrCacheFailedToDel = errors.New("failed to delete value from cache")
12 | ErrCacheFailedToGet = errors.New("failed to get value from cache")
13 | ErrCacheFailedToScan = errors.New("failed to scan cache")
14 | ErrCacheFailedToGetCount = errors.New("failed to get count from cache")
15 | ErrCacheFailedToExecuteTx = errors.New("failed to execute transaction")
16 | )
17 |
18 | type Store interface {
19 | Get(key string) ([]byte, error)
20 | Set(key string, value []byte, ttl time.Duration) error
21 | Del(keys ...string) error
22 |
23 | MapGet(hashsetKey string, field string) ([]byte, error)
24 | MapGetAll(hashsetKey string) (map[string][]byte, error)
25 | MapSet(hashsetKey string, field string, value []byte) error
26 | MapDel(hashsetKey string, fields ...string) error
27 | MapLen(hashsetKey string) (int64, error)
28 | MapExtractAll(hashsetKey string) (map[string][]byte, error)
29 | }
30 |
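
A minimal sketch of using the Store interface above; `c` is any implementation (in-memory or Redis-backed), and the key, TTL and function name are illustrative:

package example

import (
	"errors"
	"log"
	"time"

	"github.com/bitcoin-sv/arc/internal/cache"
)

// rememberTx caches a payload under the transaction ID and reads it back,
// distinguishing a missing key from other failures via ErrCacheNotFound.
func rememberTx(c cache.Store, txID string, payload []byte) {
	if err := c.Set(txID, payload, 10*time.Minute); err != nil {
		log.Println("set failed:", err)
		return
	}

	got, err := c.Get(txID)
	if errors.Is(err, cache.ErrCacheNotFound) {
		log.Println("key expired or missing")
		return
	}
	if err != nil {
		log.Println("get failed:", err)
		return
	}

	log.Printf("cached %d bytes for %s", len(got), txID)
}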
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/migrations/000009_inserted_at_timestampz.down.sql:
--------------------------------------------------------------------------------
1 | -- restore inserted_at_num on transactions
2 | ALTER TABLE transactions ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
3 |
4 | UPDATE transactions
5 | SET inserted_at_num = TO_NUMBER(TO_CHAR(inserted_at, 'yyyymmddhh24'),'9999999999');
6 |
7 | DROP INDEX ix_transactions_inserted_at;
8 | ALTER TABLE transactions DROP COLUMN inserted_at;
9 |
10 | CREATE INDEX ix_transactions_inserted_at ON transactions (inserted_at_num);
11 |
12 | -- restore inserted_at_num on block_transactions_map
13 | ALTER TABLE block_transactions_map ADD COLUMN inserted_at_num INTEGER DEFAULT TO_NUMBER(TO_CHAR((NOW()) AT TIME ZONE 'UTC', 'yyyymmddhh24'), '9999999999') NOT NULL;
14 |
15 | UPDATE block_transactions_map
16 | SET inserted_at_num = TO_NUMBER(TO_CHAR(inserted_at, 'yyyymmddhh24'),'9999999999');
17 |
18 | DROP INDEX ix_block_transactions_map_inserted_at;
19 | ALTER TABLE block_transactions_map DROP COLUMN inserted_at;
20 |
21 | CREATE INDEX ix_block_transactions_map_inserted_at ON block_transactions_map (inserted_at_num);
22 |
--------------------------------------------------------------------------------
/internal/api/handler/testdata/config.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | api:
3 | defaultPolicy:
4 | excessiveblocksize: 2000000000
5 | blockmaxsize: 512000000
6 | maxtxsizepolicy: 100000000
7 | maxorphantxsize: 1000000000
8 | datacarriersize: 4294967295
9 | maxscriptsizepolicy: 100000000
10 | maxopsperscriptpolicy: 4294967295
11 | maxscriptnumlengthpolicy: 10000
12 | maxpubkeyspermultisigpolicy: 4294967295
13 | maxtxsigopscountspolicy: 4294967295
14 | maxstackmemoryusagepolicy: 100000000
15 | maxstackmemoryusageconsensus: 200000000
16 | limitancestorcount: 10000
17 | limitcpfpgroupmemberscount: 25
18 | maxmempool: 2000000000
19 | maxmempoolsizedisk: 0
20 | mempoolmaxpercentcpfp: 10
21 | acceptnonstdoutputs: true
22 | datacarrier: true
23 | minminingtxfee: 1e-8
24 | maxstdtxvalidationduration: 3
25 | maxnonstdtxvalidationduration: 1000
26 | maxtxchainvalidationbudget: 50
27 | validationclockcpu: true
28 | minconsolidationfactor: 20
29 | maxconsolidationinputscriptsize: 150
30 | minconfconsolidationinput: 6
31 | minconsolidationinputmaturity: 6
32 | acceptnonstdconsolidationinput: false
33 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/update_mined_double_spend_attempted/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | locked_by: metamorph-1
3 | status: 100
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | competing_txs: '4e6b3dd04f51ac6ce3d051f80d819bed366a4ff29143bb58c01154cb322d1321,30f409d6951483e4d65a586205f373c2f72431ade49abb6f143e82fc53ea6cb1'
7 | - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
8 | locked_by: metamorph-1
9 | status: 100
10 | stored_at: 2023-10-01 14:00:00
11 | last_submitted_at: 2023-10-01 14:00:00
12 | competing_txs: '538808e847d0add40ed9622fff53954c79e1f52db7c47ea0b6cdc0df972f3dcd,30f409d6951483e4d65a586205f373c2f72431ade49abb6f143e82fc53ea6cb1'
13 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
14 | locked_by: metamorph-1
15 | status: 100
16 | stored_at: 2023-10-01 14:00:00
17 | last_submitted_at: 2023-10-01 14:00:00
18 | competing_txs: '4e6b3dd04f51ac6ce3d051f80d819bed366a4ff29143bb58c01154cb322d1321,538808e847d0add40ed9622fff53954c79e1f52db7c47ea0b6cdc0df972f3dcd'
19 |
--------------------------------------------------------------------------------
/internal/blocktx/store/model.go:
--------------------------------------------------------------------------------
1 | package store
2 |
3 | import (
4 | "encoding/hex"
5 | "slices"
6 | "time"
7 |
8 | "github.com/libsv/go-p2p/chaincfg/chainhash"
9 |
10 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
11 | )
12 |
13 | type BlockGap struct {
14 | Height uint64
15 | Hash *chainhash.Hash
16 | }
17 |
18 | type TxHashWithMerkleTreeIndex struct {
19 | Hash []byte
20 | MerkleTreeIndex int64
21 | }
22 |
23 | type BlockTransactionWithMerklePath struct {
24 | BlockTransaction
25 | MerklePath string
26 | }
27 |
28 | type BlockTransaction struct {
29 | TxHash []byte
30 | BlockHash []byte
31 | BlockHeight uint64
32 | MerkleTreeIndex int64
33 | BlockStatus blocktx_api.Status
34 | MerkleRoot []byte
35 | Timestamp time.Time
36 | }
37 |
38 | type BlockStatusUpdate struct {
39 | Hash []byte
40 | Status blocktx_api.Status
41 | }
42 |
43 | func (b *BlockTransaction) GetMerkleRootString() string {
44 | txMerkleRoot := make([]byte, len(b.MerkleRoot))
45 | copy(txMerkleRoot, b.MerkleRoot)
46 | slices.Reverse(txMerkleRoot)
47 | merkleRoot := hex.EncodeToString(txMerkleRoot)
48 |
49 | return merkleRoot
50 | }
51 |
--------------------------------------------------------------------------------
/internal/broadcaster/multi_utxo_creator.go:
--------------------------------------------------------------------------------
1 | package broadcaster
2 |
3 | import (
4 | "log/slog"
5 | )
6 |
7 | type MultiKeyUTXOCreator struct {
8 | creators []Creator
9 | logger *slog.Logger
10 | }
11 |
12 | type Creator interface {
13 | Start(outputs uint64, satoshisPerOutput uint64) error
14 | Wait()
15 | Shutdown()
16 | }
17 |
18 | func NewMultiKeyUTXOCreator(logger *slog.Logger, creators []Creator, opts ...func(p *MultiKeyUTXOCreator)) *MultiKeyUTXOCreator {
19 | mkuc := &MultiKeyUTXOCreator{
20 | creators: creators,
21 | logger: logger,
22 | }
23 |
24 | for _, opt := range opts {
25 | opt(mkuc)
26 | }
27 |
28 | return mkuc
29 | }
30 |
31 | func (mkuc *MultiKeyUTXOCreator) Start(outputs uint64, satoshisPerOutput uint64) {
32 | for _, creator := range mkuc.creators {
33 | err := creator.Start(outputs, satoshisPerOutput)
34 | if err != nil {
35 | mkuc.logger.Error("failed to start UTXO creator", slog.String("err", err.Error()))
36 | }
37 | }
38 |
39 | for _, creator := range mkuc.creators {
40 | creator.Wait()
41 | }
42 | }
43 |
44 | func (mkuc *MultiKeyUTXOCreator) Shutdown() {
45 | for _, creator := range mkuc.creators {
46 | creator.Shutdown()
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/internal/callbacker/store/postgresql/fixtures/unset_pending/callbacker.transaction_callbacks.yaml:
--------------------------------------------------------------------------------
1 | - url: https://arc-callback-1/callback
2 | id: 1
3 | token: token
4 | tx_id: 96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28
5 | tx_status: "SEEN_ON_NETWORK"
6 | timestamp: 2024-09-01 12:00:00
7 | pending: 2024-09-01 12:00:00
8 | hash: 0x285ccf9955a9dcef803dcc766fbcb257bfedd134ce19cc6ead3bdc96baf8cb96
9 |
10 | - url: https://arc-callback-1/callback
11 | id: 2
12 | token: token
13 | tx_id: 96cbf8ba96dc3bad6ecc19ce34d1edbf57b2bc6f76cc3d80efdca95599cf5c28
14 | tx_status: "MINED"
15 | timestamp: 2024-09-01 12:01:00
16 | block_hash: 0000000000000000086527da012efb2d45e00fba9f31e84c35dce998abb409ad
17 | block_height: 860339
18 | pending: 2024-09-01 12:02:00
19 | hash: 0x285ccf9955a9dcef803dcc766fbcb257bfedd134ce19cc6ead3bdc96baf8cb96
20 |
21 | - url: https://arc-callback-1/callback
22 | id: 3
23 | token: token
24 | tx_id: 3413cc9b40d48661c7f36bee88ebb39fca1d593f9672f840afdf07b018e73bb7
25 | tx_status: "SEEN_ON_NETWORK"
26 | timestamp: 2024-09-01 12:00:01
27 | pending: 2024-09-01 12:00:00
28 | hash: 0xb73be718b007dfaf40f872963f591dca9fb3eb88ee6bf3c76186d4409bcc1334
29 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/fixtures/merkle_paths/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1001
2 | hash: 0x7c54eba4d46391f403116160ca957fcc31234b826376ac28514da998f9a42eff
3 | merkle_tree_index: 0
4 | - block_id: 1001
5 | hash: 0x8308c059d119b87a9520a9fd3d765f8b74f726e96c77a1a6623d71796cc8c207
6 | merkle_tree_index: 1
7 | - block_id: 1001
8 | hash: 0x37d3de3b96c36dd1cbfa91585f51a5d7e0eb78227d76479adb715600b6bbd3e2
9 | merkle_tree_index: 2
10 | - block_id: 1001
11 | hash: 0x1e2e196b5214312d2334b5b474447984277c7556104c3835f7c1ee13823ac224
12 | merkle_tree_index: 3
13 | - block_id: 1001
14 | hash: 0x08ce28a419bd6c6dfa4a035964010c43aa976af76055f3307b6a6d9d617413ff
15 | merkle_tree_index: 4
16 | - block_id: 1001
17 | hash: 0x0860e409e418b932c879661ff7e26dc844019c133a21a62eef56ad7fc66ab94b
18 | merkle_tree_index: 5
19 | - block_id: 1001
20 | hash: 0x231373b8e687624e7c9977393990d0032744b16f2a0fc2b6ee324aa9eb4c02b3
21 | merkle_tree_index: 6
22 | - block_id: 1001
23 | hash: 0xd30205730b867de99e005f8bd2f53d1b7765ff57c31b25b6173ca5b8d8d128d8
24 | merkle_tree_index: 7
25 | - block_id: 1001
26 | hash: 0x64d2290d4c6919d9ae9744b019a9bd30172be9a4ee3db36912381de7862027bf
27 | merkle_tree_index: 8
28 |
--------------------------------------------------------------------------------
/internal/k8s_watcher/k8s_client/client.go:
--------------------------------------------------------------------------------
1 | package k8s_client
2 |
3 | import (
4 | "context"
5 | "fmt"
6 |
7 | v1 "k8s.io/api/core/v1"
8 |
9 | metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
10 | "k8s.io/client-go/kubernetes"
11 | "k8s.io/client-go/rest"
12 | )
13 |
14 | type K8sClient struct {
15 | client *kubernetes.Clientset
16 | }
17 |
18 | func New() (*K8sClient, error) {
19 | config, err := rest.InClusterConfig()
20 | if err != nil {
21 | return nil, err
22 | }
23 |
24 | clientSet, err := kubernetes.NewForConfig(config)
25 | if err != nil {
26 | return nil, err
27 | }
28 |
29 | return &K8sClient{client: clientSet}, nil
30 | }
31 |
32 | func (k *K8sClient) GetRunningPodNamesSlice(ctx context.Context, namespace string, podName string) ([]string, error) {
33 | pods, err := k.client.CoreV1().Pods(namespace).List(ctx, metav1.ListOptions{
34 | LabelSelector: fmt.Sprintf("app.kubernetes.io/instance=%s", podName),
35 | })
36 |
37 | if err != nil {
38 | return nil, err
39 | }
40 | var podNames []string
41 | for _, item := range pods.Items {
42 | if item.Status.Phase == v1.PodRunning && item.Name != "" {
43 | podNames = append(podNames, item.Name)
44 | }
45 | }
46 |
47 | return podNames, nil
48 | }
49 |
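
A brief usage sketch for the client above; it only works inside a cluster (New relies on rest.InClusterConfig), and the namespace and instance name below are illustrative:

package example

import (
	"context"
	"log"

	"github.com/bitcoin-sv/arc/internal/k8s_watcher/k8s_client"
)

// listRunningPods returns the running pods whose app.kubernetes.io/instance label matches the given name.
func listRunningPods(ctx context.Context) ([]string, error) {
	client, err := k8s_client.New()
	if err != nil {
		return nil, err
	}

	pods, err := client.GetRunningPodNamesSlice(ctx, "arc", "metamorph")
	if err != nil {
		return nil, err
	}

	log.Println("running pods:", pods)
	return pods, nil
}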
--------------------------------------------------------------------------------
/internal/broadcaster/multi_utxo_creator_test.go:
--------------------------------------------------------------------------------
1 | package broadcaster_test
2 |
3 | import (
4 | "errors"
5 | "log/slog"
6 | "os"
7 | "testing"
8 | "time"
9 |
10 | "github.com/bitcoin-sv/arc/internal/broadcaster"
11 | "github.com/bitcoin-sv/arc/internal/broadcaster/mocks"
12 | )
13 |
14 | func TestMultiKeyUTXOCreatorStart(t *testing.T) {
15 | t.Run("start and shutdown", func(t *testing.T) {
16 | t.Parallel()
17 | logger := slog.New(slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug}))
18 |
19 | // Create mocks for creators
20 | creators := []broadcaster.Creator{
21 | &mocks.CreatorMock{
22 | StartFunc: func(_ uint64, _ uint64) error { return nil },
23 | WaitFunc: func() {},
24 | ShutdownFunc: func() {},
25 | },
26 | &mocks.CreatorMock{
27 | StartFunc: func(_ uint64, _ uint64) error { return errors.New("failed to start") },
28 | WaitFunc: func() {},
29 | ShutdownFunc: func() {},
30 | },
31 | }
32 |
33 | // Initialize the MultiKeyUTXOCreator
34 | mkuc := broadcaster.NewMultiKeyUTXOCreator(logger, creators)
35 |
36 | // Start the MultiKeyUTXOCreator
37 | mkuc.Start(100, 1000)
38 | time.Sleep(50 * time.Millisecond)
39 |
40 | // Shutdown the MultiKeyUTXOCreator
41 | mkuc.Shutdown()
42 | })
43 | }
44 |
--------------------------------------------------------------------------------
/config/load_test.go:
--------------------------------------------------------------------------------
1 | package config
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/stretchr/testify/assert"
7 | "github.com/stretchr/testify/require"
8 | )
9 |
10 | func Test_Load(t *testing.T) {
11 | t.Run("default load", func(t *testing.T) {
12 | // given
13 | expectedConfig := getDefaultArcConfig()
14 |
15 | // when
16 | actualConfig, err := Load()
17 | require.NoError(t, err, "error loading config")
18 |
19 | // then
20 | assert.Equal(t, expectedConfig, actualConfig)
21 | })
22 |
23 | t.Run("partial file override", func(t *testing.T) {
24 | // given
25 | expectedConfig := getDefaultArcConfig()
26 |
27 | // when
28 | actualConfig, err := Load("./test_files/config.yaml")
29 | require.NoError(t, err, "error loading config")
30 |
31 | // then
32 | // verify that a value not present in the override file keeps its default
33 | assert.Equal(t, expectedConfig.Common.GrpcMessageSize, actualConfig.Common.GrpcMessageSize)
34 |
35 | // verify correct override
36 | assert.Equal(t, "INFO", actualConfig.Common.LogLevel)
37 | assert.Equal(t, "text", actualConfig.Common.LogFormat)
38 | assert.Equal(t, "mainnet", actualConfig.Common.Network)
39 | assert.NotNil(t, actualConfig.Common.Tracing)
40 | assert.Equal(t, "http://tracing:1234", actualConfig.Common.Tracing.DialAddr)
41 | })
42 | }
43 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/clear_blocks/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1
2 | hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
3 | merkle_tree_index: 1
4 | - block_id: 1
5 | hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
6 | merkle_tree_index: 2
7 | - block_id: 1
8 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
9 | merkle_tree_index: 3
10 | - block_id: 1
11 | hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
12 | merkle_tree_index: 4
13 | - block_id: 1
14 | hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd
15 | merkle_tree_index: 5
16 | - block_id: 2
17 | hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0
18 | merkle_tree_index: 6
19 | - block_id: 2
20 | hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357
21 | merkle_tree_index: 7
22 | - block_id: 2
23 | hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
24 | merkle_tree_index: 8
25 | - block_id: 2
26 | hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799
27 | merkle_tree_index: 9
28 | - block_id: 2
29 | hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8
30 | merkle_tree_index: 10
31 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/set_unlocked_by_name/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | locked_by: metamorph-3
3 | status: 60
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
7 | locked_by: metamorph-3
8 | status: 50
9 | stored_at: 2023-10-01 14:00:00
10 | last_submitted_at: 2023-10-01 14:00:00
11 | - hash: 0xf791ec50447e3001b9348930659527ea92dee506e9950014bcc7c5b146e2417f
12 | locked_by: metamorph-3
13 | status: 120
14 | stored_at: 2023-10-01 14:00:00
15 | last_submitted_at: 2023-10-01 14:00:00
16 | - hash: 0x89714f129748e5176a07fc4eb89cf27a9e60340117e6b56bb742acb2873f8140
17 | locked_by: metamorph-3
18 | status: 90
19 | stored_at: 2023-10-01 14:00:00
20 | last_submitted_at: 2023-10-01 14:00:00
21 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
22 | locked_by: metamorph-1
23 | status: 70
24 | stored_at: 2023-10-01 14:00:00
25 | last_submitted_at: 2023-10-01 14:00:00
26 | - hash: 0x549e32c2f00a08794ec127edc39380c6f5bb2751aff2484da073ff737df965bb
27 | locked_by: NONE
28 | status: 70
29 | stored_at: 2023-10-01 14:00:00
30 | last_submitted_at: 2023-10-01 14:00:00
31 |
--------------------------------------------------------------------------------
/internal/node_client/rpc_client_test.go:
--------------------------------------------------------------------------------
1 | package node_client_test
2 |
3 | import (
4 | "context"
5 | "testing"
6 |
7 | "github.com/stretchr/testify/require"
8 |
9 | "github.com/bitcoin-sv/arc/internal/node_client"
10 | "github.com/bitcoin-sv/arc/pkg/rpc_client"
11 | )
12 |
13 | func TestRPCClient(t *testing.T) {
14 | t.Helper()
15 | if testing.Short() {
16 | t.Skip("skipping integration test")
17 | }
18 |
19 | ctx := context.Background()
20 |
21 | setup()
22 | sut, err := rpc_client.NewRPCClient(host, hostPort, user, password)
23 | require.NoError(t, err)
24 |
25 | address, _ := node_client.FundNewWallet(t, bitcoind)
26 |
27 | utxos := node_client.GetUtxos(t, bitcoind, address)
28 | require.GreaterOrEqual(t, len(utxos), 1, "No UTXOs available for the address")
29 |
30 | t.Run("invalidate block", func(t *testing.T) {
31 | t.Parallel()
32 | // given
33 | blockHash, err := bitcoind.Generate(1)
34 | require.NoError(t, err)
35 |
36 | // when
37 | err = sut.InvalidateBlock(ctx, blockHash[0])
38 |
39 | // then
40 | require.NoError(t, err)
41 |
42 | // given
43 | cancelCtx, cancel := context.WithCancel(ctx)
44 | cancel()
45 |
46 | // when
47 | err = sut.InvalidateBlock(cancelCtx, blockHash[0])
48 |
49 | // then
50 | require.ErrorIs(t, err, context.Canceled)
51 | })
52 | }
53 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/app/keyset/topup/topup.go:
--------------------------------------------------------------------------------
1 | package topup
2 |
3 | import (
4 | "context"
5 | "log/slog"
6 | "time"
7 |
8 | "github.com/spf13/cobra"
9 |
10 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/helper"
11 | "github.com/bitcoin-sv/arc/pkg/woc_client"
12 | )
13 |
14 | var Cmd = &cobra.Command{
15 | Use: "topup",
16 | Short: "Top up funding address with BSV",
17 | RunE: func(_ *cobra.Command, _ []string) error {
18 | isTestnet := helper.GetBool("testnet")
19 | wocAPIKey := helper.GetString("wocAPIKey")
20 | logLevel := helper.GetString("logLevel")
21 | logFormat := helper.GetString("logFormat")
22 | logger := helper.NewLogger(logLevel, logFormat)
23 |
24 | wocClient := woc_client.New(!isTestnet, woc_client.WithAuth(wocAPIKey), woc_client.WithLogger(logger))
25 |
26 | keySetsMap, err := helper.GetSelectedKeySets()
27 | if err != nil {
28 | return err
29 | }
30 |
31 | for keyName, keySet := range keySetsMap {
32 | if wocAPIKey == "" {
33 | time.Sleep(500 * time.Millisecond)
34 | }
35 | err = wocClient.TopUp(context.Background(), keySet.Address(!isTestnet))
36 |
37 | if err != nil {
38 | return err
39 | }
40 | logger.Info("top up complete", slog.String("address", keySet.Address(!isTestnet)), slog.String("name", keyName))
41 | }
42 |
43 | return nil
44 | },
45 | }
46 |
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/setup_test.go:
--------------------------------------------------------------------------------
1 | package integrationtest
2 |
3 | import (
4 | "database/sql"
5 | "flag"
6 | "log"
7 | "testing"
8 |
9 | _ "github.com/golang-migrate/migrate/v4/source/file"
10 | _ "github.com/lib/pq"
11 | "github.com/ory/dockertest/v3"
12 |
13 | testutils "github.com/bitcoin-sv/arc/pkg/test_utils"
14 | )
15 |
16 | const migrationsPath = "file://../store/postgresql/migrations"
17 |
18 | var (
19 | dbInfo string
20 | dbConn *sql.DB
21 | )
22 |
23 | func TestMain(m *testing.M) {
24 | flag.Parse()
25 |
26 | if testing.Short() {
27 | return
28 | }
29 |
30 | testmain(m)
31 | }
32 |
33 | func testmain(m *testing.M) int {
34 | pool, err := dockertest.NewPool("")
35 | if err != nil {
36 | log.Fatalf("failed to create pool: %v", err)
37 | return 1
38 | }
39 |
40 | port := "5437"
41 | resource, connStr, err := testutils.RunAndMigratePostgresql(pool, port, "blocktx", migrationsPath)
42 | if err != nil {
43 | log.Print(err)
44 | return 1
45 | }
46 | defer func() {
47 | err = pool.Purge(resource)
48 | if err != nil {
49 | log.Fatalf("failed to purge pool: %v", err)
50 | }
51 | }()
52 |
53 | dbInfo = connStr
54 |
55 | dbConn, err = sql.Open("postgres", dbInfo)
56 | if err != nil {
57 | log.Fatalf("failed to create db connection: %v", err)
58 | return 1
59 | }
60 |
61 | return m.Run()
62 | }
63 |
--------------------------------------------------------------------------------
/internal/api/publish_adapter.go:
--------------------------------------------------------------------------------
1 | package api
2 |
3 | import (
4 | "context"
5 | "log/slog"
6 | "sync"
7 |
8 | "github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
9 | "github.com/bitcoin-sv/arc/internal/mq"
10 | )
11 |
12 | type PublishAdapter struct {
13 | mqClient mq.MessageQueueClient
14 | logger *slog.Logger
15 | ctx context.Context
16 | cancelAll context.CancelFunc
17 | wg *sync.WaitGroup
18 | }
19 |
20 | func NewPublishAdapter(mqClient mq.MessageQueueClient, logger *slog.Logger) *PublishAdapter {
21 | m := &PublishAdapter{
22 | mqClient: mqClient,
23 | logger: logger,
24 | wg: &sync.WaitGroup{},
25 | }
26 |
27 | m.ctx, m.cancelAll = context.WithCancel(context.Background())
28 |
29 | return m
30 | }
31 |
32 | func (p *PublishAdapter) StartPublishMarshal(topic string, postTransactionRequests chan *metamorph_api.PostTransactionRequest) {
33 | p.wg.Go(func() {
34 | for {
35 | select {
36 | case <-p.ctx.Done():
37 | return
38 | case request := <-postTransactionRequests:
39 | err := p.mqClient.PublishMarshal(p.ctx, topic, request)
40 | if err != nil {
41 | p.logger.Error("Failed to publish post transaction request message", slog.String("err", err.Error()))
42 | }
43 | }
44 | }
45 | })
46 | }
47 |
48 | func (p *PublishAdapter) Shutdown() {
49 | p.cancelAll()
50 | p.wg.Wait()
51 | }
52 |
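
A minimal wiring sketch for the adapter above; `mqClient` is any mq.MessageQueueClient, and the topic name and function name are illustrative:

package example

import (
	"log/slog"
	"os"

	"github.com/bitcoin-sv/arc/internal/api"
	"github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
	"github.com/bitcoin-sv/arc/internal/mq"
)

// runPublisher starts the background publishing loop and returns a stop function
// that cancels the loop and waits for it to finish.
func runPublisher(mqClient mq.MessageQueueClient, requests chan *metamorph_api.PostTransactionRequest) func() {
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))

	adapter := api.NewPublishAdapter(mqClient, logger)
	adapter.StartPublishMarshal("submit-tx", requests)

	return adapter.Shutdown
}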
--------------------------------------------------------------------------------
/scripts/compare_yamls.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "log"
5 | "os"
6 | "strings"
7 |
8 | "gopkg.in/yaml.v3"
9 | )
10 |
11 | func main() {
12 | exampleConfigFile, err := os.ReadFile("config/example_config.yaml")
13 | if err != nil {
14 | log.Fatal(err)
15 | }
16 |
17 | newConfigFile, err := os.ReadFile("config/dumped_config.yaml")
18 | if err != nil {
19 | log.Fatal(err)
20 | }
21 |
22 | // unmarshal yaml
23 | exampleConfig := make(map[string]interface{})
24 | if err := yaml.Unmarshal(exampleConfigFile, &exampleConfig); err != nil {
25 | log.Fatal(err)
26 | }
27 | exampleConfig = convertToLowercase(exampleConfig)
28 |
29 | newConfig := make(map[string]interface{})
30 | if err := yaml.Unmarshal(newConfigFile, &newConfig); err != nil {
31 | log.Fatal(err)
32 | }
33 |
34 | // iterate key in dumped config
35 | for newKey := range newConfig {
36 | // check whether key exists in example_config
37 | if _, ok := exampleConfig[newKey]; !ok {
38 | // fail if the key is missing from the example config
39 | log.Fatalf("key: %s does not exist in /config/example_config.yaml", newKey)
40 | }
41 | }
42 | }
43 |
44 | func convertToLowercase(configMap map[string]interface{}) map[string]interface{} {
45 | lowercase := make(map[string]interface{}, len(configMap))
46 | for k, v := range configMap {
47 | lowercase[strings.ToLower(k)] = v
48 | }
49 | return lowercase
50 | }
51 |
--------------------------------------------------------------------------------
/cmd/services/k8s_watcher.go:
--------------------------------------------------------------------------------
1 | package services
2 |
3 | import (
4 | "fmt"
5 | "log/slog"
6 |
7 | "github.com/bitcoin-sv/arc/config"
8 | "github.com/bitcoin-sv/arc/internal/grpc_utils"
9 | "github.com/bitcoin-sv/arc/internal/k8s_watcher"
10 | "github.com/bitcoin-sv/arc/internal/k8s_watcher/k8s_client"
11 | "github.com/bitcoin-sv/arc/internal/metamorph/metamorph_api"
12 | )
13 |
14 | func StartK8sWatcher(logger *slog.Logger, k8sWatcherCfg *config.K8sWatcherConfig, commonCfg *config.CommonConfig) (func(), error) {
15 | logger = logger.With(slog.String("service", "k8s-watcher"))
16 |
17 | mtmConn, err := grpc_utils.DialGRPC(k8sWatcherCfg.MetamorphDialAddr, commonCfg.Prometheus.Endpoint, commonCfg.GrpcMessageSize, nil)
18 | if err != nil {
19 | return nil, fmt.Errorf("failed to connect to metamorph server: %v", err)
20 | }
21 |
22 | metamorphClient := metamorph_api.NewMetaMorphAPIClient(mtmConn)
23 |
24 | k8sClient, err := k8s_client.New()
25 | if err != nil {
26 | return nil, fmt.Errorf("failed to get k8s-client: %v", err)
27 | }
28 |
29 | k8sWatcher := k8s_watcher.New(logger, metamorphClient, k8sClient, k8sWatcherCfg.Namespace)
30 | err = k8sWatcher.Start()
31 | if err != nil {
32 | return nil, fmt.Errorf("failed to start k8s-watcher: %v", err)
33 | }
34 |
35 | return func() {
36 | logger.Info("Shutting down K8s watcher")
37 | k8sWatcher.Shutdown()
38 | }, nil
39 | }
40 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/get_block_transactions_hashes.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 | "errors"
6 |
7 | "github.com/libsv/go-p2p/chaincfg/chainhash"
8 |
9 | "github.com/bitcoin-sv/arc/internal/blocktx/store"
10 | "github.com/bitcoin-sv/arc/pkg/tracing"
11 | )
12 |
13 | func (p *PostgreSQL) GetBlockTransactionsHashes(ctx context.Context, blockHash []byte) (txHashes []*chainhash.Hash, err error) {
14 | ctx, span := tracing.StartTracing(ctx, "GetBlockTransactionsHashes", p.tracingEnabled, p.tracingAttributes...)
15 | defer func() {
16 | tracing.EndTracing(span, err)
17 | }()
18 |
19 | q := `
20 | SELECT
21 | bt.hash
22 | FROM blocktx.block_transactions AS bt
23 | JOIN blocktx.blocks AS b ON bt.block_id = b.id
24 | WHERE b.hash = $1
25 | ORDER BY bt.merkle_tree_index ASC
26 | `
27 |
28 | rows, err := p.db.QueryContext(ctx, q, blockHash)
29 | if err != nil {
30 | return nil, err
31 | }
32 | defer rows.Close()
33 |
34 | for rows.Next() {
35 | var txHash []byte
36 | err = rows.Scan(&txHash)
37 | if err != nil {
38 | return nil, errors.Join(store.ErrFailedToGetRows, err)
39 | }
40 |
41 | cHash, err := chainhash.NewHash(txHash)
42 | if err != nil {
43 | return nil, errors.Join(store.ErrFailedToParseHash, err)
44 | }
45 |
46 | txHashes = append(txHashes, cHash)
47 | }
48 |
49 | return txHashes, nil
50 | }
51 |
--------------------------------------------------------------------------------
/internal/blocktx/publish_adapter.go:
--------------------------------------------------------------------------------
1 | package blocktx
2 |
3 | import (
4 | "context"
5 | "log/slog"
6 | "sync"
7 |
8 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
9 | "github.com/bitcoin-sv/arc/internal/mq"
10 | )
11 |
12 | type PublishAdapter struct {
13 | mqClient mq.MessageQueueClient
14 | logger *slog.Logger
15 | ctx context.Context
16 | cancelAll context.CancelFunc
17 | wg *sync.WaitGroup
18 | }
19 |
20 | func NewPublishAdapter(mqClient mq.MessageQueueClient, logger *slog.Logger) *PublishAdapter {
21 | m := &PublishAdapter{
22 | mqClient: mqClient,
23 | logger: logger,
24 | wg: &sync.WaitGroup{},
25 | }
26 |
27 | m.ctx, m.cancelAll = context.WithCancel(context.Background())
28 |
29 | return m
30 | }
31 |
32 | func (p *PublishAdapter) StartPublishMarshal(topic string, transactionBlocksChan chan *blocktx_api.TransactionBlocks) {
33 | p.wg.Go(func() {
34 | for {
35 | select {
36 | case <-p.ctx.Done():
37 | return
38 | case request := <-transactionBlocksChan:
39 | err := p.mqClient.PublishMarshalCore(topic, request)
40 | if err != nil {
41 | p.logger.Error("Failed to publish transaction blocks message", slog.Int("count", len(request.TransactionBlocks)), slog.String("err", err.Error()))
42 | }
43 | }
44 | }
45 | })
46 | }
47 |
48 | func (p *PublishAdapter) Shutdown() {
49 | p.cancelAll()
50 | p.wg.Wait()
51 | }
52 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-10 14:00:00
2 | id: 1
3 | hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9
4 | prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
5 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
6 | height: 822013
7 | processed_at: 2023-12-10 14:10:00
8 | size: 86840000
9 | tx_count: 23477
10 | status: 10 # LONGEST
11 | is_longest: true
12 | - inserted_at: 2023-12-15 14:00:00
13 | id: 2
14 | hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
15 | prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a
16 | merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410
17 | height: 822012
18 | processed_at: 2023-12-15 14:10:00
19 | size: 3030000
20 | tx_count: 856
21 | status: 20 # STALE
22 | is_longest: false
23 | - inserted_at: 2023-12-10 14:00:00
24 | id: 3
25 | hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067
26 | prevhash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9
27 | merkleroot: 0x713e7713ebc6414420a418e87bbd42ecaf7e0e38c923c2cf9e72718d3c329acb
28 | height: 822014
29 | processed_at: 2023-12-10 14:10:00
30 | size: 86840000
31 | tx_count: 23477
32 | status: 10 # LONGEST
33 | is_longest: true
34 |
--------------------------------------------------------------------------------
/internal/validator/helpers.go:
--------------------------------------------------------------------------------
1 | package validator
2 |
3 | import (
4 | "context"
5 |
6 | sdkTx "github.com/bsv-blockchain/go-sdk/transaction"
7 |
8 | "github.com/bitcoin-sv/arc/internal/beef"
9 | )
10 |
11 | type HexFormat byte
12 |
13 | const (
14 | RawHex HexFormat = iota
15 | EfHex
16 | BeefHex
17 | )
18 |
19 | type FindSourceFlag byte
20 |
21 | const (
22 | SourceTransactionHandler FindSourceFlag = 1 << iota // 1 (binary 0001)
23 | SourceNodes // 2 (binary 0010)
24 | SourceWoC // 4 (binary 0100)
25 | )
26 |
27 | func (flag FindSourceFlag) Has(v FindSourceFlag) bool {
28 | return v&flag != 0
29 | }
30 |
31 | type TxFinderI interface {
32 | GetRawTxs(ctx context.Context, source FindSourceFlag, ids []string) []*sdkTx.Transaction
33 | GetMempoolAncestors(ctx context.Context, ids []string) ([]string, error)
34 | }
35 |
36 | func GetHexFormat(hex []byte) HexFormat {
37 | if beef.CheckBeefFormat(hex) {
38 | return BeefHex
39 | }
40 |
41 | if isEf(hex) {
42 | return EfHex
43 | }
44 |
45 | return RawHex
46 | }
47 |
48 | func isEf(hex []byte) bool {
49 | // check markers - first 10 bytes
50 | // 4 bytes for version + 6 bytes for the marker - 0000000000EF
51 | return len(hex) > 10 &&
52 | hex[4] == 0 &&
53 | hex[5] == 0 &&
54 | hex[6] == 0 &&
55 | hex[7] == 0 &&
56 | hex[8] == 0 &&
57 | hex[9] == 0xEF
58 | }
59 |
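
A short sketch of how the flag bitmask and format detection above might be used; the function name is illustrative:

package example

import (
	"github.com/bitcoin-sv/arc/internal/validator"
)

// classify combines two source flags (0001 | 0100 = 0101), checks membership with Has,
// and detects whether the raw bytes are in BEEF, EF or raw format.
func classify(raw []byte) (validator.HexFormat, bool) {
	sources := validator.SourceTransactionHandler | validator.SourceWoC

	usesWoC := sources.Has(validator.SourceWoC)

	return validator.GetHexFormat(raw), usesWoC
}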
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-10 14:00:00
2 | id: 1
3 | hash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9
4 | prevhash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
5 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
6 | height: 822013
7 | processed_at: 2023-12-10 14:10:00
8 | size: 86840000
9 | tx_count: 23477
10 | status: 10 # LONGEST
11 | is_longest: true
12 | - inserted_at: 2023-12-15 14:00:00
13 | id: 2
14 | hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
15 | prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a
16 | merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410
17 | height: 822012
18 | processed_at: 2023-12-15 14:10:00
19 | size: 3030000
20 | tx_count: 856
21 | status: 20 # STALE
22 | is_longest: false
23 | - inserted_at: 2023-12-10 14:00:00
24 | id: 3
25 | hash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067
26 | prevhash: 0x000000000000000005aa39a25e7e8bf440c270ec9a1bd30e99ab026f39207ef9
27 | merkleroot: 0x713e7713ebc6414420a418e87bbd42ecaf7e0e38c923c2cf9e72718d3c329acb
28 | height: 822014
29 | processed_at: 2023-12-10 14:10:00
30 | size: 86840000
31 | tx_count: 23477
32 | status: 10 # LONGEST
33 | is_longest: true
34 |
--------------------------------------------------------------------------------
/doc/message_queue.puml:
--------------------------------------------------------------------------------
1 | @startuml
2 |
3 | digraph arc {
4 | rankdir=TB;
5 | newrank=true
6 | rank1 [fixedsize=true; width=0.01; height=0.01; label="", style=invisible];
7 | rank2 [fixedsize=true; width=0.01; height=0.01; label="", style=invisible];
8 | rank3 [fixedsize=true; width=0.01; height=0.01; label="", style=invisible];
9 |
10 | rank1 -> rank2 [color=white];
11 |
12 | graph [fontsize=10 fontname="Verdana"];
13 | node [shape=record fontsize=10 fontname="Verdana"];
14 | edge [fontsize=9 fontname="Verdana"];
15 |
16 | subgraph cluster_message_queue_1 {
17 | label = "topic: \"register-tx\"";
    18 |     message_queue_1 [label="<f0> msg| msg| ... | msg| <f3> msg"]
19 | }
20 |
21 | subgraph cluster_message_queue_2 {
22 | label = "topic: \"tx-mined\"";
    23 |     message_queue_2 [label="<f0> msg| msg| ... | msg| <f3> msg"]
24 | }
25 |
26 | metamorph [shape=rectangle, style=filled]
27 | blocktx [shape=rectangle, style=filled]
28 |
29 | metamorph -> message_queue_1:f0 [label=" publish"]
30 | metamorph -> message_queue_2:f0 [label=" subscribe"]
31 |
32 | blocktx -> message_queue_1:f3 [label=" subscribe"]
33 | blocktx -> message_queue_2:f3 [label=" publish"]
34 |
35 | {rank=same; rank1; message_queue_1;}
36 | {rank=same; rank2; blocktx; metamorph;}
37 | {rank=same; rank3; message_queue_2;}
38 | }
39 |
40 | @enduml
41 |
--------------------------------------------------------------------------------
/.github/workflows/go.yaml:
--------------------------------------------------------------------------------
1 | # This workflow will build a golang project
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-go
3 |
4 | name: Go
5 |
6 | on:
7 | pull_request:
8 | branches: ["**"]
9 | paths:
10 | - 'cmd/**'
11 | - 'config/**'
12 | - 'internal/**'
13 | - 'pkg/**'
14 | - 'go.mod'
15 | - 'go.sum'
16 | workflow_dispatch:
17 |
18 | jobs:
19 | build:
20 | runs-on: ubuntu-latest
21 | steps:
22 | - name: Check out the repo
23 | uses: actions/checkout@v4
24 |
25 | - name: Set up Go
26 | uses: actions/setup-go@v5
27 | with:
28 | go-version-file: "./go.mod"
29 |
30 | - name: Install Task
31 | run: go install github.com/go-task/task/v3/cmd/task@latest
32 |
33 | - name: Verify dependencies
34 | run: go mod verify
35 |
36 | - name: Build
37 | run: go build -v ./...
38 |
39 | - name: Run go vet
40 | run: go vet ./...
41 |
42 | - name: Install code generation tools
43 | run: task install_gen
44 |
45 | - name: Check go format
46 | run: gofmt -s -w . && git diff --exit-code
47 |
48 | - name: Check go generate
49 | run: task gen_go && git diff --exit-code
50 |
51 | - name: Check generate api
52 | run: task api && git diff --exit-code
53 |
54 | - name: Check config
55 | run: task compare_config
56 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/insert_block/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-15 14:30:00
2 | id: 1
3 | hash: 0x000000000000000003b15d668b54c4b91ae81a86298ee209d9f39fd7a769bcde
4 | prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067
5 | merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257
6 | height: 822015
7 | processed_at: 2023-12-15 14:30:00
8 | size: 20160000
9 | tx_count: 6523
10 | status: 10 # LONGEST
11 | chainwork: '123456'
12 | is_longest: true
13 | - inserted_at: 2023-12-15 14:30:00
14 | id: 2
15 | hash: 0x00000000000000000659df0d3cf98ebe46931b67117502168418f9dce4e1b4c9
16 | prevhash: 0x0000000000000000025855b62f4c2e3732dad363a6f2ead94e4657ef96877067
17 | merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257
18 | height: 822015
19 | processed_at: 2023-12-15 14:30:00
20 | size: 20160000
21 | tx_count: 6523
22 | status: 20 # STALE
23 | chainwork: '123456'
24 | is_longest: false
25 | - inserted_at: 2023-12-15 14:30:00
26 | id: 3
27 | hash: 0x0000000000000000072ded7ebd9ca6202a1894cc9dc5cd71ad6cf9c563b01ab7
28 | prevhash: 0x000000000000000002a0926c51854d2bd525c26026ab0f178ca07f723b31033a
29 | merkleroot: 0x7382df1b717287ab87e5e3e25759697c4c45eea428f701cdd0c77ad3fc707257
30 | height: 822015
31 | processed_at: 2023-12-15 14:30:00
32 | size: 20160000
33 | tx_count: 6523
34 | status: 30 # ORPHANED
35 | chainwork: '123456'
36 | is_longest: false
37 |
--------------------------------------------------------------------------------
/internal/global/types_test.go:
--------------------------------------------------------------------------------
1 | package global_test
2 |
3 | import (
4 | "context"
5 | "errors"
6 | "log/slog"
7 | "testing"
8 |
9 | "github.com/bitcoin-sv/arc/internal/global"
10 | "github.com/bitcoin-sv/arc/internal/global/mocks"
11 | )
12 |
13 | func TestStoppablesShutdown(t *testing.T) {
14 | t.Run("shutdown stoppable", func(_ *testing.T) {
15 | // given
16 | stoppables := global.Stoppables{&mocks.StoppableMock{ShutdownFunc: func() {}}, &mocks.StoppableMock{ShutdownFunc: func() {}}}
17 |
18 | // when
19 | stoppables.Shutdown()
20 | })
21 | }
22 |
23 | func TestStoppablesWithErrorShutdown(t *testing.T) {
24 | t.Run("shutdown stoppable", func(_ *testing.T) {
25 | // given
26 | stoppables := global.StoppablesWithError{
27 | &mocks.StoppableWithErrorMock{ShutdownFunc: func() error { return nil }},
28 | &mocks.StoppableWithErrorMock{ShutdownFunc: func() error { return errors.New("some error") }},
29 | }
30 |
31 | // when
32 | stoppables.Shutdown(slog.Default())
33 | })
34 | }
35 |
36 | func TestStoppablesWithContextShutdown(t *testing.T) {
37 | t.Run("shutdown stoppable", func(_ *testing.T) {
38 | // given
39 | stoppables := global.StoppablesWithContext{
40 | &mocks.StoppableWithContextMock{ShutdownFunc: func(_ context.Context) error { return nil }},
41 | &mocks.StoppableWithContextMock{ShutdownFunc: func(_ context.Context) error { return errors.New("some error") }},
42 | }
43 |
44 | // when
45 | stoppables.Shutdown(context.TODO(), slog.Default())
46 | })
47 | }
48 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/app/keyset/address/address.go:
--------------------------------------------------------------------------------
1 | package address
2 |
3 | import (
4 | "log/slog"
5 |
6 | "github.com/spf13/cobra"
7 |
8 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/helper"
9 | )
10 |
11 | var (
12 | Cmd = &cobra.Command{
13 | Use: "address",
14 | Short: "Show address of the keyset",
15 | RunE: func(_ *cobra.Command, _ []string) error {
16 | isTestnet := helper.GetBool("testnet")
17 |
18 | logLevel := helper.GetString("logLevel")
19 | logFormat := helper.GetString("logFormat")
20 | logger := helper.NewLogger(logLevel, logFormat)
21 |
22 | keySetsMap, err := helper.GetSelectedKeySets()
23 | if err != nil {
24 | return err
25 | }
26 |
27 | names := helper.GetOrderedKeys(keySetsMap)
28 |
29 | for _, name := range names {
30 | keySet := keySetsMap[name]
31 |
32 | logger.Info("address", slog.String("name", name), slog.String("address", keySet.Address(!isTestnet)), slog.String("key", keySet.GetMaster().String()))
33 | }
34 |
35 | return nil
36 | },
37 | }
38 | )
39 |
40 | func init() {
41 | logger := helper.NewLogger("INFO", "tint")
42 |
43 | Cmd.SetHelpFunc(func(command *cobra.Command, strings []string) {
44 | // Hide unused persistent flags
45 | err := command.Flags().MarkHidden("wocAPIKey")
46 | if err != nil {
47 | logger.Error("failed to mark flag hidden", slog.String("flag", "wocAPIKey"), slog.String("error", err.Error()))
48 | }
49 | // Call parent help func
50 | command.Parent().HelpFunc()(command, strings)
51 | })
52 | }
53 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/app/keyset/balance/balance.go:
--------------------------------------------------------------------------------
1 | package balance
2 |
3 | import (
4 | "context"
5 | "log/slog"
6 | "time"
7 |
8 | "github.com/spf13/cobra"
9 |
10 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/helper"
11 | "github.com/bitcoin-sv/arc/pkg/woc_client"
12 | )
13 |
14 | var Cmd = &cobra.Command{
15 | Use: "balance",
16 | Short: "Show balance of the keyset",
17 | RunE: func(_ *cobra.Command, _ []string) error {
18 | isTestnet := helper.GetBool("testnet")
19 | wocAPIKey := helper.GetString("wocAPIKey")
20 | logLevel := helper.GetString("logLevel")
21 | logFormat := helper.GetString("logFormat")
22 | logger := helper.NewLogger(logLevel, logFormat)
23 |
24 | wocClient := woc_client.New(!isTestnet, woc_client.WithAuth(wocAPIKey), woc_client.WithLogger(logger))
25 |
26 | keySetsMap, err := helper.GetSelectedKeySets()
27 | if err != nil {
28 | return err
29 | }
30 |
31 | names := helper.GetOrderedKeys(keySetsMap)
32 |
33 | for _, name := range names {
34 | keySet := keySetsMap[name]
35 | if wocAPIKey == "" {
36 | time.Sleep(500 * time.Millisecond)
37 | }
38 | confirmed, unconfirmed, err := wocClient.GetBalanceWithRetries(context.Background(), keySet.Address(!isTestnet), 1*time.Second, 5)
39 | if err != nil {
40 | return err
41 | }
42 | logger.Info("balance", slog.String("name", name), slog.String("address", keySet.Address(!isTestnet)), slog.Uint64("confirmed", confirmed), slog.Uint64("unconfirmed", unconfirmed))
43 | }
44 |
45 | return nil
46 | },
47 | }
48 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/get_latest_blocks.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 | "database/sql"
6 |
7 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
8 | "google.golang.org/protobuf/types/known/timestamppb"
9 | )
10 |
11 | func (p *PostgreSQL) LatestBlocks(ctx context.Context, numOfBlocks uint64) ([]*blocktx_api.Block, error) {
12 | q := `
13 | SELECT
14 | hash
15 | ,prevhash
16 | ,merkleroot
17 | ,height
18 | ,processed_at
19 | ,status
20 | ,chainwork
21 | ,timestamp
22 | FROM blocktx.blocks
23 | WHERE is_longest = TRUE AND processed_at IS NOT NULL ORDER BY height DESC LIMIT $1`
24 |
25 | rows, err := p.db.QueryContext(ctx, q, numOfBlocks)
26 | if err != nil {
27 | return nil, err
28 | }
29 | defer rows.Close()
30 |
31 | blocks := make([]*blocktx_api.Block, 0)
32 |
33 | for rows.Next() {
34 | var block blocktx_api.Block
35 | var processedAt sql.NullTime
36 | var timestamp sql.NullTime
37 | err := rows.Scan(
38 | &block.Hash,
39 | &block.PreviousHash,
40 | &block.MerkleRoot,
41 | &block.Height,
42 | &processedAt,
43 | &block.Status,
44 | &block.Chainwork,
45 | ×tamp,
46 | )
47 | if err != nil {
48 | return nil, err
49 | }
50 |
51 | if timestamp.Valid {
52 | block.Timestamp = timestamppb.New(timestamp.Time.UTC())
53 | }
54 |
55 | if processedAt.Valid {
56 | block.ProcessedAt = timestamppb.New(processedAt.Time.UTC())
57 | }
58 |
59 | blocks = append(blocks, &block)
60 | }
61 |
62 | return blocks, nil
63 | }
64 |
--------------------------------------------------------------------------------
/internal/p2p/peer_options.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "time"
5 |
6 | "github.com/libsv/go-p2p/wire"
7 | )
8 |
9 | type PeerOptions func(p *Peer)
10 |
11 | func WithMaximumMessageSize(maximumMessageSize int64) PeerOptions {
12 | return func(p *Peer) {
13 | p.maxMsgSize = maximumMessageSize
14 | }
15 | }
16 |
17 | func WithReadBufferSize(size int) PeerOptions {
18 | return func(p *Peer) {
19 | p.readBuffSize = size
20 | }
21 | }
22 |
23 | func WithUserAgent(userAgentName string, userAgentVersion string) PeerOptions {
24 | return func(p *Peer) {
25 | p.userAgentName = &userAgentName
26 | p.userAgentVersion = &userAgentVersion
27 | }
28 | }
29 |
30 | func WithNrOfWriteHandlers(n uint8) PeerOptions {
31 | return func(p *Peer) {
32 | p.nWriters = n
33 | }
34 | }
35 |
36 | func WithWriteChannelSize(n uint16) PeerOptions {
37 | return func(p *Peer) {
38 | p.writeCh = make(chan wire.Message, n)
39 | }
40 | }
41 |
42 | func WithPingInterval(interval time.Duration, connectionHealthThreshold time.Duration) PeerOptions {
43 | return func(p *Peer) {
44 | p.pingInterval = interval
45 | p.healthThreshold = connectionHealthThreshold
46 | }
47 | }
48 |
49 | func WithServiceFlag(flag wire.ServiceFlag) PeerOptions {
50 | return func(p *Peer) {
51 | p.servicesFlag = flag
52 | }
53 | }
54 |
55 | func WithDialer(dial Dialer) PeerOptions {
56 | return func(p *Peer) {
57 | p.dialer = dial
58 | }
59 | }
60 |
61 | func WithConnectionTimeout(d time.Duration) PeerOptions {
62 | return func(p *Peer) {
63 | p.connectionTimeout = d
64 | }
65 | }
66 |
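A minimal sketch of composing these functional options; the constructor that eventually applies them is not shown in this file, so it is only referenced in a comment, and all values are placeholders.

package p2p_test

import (
	"time"

	"github.com/libsv/go-p2p/wire"

	"github.com/bitcoin-sv/arc/internal/p2p"
)

func ExamplePeerOptions() {
	opts := []p2p.PeerOptions{
		p2p.WithMaximumMessageSize(32 * 1024 * 1024), // 32 MiB cap per message
		p2p.WithUserAgent("arc", "1.0.0"),
		p2p.WithPingInterval(2*time.Minute, 5*time.Minute),
		p2p.WithServiceFlag(wire.SFNodeNetwork),
	}

	// The options are handed to the peer constructor, which applies each one
	// to the new Peer before the connection is established.
	_ = opts
}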
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/block_processing/blocktx.blocks.yaml:
--------------------------------------------------------------------------------
1 | - inserted_at: 2023-12-22T11:40:00+00:00
2 | id: 1
3 | hash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
4 | prevhash: 0x3a03313b727fa08c170fab2660c225d52b4d85516c92a0020000000000000000
5 | merkleroot: 0x3eeee879a8a08fc537a04682178687bb0e58a5103938eafc349705a2acb06410
6 | height: 822012
7 | processed_at: 2023-12-22T11:45:00+00:00
8 | size: 3030000
9 | tx_count: 856
10 | - inserted_at: 2023-12-22T11:50:00+00:00
11 | id: 2
12 | hash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000
13 | prevhash: 0xb71ab063c5f96cad71cdc59dcc94182a20a69cbd7eed2d070000000000000000
14 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
15 | height: 822013
16 | size: 86840000
17 | tx_count: 23477
18 | - inserted_at: 2023-12-22T11:50:00+00:00
19 | id: 3
20 | hash: 0xc20b4d510e1a7a4ab3da30e55676de0884b4cb79139ccc0a0000000000000000
21 | prevhash: 0xf97e20396f02ab990ed31b9aec70c240f48b7e5ea239aa050000000000000000
22 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
23 | height: 822014
24 | size: 86840000
25 | tx_count: 23477
26 | - inserted_at: 2023-12-10T12:10:00+00:00
27 | id: 4
28 | hash: 0x51e9e0bacaf8ff4e993ca083aabbbd9bf56e724508d159fe2d43360500000000
29 | prevhash: 0xc20b4d510e1a7a4ab3da30e55676de0884b4cb79139ccc0a0000000000000000
30 | merkleroot: 0x7f4019eb006f5333cce752df387fa8443035c22291eb771ee5b16a02b81c8483
31 | height: 822015
32 | size: 86840000
33 | tx_count: 23477
34 |
--------------------------------------------------------------------------------
/internal/multicast/group_test.go:
--------------------------------------------------------------------------------
1 | //go:build multicast_test
2 |
3 | package multicast_test
4 |
5 | import (
6 | "log/slog"
7 | "testing"
8 | "time"
9 |
10 | "github.com/libsv/go-p2p/wire"
11 | "github.com/stretchr/testify/require"
12 |
13 | "github.com/bitcoin-sv/arc/internal/multicast"
14 | "github.com/bitcoin-sv/arc/internal/multicast/mocks"
15 | )
16 |
17 | var (
18 | addr = "[ff02::1]:1234"
19 | bcNet = wire.TestNet
20 | )
21 |
22 | func TestGroupCommunication(t *testing.T) {
23 | // given
24 | lMsgHandler := &mocks.MessageHandlerIMock{OnReceiveFromMcastFunc: func(_ wire.Message) {}}
25 | listener := multicast.NewGroup[*wire.MsgPing](slog.Default(), lMsgHandler, addr, multicast.Read, bcNet)
26 | require.True(t, listener.Connect())
27 | defer listener.Disconnect()
28 |
29 | wMsgHandler := &mocks.MessageHandlerIMock{OnSendToMcastFunc: func(_ wire.Message) {}}
30 | writer := multicast.NewGroup[*wire.MsgPing](slog.Default(), wMsgHandler, addr, multicast.Write, bcNet)
31 | require.True(t, writer.Connect())
32 | defer writer.Disconnect()
33 |
34 | msg := wire.NewMsgPing(825906425)
35 |
36 | // when
37 | writer.WriteMsg(msg)
38 | time.Sleep(200 * time.Millisecond)
39 |
40 | // then
41 | sentMsgs := wMsgHandler.OnSendToMcastCalls()
42 | require.Len(t, sentMsgs, 1, "writer didn't send message")
43 | require.Equal(t, msg, (sentMsgs[0].Msg).(*wire.MsgPing))
44 |
45 | receivedMsgs := lMsgHandler.OnReceiveFromMcastCalls()
46 | require.Len(t, receivedMsgs, 1, "listener didn't receive message")
47 | require.Equal(t, msg, (receivedMsgs[0].Msg).(*wire.MsgPing))
48 | }
49 |
--------------------------------------------------------------------------------
/pkg/keyset/key_set_test.go:
--------------------------------------------------------------------------------
1 | package keyset
2 |
3 | import (
4 | "testing"
5 |
6 | chaincfg "github.com/bsv-blockchain/go-sdk/transaction/chaincfg"
7 | "github.com/stretchr/testify/assert"
8 | "github.com/stretchr/testify/require"
9 | )
10 |
11 | func TestNew(t *testing.T) {
12 | keySet, err := New(&chaincfg.MainNet)
13 | require.NoError(t, err)
14 |
15 | assert.NotNil(t, keySet)
16 | assert.Empty(t, keySet.Path)
17 |
18 | t.Logf("master: %s", keySet.master.String())
19 | }
20 |
21 | func TestReadExtendedPrivateKey(t *testing.T) {
22 | key, err := NewFromExtendedKeyStr("xprv9s21ZrQH143K3uWZ5zfEG9v1JimHetdddkbnFAVKx2ELSws3T51wHoQuhfxsXTF4XGREBt7fVVbJiVpXJzrzb3dUVGsMsve5HaMGma4r6SG", "0/0")
23 | require.NoError(t, err)
24 |
25 | assert.Equal(t, "76a914fb4efeac628d6feda608898f543fee6520e8d33888ac", key.Script.String())
26 | assert.Equal(t, "1PuoEoj48mjgWdLetdvP9y4fx1uKefQoP1", key.Address(true))
27 | assert.Equal(t, "n4RkXrp2woAwHjpGcCtkytGzp1W2b1haPP", key.Address(false))
28 |
29 | key2, err := key.DeriveChildFromPath("0/1")
30 | require.NoError(t, err)
31 |
32 | assert.Equal(t, "76a9148a2558fc3f4e2c50f41c7380d9a1b1cfc0beb5f288ac", key2.Script.String())
33 | assert.Equal(t, "1DbSzRu4PTQFXNLTqD2hE8Fn5cGtQWxJsb", key2.Address(true))
34 | assert.Equal(t, "mt7QHUz3CUqWJUp5Yn1543U6wbsbNEi6YU", key2.Address(false))
35 |
36 | /* Don't get data from the network in tests
37 | unspent, err := key.GetUTXOs(true)
38 | require.NoError(t, err)
39 |
40 | for _, utxo := range unspent {
41 | t.Logf("%s:%d (%d sats)", utxo.TxIDStr(), utxo.Vout, utxo.Satoshis)
42 | }
43 | */
44 | }
45 |
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | name: release
2 |
3 | on:
4 | push:
5 | tags:
6 | - "*"
7 |
8 | permissions:
9 | contents: write
10 |
11 | jobs:
12 | check-current-branch:
13 | runs-on: ubuntu-latest
14 | outputs:
15 | branch: ${{ steps.check_step.outputs.branch }}
16 | steps:
17 | - name: Checkout
18 | uses: actions/checkout@v3
19 | with:
20 | fetch-depth: 0
21 |
22 | - name: Get current branch
23 | id: check_step
24 | # 1. Get the list of branches ref where this tag exists
25 | # 2. Remove 'origin/' from that result
26 | # 3. Put that string in output
27 | run: |
28 | raw=$(git branch -r --contains ${{ github.ref }})
29 | branch="$(echo ${raw//origin\//} | tr -d '\n')"
30 | echo "{name}=branch" >> $GITHUB_OUTPUT
31 | echo "Branches where this tag exists : $branch."
32 |
33 | goreleaser:
34 | runs-on: ubuntu-latest
35 | needs: check-current-branch
    36 |     if: contains(needs.check-current-branch.outputs.branch, 'main')
37 | steps:
38 | - name: Checkout
39 | uses: actions/checkout@v4
40 | with:
41 | fetch-depth: 0
42 | - name: Set up Go
43 | uses: actions/setup-go@v5
44 | with:
45 | go-version: stable
46 | - name: Run GoReleaser
47 | uses: goreleaser/goreleaser-action@v6
48 | with:
49 | distribution: goreleaser
50 | version: latest
51 | args: release --clean
52 | env:
53 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
54 |
--------------------------------------------------------------------------------
/internal/metamorph/processor_routines_test.go:
--------------------------------------------------------------------------------
1 | package metamorph_test
2 |
3 | import (
4 | "context"
5 | "testing"
6 | "time"
7 |
8 | "github.com/bsv-blockchain/go-bt/v2/chainhash"
9 | "github.com/stretchr/testify/require"
10 | "go.opentelemetry.io/otel/attribute"
11 |
12 | "github.com/bitcoin-sv/arc/internal/metamorph"
13 | "github.com/bitcoin-sv/arc/internal/metamorph/mocks"
14 | storeMocks "github.com/bitcoin-sv/arc/internal/metamorph/store/mocks"
15 | )
16 |
17 | func TestStartRoutine(t *testing.T) {
18 | tt := []struct {
19 | name string
20 | }{
21 | {
22 | name: "start routine",
23 | },
24 | }
25 |
26 | for _, tc := range tt {
27 | t.Run(tc.name, func(t *testing.T) {
28 | s := &storeMocks.MetamorphStoreMock{
29 | SetUnlockedByNameFunc: func(_ context.Context, _ string) (int64, error) {
30 | return 0, nil
31 | },
32 | }
33 | messenger := &mocks.MediatorMock{
34 | AskForTxAsyncFunc: func(_ context.Context, _ *chainhash.Hash) {},
35 | AnnounceTxAsyncFunc: func(_ context.Context, _ *chainhash.Hash, _ []byte) {},
36 | }
37 | sut, err := metamorph.NewProcessor(
38 | s,
39 | nil,
40 | messenger,
41 | nil,
42 | )
43 | require.NoError(t, err)
44 |
45 | testFunc := func(_ context.Context, _ *metamorph.Processor) []attribute.KeyValue {
46 | time.Sleep(200 * time.Millisecond)
47 |
48 | return []attribute.KeyValue{attribute.Int("atr", 5)}
49 | }
50 |
51 | sut.StartRoutine(50*time.Millisecond, testFunc, "testFunc")
52 |
53 | time.Sleep(100 * time.Millisecond)
54 |
55 | sut.Shutdown()
56 | })
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/internal/api/handler/merkle_verifier/merkle_verifier_adapter.go:
--------------------------------------------------------------------------------
1 | package merkle_verifier
2 |
3 | import (
4 | "context"
5 | "errors"
6 |
7 | "github.com/bsv-blockchain/go-sdk/chainhash"
8 | "github.com/ccoveille/go-safecast/v2"
9 |
10 | "github.com/bitcoin-sv/arc/internal/blocktx"
11 | "github.com/bitcoin-sv/arc/internal/global"
12 | )
13 |
14 | var ErrVerifyMerkleRoots = errors.New("failed to verify merkle roots")
15 |
16 | type MerkleVerifier struct {
17 | verifier global.MerkleRootsVerifier
18 | blocktx global.BlocktxClient
19 | }
20 |
21 | func New(v global.MerkleRootsVerifier, blocktx global.BlocktxClient) MerkleVerifier {
22 | return MerkleVerifier{verifier: v, blocktx: blocktx}
23 | }
24 |
25 | func (a MerkleVerifier) IsValidRootForHeight(ctx context.Context, root *chainhash.Hash, height uint32) (bool, error) {
26 | heightUint64, err := safecast.Convert[uint64](height)
27 | if err != nil {
28 | return false, err
29 | }
30 |
31 | blocktxReq := []blocktx.MerkleRootVerificationRequest{{MerkleRoot: root.String(), BlockHeight: heightUint64}}
32 |
33 | unverifiedBlockHeights, err := a.verifier.VerifyMerkleRoots(ctx, blocktxReq)
34 | if err != nil {
35 | return false, errors.Join(ErrVerifyMerkleRoots, err)
36 | }
37 |
38 | if len(unverifiedBlockHeights) == 0 {
39 | return true, nil
40 | }
41 |
42 | return false, nil
43 | }
44 |
45 | func (a MerkleVerifier) CurrentHeight(ctx context.Context) (uint32, error) {
46 | height, err := a.blocktx.CurrentBlockHeight(ctx)
47 | if err != nil {
48 | return 0, err
49 | }
50 |
51 | return uint32(height.CurrentBlockHeight), nil // #nosec G115
52 | }
53 |
--------------------------------------------------------------------------------
/internal/blocktx/bcnet/blocktx_p2p/hybrid_message_handler.go:
--------------------------------------------------------------------------------
1 | package blocktx_p2p
2 |
3 | import (
4 | "log/slog"
5 |
6 | "github.com/libsv/go-p2p/wire"
7 |
8 | "github.com/bitcoin-sv/arc/internal/blocktx/bcnet"
9 | "github.com/bitcoin-sv/arc/internal/p2p"
10 | )
11 |
12 | var _ p2p.MessageHandlerI = (*HybridMsgHandler)(nil)
13 |
14 | type HybridMsgHandler struct {
15 | logger *slog.Logger
16 | blockProcessingCh chan<- *bcnet.BlockMessagePeer
17 | }
18 |
19 | func NewHybridMsgHandler(l *slog.Logger, blockProcessCh chan<- *bcnet.BlockMessagePeer) *HybridMsgHandler {
20 | return &HybridMsgHandler{
21 | logger: l.With(
22 | slog.String("module", "peer-msg-handler"),
23 | slog.String("mode", "hybrid"),
24 | ),
25 | blockProcessingCh: blockProcessCh,
26 | }
27 | }
28 |
29 | // OnReceive handles incoming messages depending on command type
30 | func (h *HybridMsgHandler) OnReceive(msg wire.Message, peer p2p.PeerI) {
31 | cmd := msg.Command()
32 |
33 | switch cmd {
34 | case wire.CmdBlock:
35 | blockMsg, ok := msg.(*bcnet.BlockMessage)
36 | if !ok {
37 | h.logger.Error("Block msg receive", slog.Any("err", ErrUnableToCastWireMessage))
38 | return
39 | }
40 | blockMsgPeer := &bcnet.BlockMessagePeer{
41 | BlockMessage: *blockMsg,
42 | }
43 |
44 | if peer != nil {
45 | blockMsgPeer.Peer = peer.String()
46 | }
47 |
48 | h.blockProcessingCh <- blockMsgPeer
49 |
50 | default:
51 | // ignore other messages
52 | }
53 | }
54 |
55 | // OnSend handles outgoing messages depending on command type
56 | func (h *HybridMsgHandler) OnSend(_ wire.Message, _ p2p.PeerI) {
57 | // ignore
58 | }
59 |
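A minimal sketch of wiring the handler to a block-processing channel; how the handler is registered with a peer is outside this snippet.

package blocktx_p2p_test

import (
	"log/slog"

	"github.com/bitcoin-sv/arc/internal/blocktx/bcnet"
	"github.com/bitcoin-sv/arc/internal/blocktx/bcnet/blocktx_p2p"
	"github.com/bitcoin-sv/arc/internal/p2p"
)

func ExampleNewHybridMsgHandler() {
	blockCh := make(chan *bcnet.BlockMessagePeer, 10)
	handler := blocktx_p2p.NewHybridMsgHandler(slog.Default(), blockCh)

	// The handler satisfies the peer's message-handler interface and is
	// normally passed to the p2p layer, which calls OnReceive for each message.
	var msgHandler p2p.MessageHandlerI = handler
	_ = msgHandler

	go func() {
		for blockMsg := range blockCh {
			_ = blockMsg // hand the received block over to the block processor
		}
	}()
}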
--------------------------------------------------------------------------------
/internal/blocktx/integration_test/merkle_paths_test.go:
--------------------------------------------------------------------------------
1 | package integrationtest
2 |
3 | import (
4 | "testing"
5 | "time"
6 |
7 | "github.com/stretchr/testify/require"
8 |
9 | testutils "github.com/bitcoin-sv/arc/pkg/test_utils"
10 | )
11 |
12 | func TestMerklePaths(t *testing.T) {
13 | if testing.Short() {
14 | t.Skip("skipping integration test")
15 | }
16 |
17 | t.Run("request unregistered tx", func(t *testing.T) {
18 | // given
19 | defer pruneTables(t, dbConn)
20 | testutils.LoadFixtures(t, dbConn, "fixtures/merkle_paths")
21 |
22 | processor, _, _, registerTxChannel, minedTxsCh := setupSut(t, dbInfo)
23 |
24 | registeredTxHash := testutils.RevHexDecodeString(t, "ff2ea4f998a94d5128ac7663824b2331cc7f95ca60611103f49163d4a4eb547c")
25 | expectedMerklePath := "fe175b1900040200027c54eba4d46391f403116160ca957fcc31234b826376ac28514da998f9a42eff01008308c059d119b87a9520a9fd3d765f8b74f726e96c77a1a6623d71796cc8c207010100333ff2bf5a4128823576d192e3e7f4a78e287b1acd0716127d28a890b7fd37930101006b473b8dc8557542d316becceae316b142f21b0ba4d08d17da55cf4b7b6817c90101009ea3342e55faab48aaa301e3f1935428b0bba92931e0ad24081d2ebae70bc160"
26 |
27 | // when
28 | registerTxChannel <- registeredTxHash[:]
29 | processor.StartProcessRegisterTxs()
30 |
31 | // give blocktx time to pull all transactions from block and calculate the merkle path
32 | time.Sleep(200 * time.Millisecond)
33 |
34 | // then
35 | minedTxs := getTxBlockItems(minedTxsCh)
36 | require.Len(t, minedTxs, 1)
37 | tx := minedTxs[0]
38 | require.Equal(t, registeredTxHash, tx.GetTransactionHash())
39 | require.Equal(t, expectedMerklePath, tx.GetMerklePath())
40 | })
41 | }
42 |
--------------------------------------------------------------------------------
/internal/metamorph/integration_test/fixtures/reannounce_seen/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | # return
2 | - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
3 | locked_by: re-announce-integration-test
4 | status: 90
5 | stored_at: 2025-05-08 12:53:34.260 +0200
6 | last_submitted_at: 2025-05-08 12:53:34.260 +0200
7 | status_history:
8 | - status: 20
9 | timestamp: 2025-05-08T10:53:34.260275433Z
10 | - status: 90
11 | timestamp: 2025-05-08T10:53:34.577864+00:00
12 |
13 | # confirmed longer ago than requested => don't return
14 | - hash: 0x4910f3dccc84bd77bccbb14b739d6512dcfc70fb8b3c61fb74d491baa01aea0a
15 | locked_by: re-announce-integration-test
16 | status: 90
17 | stored_at: 2025-05-08 12:53:34.260 +0200
18 | last_submitted_at: 2025-05-08 12:53:34.260 +0200
19 | status_history:
20 | - status: 20
21 | timestamp: 2025-05-08T10:53:34.260275433Z
22 | - status: 90
23 | timestamp: 2025-05-08T10:53:34.577864+00:00
24 | requested_at: 2025-05-08 12:10:00.000 +0200
25 | confirmed_at: 2025-05-08 11:12:34.260 +0200
26 |
27 | # requested and confirmed long ago => return
28 | - hash: 0x8289758c1929505f9476e71698623387fc16a20ab238a3e6ce1424bc0aae368e
29 | locked_by: re-announce-integration-test
30 | status: 90
31 | stored_at: 2025-05-08 12:53:34.260 +0200
32 | last_submitted_at: 2025-05-08 12:53:34.260 +0200
33 | status_history:
34 | - status: 20
35 | timestamp: 2025-05-08T10:53:34.260275433Z
36 | - status: 90
37 | timestamp: 2025-05-08T10:53:34.577864+00:00
38 | requested_at: 2025-05-08 12:10:00.000 +0200
39 | confirmed_at: 2025-05-08 12:12:34.260 +0200
40 |
--------------------------------------------------------------------------------
/config/utils.go:
--------------------------------------------------------------------------------
1 | package config
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "net/url"
7 |
8 | "github.com/libsv/go-p2p/wire"
9 | "github.com/spf13/viper"
10 | )
11 |
12 | var (
13 | ErrConfigFailedToDump = errors.New("error occurred while dumping config")
14 | ErrConfigUnknownNetwork = errors.New("unknown bitcoin_network")
15 | ErrPortP2PNotSet = errors.New("port_p2p not set for peer")
16 | )
17 |
18 | func DumpConfig(configFile string) error {
19 | err := viper.SafeWriteConfigAs(configFile)
20 | if err != nil {
21 | return errors.Join(ErrConfigFailedToDump, err)
22 | }
23 | return nil
24 | }
25 |
26 | func GetNetwork(networkStr string) (wire.BitcoinNet, error) {
27 | var network wire.BitcoinNet
28 |
29 | switch networkStr {
30 | case "mainnet":
31 | network = wire.MainNet
32 | case "testnet":
33 | network = wire.TestNet3
34 | case "regtest":
35 | network = wire.TestNet
36 | default:
37 | return 0, errors.Join(ErrConfigUnknownNetwork, fmt.Errorf("network: %s", networkStr))
38 | }
39 |
40 | return network, nil
41 | }
42 |
43 | // GetZMQUrl gets the URL of the ZMQ port if available. If not available, nil is returned
44 | func (p *PeerConfig) GetZMQUrl() (*url.URL, error) {
45 | if p.Port == nil || p.Port.ZMQ == 0 {
46 | return nil, nil
47 | }
48 |
49 | zmqURLString := fmt.Sprintf("zmq://%s:%d", p.Host, p.Port.ZMQ)
50 |
51 | return url.Parse(zmqURLString)
52 | }
53 |
54 | func (p *PeerConfig) GetP2PUrl() (string, error) {
55 | if p.Port == nil || p.Port.P2P == 0 {
56 | return "", errors.Join(ErrPortP2PNotSet, fmt.Errorf("peer host %s", p.Host))
57 | }
58 |
59 | return fmt.Sprintf("%s:%d", p.Host, p.Port.P2P), nil
60 | }
61 |
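A minimal sketch mapping the bitcoin_network config value to a wire network via GetNetwork.

package config_test

import (
	"fmt"

	"github.com/libsv/go-p2p/wire"

	"github.com/bitcoin-sv/arc/config"
)

func ExampleGetNetwork() {
	network, err := config.GetNetwork("mainnet")
	if err != nil {
		fmt.Println(err) // unknown bitcoin_network values are rejected
		return
	}

	fmt.Println(network == wire.MainNet) // true
}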
--------------------------------------------------------------------------------
/internal/api/transaction_handler/bitcoin_node_test.go:
--------------------------------------------------------------------------------
1 | package transaction_handler
2 |
3 | import (
4 | "context"
5 | "testing"
6 |
7 | sdkTx "github.com/bsv-blockchain/go-sdk/transaction"
8 |
9 | "github.com/stretchr/testify/require"
10 | )
11 |
12 | func TestNewBitcoinNode(t *testing.T) {
13 | t.Run("new bitcoin node", func(t *testing.T) {
14 | //Given
15 | // This is the genesis coinbase transaction that is hardcoded and does not need connection to anything else
16 | tx1 := "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b"
17 |
18 | // add a single bitcoin node
19 | txHandler, err := NewBitcoinNode("localhost", 8332, "user", "mypassword", false)
20 | require.NoError(t, err)
21 | var ctx = context.Background()
22 |
23 | //When
24 |
25 | //Then
26 |
27 | err = txHandler.Health(ctx)
28 | require.NoError(t, err)
29 |
30 | res1, err := txHandler.GetTransaction(ctx, tx1)
31 | require.NotNil(t, res1)
32 | require.NoError(t, err)
33 |
34 | res2, err := txHandler.GetTransactions(ctx, []string{tx1, tx1})
35 | require.NotNil(t, res2)
36 | require.NoError(t, err)
37 |
38 | res3, err := txHandler.GetTransactionStatuses(ctx, []string{tx1, tx1})
39 | require.NotNil(t, res3)
40 | require.NoError(t, err)
41 |
42 | res4, err := txHandler.GetTransactionStatus(ctx, tx1)
43 | require.NotNil(t, res4)
44 | require.NoError(t, err)
45 |
46 | res5, err := txHandler.SubmitTransaction(ctx, &sdkTx.Transaction{}, nil)
47 | require.Nil(t, res5)
48 | require.Error(t, err)
49 |
50 | res6, err := txHandler.SubmitTransactions(ctx, sdkTx.Transactions{}, nil)
51 | require.NotNil(t, res6)
52 | require.NoError(t, err)
53 | })
54 | }
55 |
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/update_mined/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
2 | locked_by: metamorph-1
3 | status: 90
4 | stored_at: 2023-10-01 14:00:00
5 | last_submitted_at: 2023-10-01 14:00:00
6 | status_history:
7 | - status: 20
8 | timestamp: 2023-10-01T14:00:00.0Z
9 | - status: 90
10 | timestamp: 2023-10-01T14:00:03.0Z
11 | - hash: 0x67fc757d9ed6d119fc0926ae5c82c1a2cf036ec823257cfaea396e49184ec7ff
12 | locked_by: metamorph-1
13 | status: 100
14 | stored_at: 2023-10-01 14:00:00
15 | last_submitted_at: 2023-10-01 14:00:00
16 | competing_txs: b79d90f8f09df3e85574b7bbb78fd4a0b990474e1a4fefdb87b3f937bd98fda7
17 | status_history:
18 | - status: 20
19 | timestamp: 2023-10-01T14:00:00.0Z
20 | - status: 90
21 | timestamp: 2023-10-01T14:00:03.0Z
22 | - hash: 0xa7fd98bd37f9b387dbef4f1a4e4790b9a0d48fb7bbb77455e8f39df0f8909db7
23 | locked_by: metamorph-1
24 | status: 100
25 | stored_at: 2023-10-01 14:00:00
26 | last_submitted_at: 2023-10-01 14:00:00
27 | competing_txs: ffc74e18496e39eafa7c2523c86e03cfa2c1825cae2609fc19d1d69e7d75fc67
28 | status_history:
29 | - status: 20
30 | timestamp: 2023-10-01T14:00:00.0Z
31 | - status: 90
32 | timestamp: 2023-10-01T14:00:03.0Z
33 | - hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd
34 | locked_by: metamorph-1
35 | status: 40
36 | stored_at: 2023-10-01 14:00:00
37 | last_submitted_at: 2023-10-01 14:00:00
38 | status_history:
39 | - status: 20
40 | timestamp: 2023-10-01T14:00:00.0Z
41 | - status: 90
42 | timestamp: 2023-10-01T14:00:03.0Z
43 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/get_transactions/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1
2 | hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
3 | merkle_tree_index: 1
4 | - block_id: 1
5 | hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
6 | merkle_tree_index: 2
7 | - block_id: 1
8 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
9 | merkle_tree_index: 3
10 | - block_id: 1
11 | hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
12 | merkle_tree_index: 4
13 | - block_id: 1
14 | hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd
15 | merkle_tree_index: 5
16 | - block_id: 2
17 | hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0
18 | merkle_tree_index: 6
19 | - block_id: 2
20 | hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357
21 | merkle_tree_index: 7
22 | - block_id: 2
23 | hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
24 | merkle_tree_index: 8
25 | - block_id: 2
26 | hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799
27 | merkle_tree_index: 9
28 | - block_id: 2
29 | hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8
30 | merkle_tree_index: 10
31 | - block_id: 3
32 | hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e
33 | merkle_tree_index: 11
34 | - block_id: 3
35 | hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5
36 | merkle_tree_index: 12
37 | - block_id: 3
38 | hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004
39 | merkle_tree_index: 13
40 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/fixtures/get_block_transactions/blocktx.block_transactions.yaml:
--------------------------------------------------------------------------------
1 | - block_id: 1
2 | hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
3 | merkle_tree_index: 1
4 | - block_id: 1
5 | hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
6 | merkle_tree_index: 2
7 | - block_id: 1
8 | hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
9 | merkle_tree_index: 3
10 | - block_id: 1
11 | hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
12 | merkle_tree_index: 4
13 | - block_id: 1
14 | hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd
15 | merkle_tree_index: 5
16 | - block_id: 2
17 | hash: 0x213a8c87c5460e82b5ae529212956b853c7ce6bf06e56b2e040eb063cf9a49f0
18 | merkle_tree_index: 1
19 | - block_id: 2
20 | hash: 0x12c04cfc5643f1cd25639ad42d6f8f0489557699d92071d7e0a5b940438c4357
21 | merkle_tree_index: 2
22 | - block_id: 2
23 | hash: 0xece2b7e40d98749c03c551b783420d6e3fdc3c958244bbf275437839585829a6
24 | merkle_tree_index: 3
25 | - block_id: 2
26 | hash: 0x5c5b621b81fb63d9df4595ee2e6b3c50cce1f5f0e1b83510aac504931ed22799
27 | merkle_tree_index: 4
28 | - block_id: 2
29 | hash: 0xa3d4e78a8e11e97c8faf34880da861412273948edf467f23590601a1057079d8
30 | merkle_tree_index: 5
31 | - block_id: 3
32 | hash: 0x2eb9f15adaf9e7d1de19f3ebc6bf95b62871a4e053c30ac0d1b1df85a6163d8e
33 | merkle_tree_index: 6
34 | - block_id: 3
35 | hash: 0x45ad0e3de133e386faeff8ecf12b665875d527031b9aa75ca96d3fc2b7098fa5
36 | merkle_tree_index: 7
37 | - block_id: 3
38 | hash: 0x4bac520c26dba4e24c3fb73bf4fd0d66e45ec39b976d1d052f8a4f499f4aa004
39 | merkle_tree_index: 8
40 |
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/app/keyset/new/new.go:
--------------------------------------------------------------------------------
1 | package new
2 |
3 | import (
4 | "log/slog"
5 |
6 | chaincfg "github.com/bsv-blockchain/go-sdk/transaction/chaincfg"
7 | "github.com/spf13/cobra"
8 |
9 | "github.com/bitcoin-sv/arc/cmd/broadcaster-cli/helper"
10 | "github.com/bitcoin-sv/arc/pkg/keyset"
11 | )
12 |
13 | var (
14 | Cmd = &cobra.Command{
15 | Use: "new",
16 | Short: "Create new key set",
17 | RunE: func(_ *cobra.Command, _ []string) error {
18 | isTestnet := helper.GetBool("testnet")
19 |
20 | netCfg := chaincfg.MainNet
21 | if isTestnet {
22 | netCfg = chaincfg.TestNet
23 | }
24 |
25 | newKeyset, err := keyset.New(&netCfg)
26 | if err != nil {
27 | return err
28 | }
29 |
30 | logLevel := helper.GetString("logLevel")
31 | logFormat := helper.GetString("logFormat")
32 | logger := helper.NewLogger(logLevel, logFormat)
33 |
34 | logger.Info("new keyset", slog.String("keyset", newKeyset.GetMaster().String()))
35 | return nil
36 | },
37 | }
38 | )
39 |
40 | func init() {
41 | var err error
42 |
43 | logger := helper.NewLogger("INFO", "tint")
44 |
45 | Cmd.SetHelpFunc(func(command *cobra.Command, strings []string) {
46 | // Hide unused persistent flags
47 | err = command.Flags().MarkHidden("keyfile")
48 | if err != nil {
49 | logger.Error("failed to mark flag hidden", slog.String("flag", "keyfile"), slog.String("err", err.Error()))
50 | }
51 | err = command.Flags().MarkHidden("wocAPIKey")
52 | if err != nil {
53 | logger.Error("failed to mark flag hidden", slog.String("flag", "wocAPIKey"), slog.String("err", err.Error()))
54 | }
55 | // Call parent help func
56 | command.Parent().HelpFunc()(command, strings)
57 | })
58 | }
59 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/clear_data.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 | "errors"
6 | "fmt"
7 | "time"
8 |
9 | _ "github.com/lib/pq" // nolint: revive // required for postgres driver
10 |
11 | "github.com/bitcoin-sv/arc/internal/blocktx/store"
12 | )
13 |
14 | var clearBlocktxTableName = map[store.ClearBlocktxTable]string{
15 | store.TableRegisteredTransactions: "registered_transactions",
16 | store.TableBlockProcessing: "block_processing",
17 | }
18 |
19 | func (p *PostgreSQL) ClearBlocktxTable(ctx context.Context, retentionDays int32, table store.ClearBlocktxTable) (int64, error) {
20 | now := p.now()
21 | deleteBeforeDate := now.Add(-24 * time.Hour * time.Duration(retentionDays))
22 |
23 | tableName, ok := clearBlocktxTableName[table]
24 | if !ok {
25 | return 0, errors.Join(store.ErrInvalidTable, fmt.Errorf("invalid table: %d", table))
26 | }
27 |
28 | res, err := p.db.ExecContext(ctx, fmt.Sprintf("DELETE FROM blocktx.%s WHERE inserted_at <= $1", tableName), deleteBeforeDate)
29 | if err != nil {
30 | return 0, errors.Join(store.ErrUnableToDeleteRows, fmt.Errorf("table %s: %w", tableName, err))
31 | }
32 | rows, _ := res.RowsAffected()
33 | return rows, nil
34 | }
35 |
36 | func (p *PostgreSQL) ClearBlocks(ctx context.Context, retentionDays int32) (int64, error) {
37 | now := p.now()
38 | deleteBeforeDate := now.Add(-24 * time.Hour * time.Duration(retentionDays))
39 |
40 | res, err := p.db.ExecContext(ctx, `DELETE FROM blocktx.blocks WHERE timestamp <= $1`, deleteBeforeDate)
41 | if err != nil {
42 | return 0, errors.Join(store.ErrUnableToDeleteRows, fmt.Errorf("table blocks: %w", err))
43 | }
44 | rows, _ := res.RowsAffected()
45 | return rows, nil
46 | }
47 |
--------------------------------------------------------------------------------
/internal/callbacker/callbacker_api/callbacker_api.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | option go_package = ".;callbacker_api";
4 |
5 | package callbacker_api;
6 |
7 | import "google/protobuf/timestamp.proto";
8 | import "google/protobuf/empty.proto";
9 |
10 | service CallbackerAPI {
11 | rpc Health (google.protobuf.Empty) returns (HealthResponse) {}
12 | rpc SendCallback (SendRequest) returns (google.protobuf.Empty) {}
13 | }
14 |
    15 | // Note: status values are spaced apart so that new statuses can be added
    16 | // between existing ones later without changing the values already stored
    17 | // in the existing db.
18 | enum Status {
19 | UNKNOWN = 0;
20 | QUEUED = 10;
21 | RECEIVED = 20;
22 | STORED = 30;
23 | ANNOUNCED_TO_NETWORK = 40;
24 | REQUESTED_BY_NETWORK = 50;
25 | SENT_TO_NETWORK = 60;
26 | ACCEPTED_BY_NETWORK = 70;
27 | SEEN_IN_ORPHAN_MEMPOOL = 80;
28 | SEEN_ON_NETWORK = 90;
29 | DOUBLE_SPEND_ATTEMPTED = 100;
30 | REJECTED = 110;
31 | MINED_IN_STALE_BLOCK = 115;
32 | MINED = 120;
33 | }
34 |
35 | // swagger:model HealthResponse
36 | message HealthResponse {
37 | google.protobuf.Timestamp timestamp = 1;
38 | string nats = 2;
39 | }
40 |
41 | // swagger:model SendRequest
42 | message SendRequest {
43 | CallbackRouting callback_routing = 1;
44 | string txid = 2;
45 | Status status = 3;
46 | string merkle_path = 4;
47 | string extra_info = 5;
48 | repeated string competing_txs = 6;
49 | string block_hash = 7;
50 | uint64 block_height = 8;
51 | google.protobuf.Timestamp timestamp = 9;
52 | }
53 |
54 | // swagger:model CallbackRouting
55 | message CallbackRouting {
56 | string url = 1;
57 | string token = 2;
58 | bool allow_batch = 3;
59 | }
60 |
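A minimal sketch of building a SendRequest from Go, assuming the standard protoc-gen-go output for this file (Status_* constants and CamelCase field names); all values are placeholders.

package callbacker_api_test

import (
	"fmt"

	"github.com/bitcoin-sv/arc/internal/callbacker/callbacker_api"
)

func ExampleSendRequest() {
	req := &callbacker_api.SendRequest{
		CallbackRouting: &callbacker_api.CallbackRouting{
			Url:   "https://example.com/callbacks", // placeholder receiver
			Token: "secret",
		},
		Txid:        "4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b",
		Status:      callbacker_api.Status_MINED,
		BlockHeight: 822015,
	}

	fmt.Println(req.Status) // MINED
}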
--------------------------------------------------------------------------------
/internal/grpc_utils/server.go:
--------------------------------------------------------------------------------
1 | package grpc_utils
2 |
3 | import (
4 | "errors"
5 | "fmt"
6 | "log/slog"
7 | "net"
8 |
9 | "google.golang.org/grpc"
10 |
11 | "github.com/bitcoin-sv/arc/config"
12 | )
13 |
14 | var ErrServerFailedToListen = errors.New("GRPC server failed to listen")
15 |
16 | type GrpcServer struct {
17 | Srv *grpc.Server
18 |
19 | logger *slog.Logger
20 | cleanup func()
21 | }
22 |
23 | type ServerConfig struct {
24 | PrometheusEndpoint string
25 | MaxMsgSize int
26 | TracingConfig *config.TracingConfig
27 | Name string
28 | }
29 |
30 | func NewGrpcServer(logger *slog.Logger, cfg ServerConfig) (GrpcServer, error) {
31 | metrics, grpcOpts, cleanupFn, err := GetGRPCServerOpts(logger, cfg)
32 | if err != nil {
33 | return GrpcServer{}, err
34 | }
35 |
36 | grpcSrv := grpc.NewServer(grpcOpts...)
37 |
38 | metrics.InitializeMetrics(grpcSrv)
39 |
40 | return GrpcServer{
41 | Srv: grpcSrv,
42 | logger: logger,
43 | cleanup: cleanupFn,
44 | }, nil
45 | }
46 |
47 | func (s *GrpcServer) ListenAndServe(address string) error {
48 | listener, err := net.Listen("tcp", address)
49 | if err != nil {
50 | return errors.Join(ErrServerFailedToListen, fmt.Errorf("address %s: %v", address, err))
51 | }
52 |
53 | go func() {
54 | s.logger.Info("GRPC server listening", slog.String("address", address))
55 | err = s.Srv.Serve(listener)
56 | if err != nil {
57 | s.logger.Error("GRPC server failed to serve", slog.String("err", err.Error()))
58 | }
59 | }()
60 |
61 | return nil
62 | }
63 |
64 | func (s *GrpcServer) Shutdown() {
65 | s.logger.Info("Shutting down gRPC server")
66 |
67 | s.Srv.GracefulStop()
68 |
69 | if s.cleanup != nil {
70 | s.cleanup()
71 | }
72 |
73 | s.logger.Info("Shutdown gRPC server complete")
74 | }
75 |
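A minimal lifecycle sketch with placeholder values; a real caller would also set PrometheusEndpoint and TracingConfig and register its gRPC services on srv.Srv before listening.

package grpc_utils_test

import (
	"log/slog"

	"github.com/bitcoin-sv/arc/internal/grpc_utils"
)

func ExampleNewGrpcServer() {
	logger := slog.Default()

	srv, err := grpc_utils.NewGrpcServer(logger, grpc_utils.ServerConfig{
		Name:       "example-service", // placeholder
		MaxMsgSize: 4 * 1024 * 1024,   // 4 MiB
	})
	if err != nil {
		logger.Error("failed to create server", slog.String("err", err.Error()))
		return
	}

	// Service implementations would be registered on srv.Srv here.

	if err := srv.ListenAndServe("localhost:0"); err != nil {
		logger.Error("failed to listen", slog.String("err", err.Error()))
		return
	}
	defer srv.Shutdown()
}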
--------------------------------------------------------------------------------
/internal/p2p/wire_reader.go:
--------------------------------------------------------------------------------
1 | package p2p
2 |
3 | import (
4 | "bufio"
5 | "context"
6 | "io"
7 | "strings"
8 |
9 | "github.com/libsv/go-p2p/wire"
10 | )
11 |
12 | type WireReader struct {
13 | bufio.Reader
14 | limitedReader *io.LimitedReader
15 | maxMsgSize int64
16 | }
17 |
18 | func NewWireReader(r io.Reader, maxMsgSize int64) *WireReader {
19 | lr := &io.LimitedReader{R: r, N: maxMsgSize}
20 |
21 | return &WireReader{
22 | Reader: *bufio.NewReader(lr),
23 | limitedReader: lr,
24 | maxMsgSize: maxMsgSize,
25 | }
26 | }
27 |
28 | func NewWireReaderSize(r io.Reader, maxMsgSize int64, buffSize int) *WireReader {
29 | lr := &io.LimitedReader{R: r, N: maxMsgSize}
30 |
31 | return &WireReader{
32 | Reader: *bufio.NewReaderSize(lr, buffSize),
33 | limitedReader: lr,
34 | maxMsgSize: maxMsgSize,
35 | }
36 | }
37 |
38 | func (r *WireReader) ReadNextMsg(ctx context.Context, pver uint32, network wire.BitcoinNet) (wire.Message, error) {
39 | result := make(chan readResult, 1)
40 | go handleRead(r, pver, network, result)
41 |
42 | // block until read complete or context is canceled
43 | select {
44 | case <-ctx.Done():
45 | return nil, ctx.Err()
46 |
47 | case readMsg := <-result:
48 | return readMsg.msg, readMsg.err
49 | }
50 | }
51 |
52 | func (r *WireReader) resetLimit() {
53 | r.limitedReader.N = r.maxMsgSize
54 | }
55 |
56 | type readResult struct {
57 | msg wire.Message
58 | err error
59 | }
60 |
61 | func handleRead(r *WireReader, pver uint32, bsvnet wire.BitcoinNet, result chan<- readResult) {
62 | for {
63 | msg, _, err := wire.ReadMessage(r, pver, bsvnet)
64 | r.resetLimit()
65 |
66 | if err != nil {
67 | if strings.Contains(err.Error(), "unhandled command [") {
68 | // ignore unknown msg
69 | continue
70 | }
71 | }
72 |
73 | result <- readResult{msg, err}
74 | return
75 | }
76 | }
77 |
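A minimal read-loop sketch over an established connection; the size limit is a placeholder, and wire.ProtocolVersion / wire.MainNet stand in for whatever the caller actually negotiates.

package p2p_test

import (
	"context"
	"log"
	"net"

	"github.com/libsv/go-p2p/wire"

	"github.com/bitcoin-sv/arc/internal/p2p"
)

func readLoop(ctx context.Context, conn net.Conn) {
	const maxMsgSize = 32 * 1024 * 1024 // 32 MiB cap per message
	reader := p2p.NewWireReader(conn, maxMsgSize)

	for {
		msg, err := reader.ReadNextMsg(ctx, wire.ProtocolVersion, wire.MainNet)
		if err != nil {
			log.Printf("read failed: %v", err) // includes ctx.Err() on cancellation
			return
		}
		log.Printf("received %s message", msg.Command())
	}
}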
--------------------------------------------------------------------------------
/internal/api/handler/helpers.go:
--------------------------------------------------------------------------------
1 | package handler
2 |
3 | import (
4 | "log"
5 |
6 | "github.com/bitcoin-sv/arc/internal/api/dictionary"
7 | "github.com/bitcoin-sv/arc/internal/global"
8 |
9 | "github.com/getkin/kin-openapi/openapi3"
10 | "github.com/labstack/echo/v4"
11 | middleware "github.com/oapi-codegen/echo-middleware"
12 |
13 | "github.com/bitcoin-sv/arc/pkg/api"
14 | )
15 |
16 | // CheckSwagger validates the request against the swagger definition.
17 | func CheckSwagger(e *echo.Echo) *openapi3.T {
18 | swagger, err := api.GetSwagger()
19 | if err != nil {
20 | log.Fatalf(dictionary.GetInternalMessage(dictionary.ErrorLoadingSwaggerSpec), err.Error())
21 | }
22 |
    23 | // Clear out the servers array in the swagger spec; that skips validating
    24 | // that server names match, since we don't know how this service will be run.
25 | swagger.Servers = nil
26 | // Clear out the security requirements, we check this ourselves
27 | swagger.Security = nil
28 |
29 | // Use our validation middleware to check all requests against the OpenAPI schema.
30 | e.Use(middleware.OapiRequestValidator(swagger))
31 |
32 | return swagger
33 | }
34 |
35 | func filterStatusesByTxIDs(txIDs []string, statuses []*global.TransactionStatus) []*global.TransactionStatus {
36 | if len(txIDs) == 1 && len(statuses) == 1 { // optimization for a common scenario
37 | if statuses[0] != nil && statuses[0].TxID == txIDs[0] {
38 | return statuses
39 | }
40 |
41 | return make([]*global.TransactionStatus, 0)
42 | }
43 |
44 | idsMap := make(map[string]struct{})
45 | for _, id := range txIDs {
46 | idsMap[id] = struct{}{}
47 | }
48 |
49 | filteredStatuses := make([]*global.TransactionStatus, 0)
50 | for _, txStatus := range statuses {
51 | if _, ok := idsMap[txStatus.TxID]; ok {
52 | filteredStatuses = append(filteredStatuses, txStatus)
53 | }
54 | }
55 |
56 | return filteredStatuses
57 | }
58 |
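A minimal sketch of the unexported filter (same package by necessity, so it is written as an in-package helper); the transaction IDs are placeholders.

package handler

import "github.com/bitcoin-sv/arc/internal/global"

func exampleFilterStatuses() []*global.TransactionStatus {
	statuses := []*global.TransactionStatus{
		{TxID: "aa"},
		{TxID: "bb"},
		{TxID: "cc"},
	}

	// Only "aa" and "cc" were requested, so the "bb" status is dropped.
	return filterStatusesByTxIDs([]string{"aa", "cc"}, statuses)
}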
--------------------------------------------------------------------------------
/internal/varintutils/varintutils.go:
--------------------------------------------------------------------------------
1 | package varintutils
2 |
3 | import (
4 | "encoding/binary"
5 | "errors"
6 | "fmt"
7 | "io"
8 | )
9 |
10 | // VarInt (variable integer) is a field used in transaction data to indicate the number of
11 | // upcoming fields, or the length of an upcoming field.
12 | // See http://learnmeabitcoin.com/glossary/varint
13 | type VarInt uint64
14 |
    15 | // Length returns the length of the underlying byte representation of the VarInt.
16 | func (v VarInt) Length() int {
17 | if v < 253 {
18 | return 1
19 | }
20 | if v < 65536 {
21 | return 3
22 | }
23 | if v < 4294967296 {
24 | return 5
25 | }
26 | return 9
27 | }
28 |
    29 | // ReadFrom reads the next varint from the io.Reader and assigns it to v.
30 | func (v *VarInt) ReadFrom(r io.Reader) (int64, error) {
31 | b := make([]byte, 1)
32 | if _, err := io.ReadFull(r, b); err != nil {
33 | return 0, errors.Join(err, errors.New("could not read varint type"))
34 | }
35 |
36 | switch b[0] {
37 | case 0xff:
38 | bb := make([]byte, 8)
39 | if n, err := io.ReadFull(r, bb); err != nil {
40 | return 9, errors.Join(err, fmt.Errorf("varint(8): got %d bytes", n))
41 | }
42 | *v = VarInt(binary.LittleEndian.Uint64(bb))
43 | return 9, nil
44 |
45 | case 0xfe:
46 | bb := make([]byte, 4)
47 | if n, err := io.ReadFull(r, bb); err != nil {
48 | return 5, errors.Join(err, fmt.Errorf("varint(4): got %d bytes", n))
49 | }
50 | *v = VarInt(binary.LittleEndian.Uint32(bb))
51 | return 5, nil
52 |
53 | case 0xfd:
54 | bb := make([]byte, 2)
55 | if n, err := io.ReadFull(r, bb); err != nil {
56 | return 3, errors.Join(err, fmt.Errorf("varint(2): got %d bytes", n))
57 | }
58 | *v = VarInt(binary.LittleEndian.Uint16(bb))
59 | return 3, nil
60 |
61 | default:
62 | *v = VarInt(binary.LittleEndian.Uint16([]byte{b[0], 0x00}))
63 | return 1, nil
64 | }
65 | }
66 |
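A worked example of the encoding above: the 0xfd marker introduces a 2-byte little-endian value, so {0xfd, 0xe8, 0x03} decodes to 1000 over 3 bytes, and Length reports 3 for that value.

package varintutils_test

import (
	"bytes"
	"fmt"

	"github.com/bitcoin-sv/arc/internal/varintutils"
)

func ExampleVarInt_ReadFrom() {
	var v varintutils.VarInt

	// 0xfd prefix -> read the next 2 bytes as a little-endian uint16.
	n, err := v.ReadFrom(bytes.NewReader([]byte{0xfd, 0xe8, 0x03}))
	if err != nil {
		fmt.Println(err)
		return
	}

	fmt.Println(uint64(v), n, v.Length())
	// Output: 1000 3 3
}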
--------------------------------------------------------------------------------
/internal/beef/bump.go:
--------------------------------------------------------------------------------
1 | package beef
2 |
3 | import (
4 | "errors"
5 |
6 | sdkTx "github.com/bsv-blockchain/go-sdk/transaction"
7 | )
8 |
9 | var (
10 | ErrBUMPNoMerkleRoots = errors.New("no merkle roots found for validation")
11 | ErrBUMPDifferentMerkleRoots = errors.New("different merkle roots for the same block")
12 | ErrBUMPEmptyMerkleRoot = errors.New("no transactions marked as expected to verify in bump")
13 | )
14 |
15 | type MerkleRootVerificationRequest struct {
16 | MerkleRoot string
17 | BlockHeight uint64
18 | }
19 |
20 | func CalculateMerkleRootsFromBumps(bumps []*sdkTx.MerklePath) ([]MerkleRootVerificationRequest, error) {
21 | merkleRoots := make([]MerkleRootVerificationRequest, 0)
22 |
23 | for _, bump := range bumps {
24 | blockMerkleRoot, err := calculateMerkleRootFromBump(bump)
25 | if err != nil {
26 | return nil, err
27 | }
28 |
29 | merkleRoots = append(merkleRoots, MerkleRootVerificationRequest{
30 | MerkleRoot: blockMerkleRoot,
31 | BlockHeight: uint64(bump.BlockHeight),
32 | })
33 | }
34 |
35 | if len(merkleRoots) == 0 {
36 | return nil, ErrBUMPNoMerkleRoots
37 | }
38 |
39 | return merkleRoots, nil
40 | }
41 |
42 | func calculateMerkleRootFromBump(bump *sdkTx.MerklePath) (string, error) {
43 | var computedRoot string
44 |
45 | for _, pathGroup := range bump.Path {
46 | for _, element := range pathGroup {
47 | if element.Txid == nil {
48 | continue
49 | }
50 |
51 | root, err := bump.ComputeRoot(element.Hash)
52 | if err != nil {
53 | return "", err
54 | }
55 |
56 | rootStr := root.String()
57 | if computedRoot == "" {
58 | computedRoot = rootStr
59 | } else if computedRoot != rootStr {
60 | return "", ErrBUMPDifferentMerkleRoots
61 | }
62 | }
63 | }
64 |
65 | if computedRoot == "" {
66 | return "", ErrBUMPEmptyMerkleRoot
67 | }
68 |
69 | return computedRoot, nil
70 | }
71 |
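A minimal sketch of collecting merkle-root verification requests from BUMPs; how the bumps themselves are parsed (e.g. from a BEEF payload) is outside this snippet.

package beef_test

import (
	"fmt"

	sdkTx "github.com/bsv-blockchain/go-sdk/transaction"

	"github.com/bitcoin-sv/arc/internal/beef"
)

func collectMerkleRootRequests(bumps []*sdkTx.MerklePath) {
	requests, err := beef.CalculateMerkleRootsFromBumps(bumps)
	if err != nil {
		// ErrBUMPNoMerkleRoots, ErrBUMPDifferentMerkleRoots or ErrBUMPEmptyMerkleRoot
		fmt.Println(err)
		return
	}

	for _, req := range requests {
		// Each request pairs a computed merkle root with its block height,
		// ready to be sent to blocktx for verification.
		fmt.Printf("height %d -> merkle root %s\n", req.BlockHeight, req.MerkleRoot)
	}
}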
--------------------------------------------------------------------------------
/internal/metamorph/store/postgresql/fixtures/update_double_spend/metamorph.transactions.yaml:
--------------------------------------------------------------------------------
1 | - hash: 0xcd3d2f97dfc0cdb6a07ec4b72df5e1794c9553ff2f62d90ed4add047e8088853
2 | locked_by: metamorph-3
3 | status: 100
4 | stored_at: 2023-10-01T14:00:00+00:00
5 | last_submitted_at: 2023-10-01T14:00:00+00:00
6 | competing_txs: '33332d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e'
7 | status_history:
8 | - status: 40
9 | timestamp: 2023-10-01T14:00:00+00:00
10 | - hash: 0x21132d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e
11 | locked_by: metamorph-3
12 | status: 50
13 | stored_at: 2023-10-01T14:00:00+00:00
14 | last_submitted_at: 2023-10-01T14:00:00+00:00
15 | competing_txs: '33332d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e,55532d32cb5411c058bb4391f24f6a36ed9b810df851d0e36cac514fd03d6b4e'
16 | - hash: 0xb16cea53fc823e146fbb9ae4ad3124f7c273f30562585ad6e4831495d609f430
17 | locked_by: metamorph-1
18 | status: 90
19 | stored_at: 2023-10-01T14:00:00+00:00
20 | last_submitted_at: 2023-10-01T14:00:00+00:00
21 | - hash: 0xee76f5b746893d3e6ae6a14a15e464704f4ebd601537820933789740acdcf6aa
22 | locked_by: metamorph-1
23 | status: 80
24 | stored_at: 2023-10-01T14:00:00+00:00
25 | last_submitted_at: 2023-10-01T14:00:00+00:00
26 | - hash: 0x3e0b5b218c344110f09bf485bc58de4ea5378e55744185edf9c1dafa40068ecd
27 | locked_by: metamorph-1
28 | status: 70
29 | stored_at: 2023-10-01T14:00:00+00:00
30 | last_submitted_at: 2023-10-01T14:00:00+00:00
31 | - hash: 0x7809b730cbe7bb723f299a4e481fb5165f31175876392a54cde85569a18cc75f
32 | locked_by: metamorph-1
33 | status: 70
34 | stored_at: 2023-10-01T14:00:00+00:00
35 | last_submitted_at: 2023-10-01T14:00:00+00:00
36 | - hash: 0xaaa350ca12a0dd9375540e13637b02e054a3436336e9d6b82fe7f2b23c710002
37 | locked_by: metamorph-1
38 | status: 70
39 | stored_at: 2023-10-01T14:00:00+00:00
40 | last_submitted_at: 2023-10-01T14:00:00+00:00
41 |
--------------------------------------------------------------------------------
/test/init_test.go:
--------------------------------------------------------------------------------
1 | //go:build e2e
2 |
3 | package test
4 |
5 | import (
6 | "log"
7 | "os"
8 | "testing"
9 | "time"
10 |
11 | "github.com/ordishs/go-bitcoin"
12 | )
13 |
14 | func TestMain(m *testing.M) {
15 | setupSut()
16 |
17 | info, err := bitcoind.GetInfo()
18 | if err != nil {
19 | log.Printf("failed to get info: %v", err)
20 | return
21 | }
22 |
23 | log.Printf("block height: %d", info.Blocks)
24 | m.Run()
25 | }
26 |
27 | func setupSut() {
28 | log.Println("init tests")
29 |
30 | if os.Getenv("TEST_LOCAL") != "" {
31 | nodeHost = "localhost"
32 | arcEndpoint = "http://localhost:9090/"
33 | arcEndpointV1Tx = arcEndpoint + v1Tx
34 | arcEndpointV1Txs = arcEndpoint + v1Txs
35 | }
36 |
37 | var err error
38 | bitcoind, err = bitcoin.New(nodeHost, nodePort, nodeUser, nodePassword, false)
39 | if err != nil {
40 | log.Fatalln("Failed to create bitcoind instance:", err)
41 | }
42 |
43 | info, err := bitcoind.GetInfo()
44 | if err != nil {
45 | log.Fatalln(err)
46 | }
47 |
48 | log.Printf("block height: %d", info.Blocks)
49 | // fund node
50 | const minNumberOfBlocks = 101
51 |
52 | blocksToGenerate := minNumberOfBlocks - info.Blocks
53 |
54 | if blocksToGenerate <= 0 {
55 | return
56 | }
57 | log.Printf("generate %d blocks", blocksToGenerate)
58 |
59 | // generate blocks in batches to ensure blocktx is able to process all blocks
60 | blockBatch := int32(20)
61 | if os.Getenv("TEST_LOCAL_MCAST") != "" {
62 | blockBatch = 4
63 | }
64 |
65 | for blocksToGenerate > 0 {
66 | _, err = bitcoind.Generate(float64(blockBatch))
67 | if err != nil {
68 | log.Fatalln(err)
69 | }
70 |
71 | // give time to send all INV messages
72 | time.Sleep(5 * time.Second)
73 |
74 | info, err = bitcoind.GetInfo()
75 | if err != nil {
76 | log.Fatalln(err)
77 | }
78 |
79 | log.Printf("block height: %d", info.Blocks)
80 |
81 | blocksToGenerate = blocksToGenerate - blockBatch
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/doc/index.html:
--------------------------------------------------------------------------------
(HTML markup not captured in this listing; only the page title "Document" is recoverable)
--------------------------------------------------------------------------------
/.github/workflows/static-analysis.yaml:
--------------------------------------------------------------------------------
1 | name: Static Analysis and Report
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 | pull_request:
7 | branches: ["**"]
8 | paths:
9 | - 'cmd/**'
10 | - 'config/**'
11 | - 'internal/**'
12 | - 'examples/**'
13 | - 'pkg/**'
14 | - 'go.mod'
15 | - 'go.sum'
16 | - '.golangci.yml'
17 | jobs:
18 | analyze:
19 | name: Static analysis
20 | runs-on: ubuntu-latest
21 | steps:
22 | - name: Check out the repo
23 | uses: actions/checkout@v4
24 | with:
25 | fetch-depth: 0
26 |
27 | - name: Setup Go
28 | uses: actions/setup-go@v5
29 | with:
30 | go-version-file: "./go.mod"
31 |
32 | - name: Run unit tests
33 | run: go test -race -parallel=8 -vet=off -coverprofile=./cov.out ./... -tags=multicast_test -coverpkg ./...
34 |
35 | - name: Run gosec Security Scanner
36 | continue-on-error: true
37 | uses: securego/gosec@master
38 | with:
39 | args: -exclude-dir=testdata -exclude-dir=test -exclude-dir=blocktx/store/sql -exclude-generated -fmt=sonarqube -out gosec-report.json ./...
40 |
41 | - name: SonarQube Scan
42 | uses: SonarSource/sonarcloud-github-action@4006f663ecaf1f8093e8e4abb9227f6041f52216
43 | env:
44 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
45 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
46 |
47 | lint:
48 | name: Golangci-lint
49 | runs-on: ubuntu-latest
50 | steps:
51 | - name: Check out the repo
52 | uses: actions/checkout@v4
53 |
54 | - name: Setup Go
55 | uses: actions/setup-go@v5
56 | with:
57 | go-version-file: "./go.mod"
58 | cache: false
59 |
60 | - name: golangci-lint
61 | uses: golangci/golangci-lint-action@4afd733a84b1f43292c63897423277bb7f4313a9 # v8.0.0
62 | with:
63 | version: v2.5.0
64 |
--------------------------------------------------------------------------------
/internal/blocktx/store/postgresql/update_block_statuses.go:
--------------------------------------------------------------------------------
1 | package postgresql
2 |
3 | import (
4 | "context"
5 | "errors"
6 |
7 | "github.com/bitcoin-sv/arc/internal/blocktx/blocktx_api"
8 | "github.com/bitcoin-sv/arc/internal/blocktx/store"
9 | "github.com/lib/pq"
10 | )
11 |
12 | func (p *PostgreSQL) UpdateBlocksStatuses(ctx context.Context, blockStatusUpdates []store.BlockStatusUpdate) error {
13 | q := `
14 | UPDATE blocktx.blocks b
15 | SET status = updates.status, is_longest = updates.is_longest
16 | FROM (
17 | SELECT * FROM UNNEST($1::BYTEA[], $2::INTEGER[], $3::BOOLEAN[]) AS u(hash, status, is_longest)
18 | WHERE is_longest = $4
19 | ) AS updates
20 | WHERE b.hash = updates.hash
21 | `
22 |
23 | blockHashes := make([][]byte, len(blockStatusUpdates))
24 | statuses := make([]blocktx_api.Status, len(blockStatusUpdates))
25 | isLongest := make([]bool, len(blockStatusUpdates))
26 |
27 | for i, update := range blockStatusUpdates {
28 | blockHashes[i] = update.Hash
29 | statuses[i] = update.Status
30 | isLongest[i] = update.Status == blocktx_api.Status_LONGEST
31 | }
32 |
33 | tx, err := p.db.Begin()
34 | if err != nil {
35 | return errors.Join(store.ErrFailedToUpdateBlockStatuses, err)
36 | }
37 | defer func() {
38 | _ = tx.Rollback()
39 | }()
40 |
41 | // first update blocks that are changing statuses to non-LONGEST
42 | _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), false)
43 | if err != nil {
44 | return errors.Join(store.ErrFailedToUpdateBlockStatuses, err)
45 | }
46 |
47 | // then update blocks that are changing statuses to LONGEST
48 | _, err = tx.ExecContext(ctx, q, pq.Array(blockHashes), pq.Array(statuses), pq.Array(isLongest), true)
49 | if err != nil {
50 | return errors.Join(store.ErrFailedToUpdateBlockStatuses, err)
51 | }
52 |
53 | err = tx.Commit()
54 | if err != nil {
55 | return errors.Join(store.ErrFailedToUpdateBlockStatuses, err)
56 | }
57 |
58 | return nil
59 | }
60 |
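A usage sketch for the method above (not taken from the repository): flipChainTip is a hypothetical helper living in the same package, and Status_STALE is assumed to exist in blocktx_api alongside the Status_LONGEST constant used above. The two ExecContext calls apply non-LONGEST rows before LONGEST rows within one transaction, which keeps intermediate states consistent if the schema constrains how many longest blocks may exist at a given height.

    // flipChainTip is a hypothetical helper: one block leaves the longest chain while
    // another becomes the new tip, in a single UpdateBlocksStatuses call.
    func flipChainTip(ctx context.Context, p *PostgreSQL, staleHash, newTipHash []byte) error {
        updates := []store.BlockStatusUpdate{
            {Hash: staleHash, Status: blocktx_api.Status_STALE}, // assumed status constant
            {Hash: newTipHash, Status: blocktx_api.Status_LONGEST},
        }

        // non-LONGEST updates are applied first, then LONGEST ones, inside one transaction
        return p.UpdateBlocksStatuses(ctx, updates)
    }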
--------------------------------------------------------------------------------
/cmd/broadcaster-cli/broadcaster-cli-example.yaml:
--------------------------------------------------------------------------------
1 | logLevel: DEBUG # mode of logging. Value can be one of DEBUG | INFO | WARN | ERROR
2 | logFormat: text # format of logging. Value can be one of text | json | tint
3 | privateKeys: # map of private keys
4 | key-1: xprv9s21ZrQH143K2XcKfb8mRDRcHdDLBpV2mrYon4JcYc2CZD4cii9Rbwo1fdcH28Md6FChCpm55dXg4NiyznuJ2PNEQfXbCR4pNRezAdcM131
5 | key-2: xprv9s21ZrQH143K4GD3dFcMGxeF2pWUQsx89PDT1345ZkinZ3qa6irjYfJ1t71PmQUfm1aCLsqM99fCxKjE13zNodgg4XVp8xSTw93erJTPLKf
6 | key-3: xprv9s21ZrQH143K4V3QE96dXDBrrxY63S3aRbr8pHoRTffsgjaWzPZLabkjkLgbCLJjvrkhNLLn8L2Vm9HGntxNc5bhkLfbTzuQShaCada49pv
7 | key-4: xprv9s21ZrQH143K2eu6ncRqkGU8TytLYiaH7BS7ZzgmXEjPRByoCx2GBuRDQAqe7gCHkN2Gpv9MqekacpgAFisfo3DFfBHvt6vA5CMqRkvJdaJ
8 | key-5: xprv9s21ZrQH143K2dWRKF4eaffCiqM997F3Mz9JiF2jq345AjiDW3LG5LFHtoeKKrnpfyX3ebGPExmST1T9x9jMC3dsnEimF3S2tBeRwNEyfwh
9 |
10 | # flags
11 | miningFeeSatPerKb: 1 # Mining fee in satoshis per KB offered in transactions
12 | apiURL: https://arc-test.taal.com # URL of ARC instance
13 | authorization: Bearer some-token # Authorization header to use for the http api client
14 | testnet: true # Use testnet
15 | wocAPIKey: testnet_XXXX # WoC api key for faster requests to WoC
16 | callbackURL: http://callbacks.example.com # callback URL header
17 | callbackToken: some-token # callback token header
18 | addTimestampToToken: true # add a test-run_ suffix to the callbackToken
19 | prometheusOLTPExport: # prometheus metrics export
20 | enabled: false # whether to enable prometheus metrics export
21 | endpoint: "localhost:9090" # prometheus metrics endpoint
22 | path: "/api/v1/otlp/v1/metrics" # prometheus metrics path
23 | fullStatusUpdates: false # full status updates header
24 | opReturn: example text # text which will be added to an OP_RETURN output. If empty, no OP_RETURN output will be added
25 | rampUpTickerEnabled: false # whether to use the ramp up ticker. If false, constant ticker will be used
26 | keys: # list of selected private keys
27 | - key-1
28 | - key-2
29 |
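To illustrate the shape of this example config only: the struct below mirrors a handful of the keys and could be decoded with gopkg.in/yaml.v3. It is not the broadcaster-cli's actual configuration type, and the CLI may load its settings through a different mechanism.

    package main

    import (
        "fmt"
        "log"
        "os"

        "gopkg.in/yaml.v3"
    )

    // exampleConfig is illustrative only and covers just a few of the keys shown above.
    type exampleConfig struct {
        LogLevel          string            `yaml:"logLevel"`
        PrivateKeys       map[string]string `yaml:"privateKeys"`
        MiningFeeSatPerKb int               `yaml:"miningFeeSatPerKb"`
        APIURL            string            `yaml:"apiURL"`
        Keys              []string          `yaml:"keys"`
    }

    func main() {
        raw, err := os.ReadFile("broadcaster-cli-example.yaml")
        if err != nil {
            log.Fatal(err)
        }

        var cfg exampleConfig
        if err := yaml.Unmarshal(raw, &cfg); err != nil {
            log.Fatal(err)
        }

        fmt.Printf("selected keys: %v, target: %s\n", cfg.Keys, cfg.APIURL)
    }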
--------------------------------------------------------------------------------
/internal/global/mocks/stoppable_mock.go:
--------------------------------------------------------------------------------
1 | // Code generated by moq; DO NOT EDIT.
2 | // github.com/matryer/moq
3 |
4 | package mocks
5 |
6 | import (
7 | "github.com/bitcoin-sv/arc/internal/global"
8 | "sync"
9 | )
10 |
11 | // Ensure, that StoppableMock does implement global.Stoppable.
12 | // If this is not the case, regenerate this file with moq.
13 | var _ global.Stoppable = &StoppableMock{}
14 |
15 | // StoppableMock is a mock implementation of global.Stoppable.
16 | //
17 | // func TestSomethingThatUsesStoppable(t *testing.T) {
18 | //
19 | // // make and configure a mocked global.Stoppable
20 | // mockedStoppable := &StoppableMock{
21 | // ShutdownFunc: func() {
22 | // panic("mock out the Shutdown method")
23 | // },
24 | // }
25 | //
26 | // // use mockedStoppable in code that requires global.Stoppable
27 | // // and then make assertions.
28 | //
29 | // }
30 | type StoppableMock struct {
31 | // ShutdownFunc mocks the Shutdown method.
32 | ShutdownFunc func()
33 |
34 | // calls tracks calls to the methods.
35 | calls struct {
36 | // Shutdown holds details about calls to the Shutdown method.
37 | Shutdown []struct {
38 | }
39 | }
40 | lockShutdown sync.RWMutex
41 | }
42 |
43 | // Shutdown calls ShutdownFunc.
44 | func (mock *StoppableMock) Shutdown() {
45 | if mock.ShutdownFunc == nil {
46 | panic("StoppableMock.ShutdownFunc: method is nil but Stoppable.Shutdown was just called")
47 | }
48 | callInfo := struct {
49 | }{}
50 | mock.lockShutdown.Lock()
51 | mock.calls.Shutdown = append(mock.calls.Shutdown, callInfo)
52 | mock.lockShutdown.Unlock()
53 | mock.ShutdownFunc()
54 | }
55 |
56 | // ShutdownCalls gets all the calls that were made to Shutdown.
57 | // Check the length with:
58 | //
59 | // len(mockedStoppable.ShutdownCalls())
60 | func (mock *StoppableMock) ShutdownCalls() []struct {
61 | } {
62 | var calls []struct {
63 | }
64 | mock.lockShutdown.RLock()
65 | calls = mock.calls.Shutdown
66 | mock.lockShutdown.RUnlock()
67 | return calls
68 | }
69 |
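A minimal sketch (not from the repository) of asserting against this mock from a test in the same package; it relies only on the generated API shown above (ShutdownFunc and ShutdownCalls) plus the standard testing package.

    func TestShutdownIsCalledOnce_Sketch(t *testing.T) {
        stopped := false
        mockedStoppable := &StoppableMock{
            ShutdownFunc: func() { stopped = true },
        }

        // the code under test would normally trigger this; invoked directly for illustration
        mockedStoppable.Shutdown()

        if !stopped || len(mockedStoppable.ShutdownCalls()) != 1 {
            t.Fatalf("expected exactly one Shutdown call")
        }
    }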
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Code of Conduct
2 |
3 | ## Introduction
4 | This Code of Conduct outlines the expectations for all contributors to our open-source project, as well as the responsibilities of the maintainers in ensuring a positive and productive environment. Our goal is to create excellent software. To achieve that, we foster a welcoming atmosphere for developers to collaborate and contribute to the project's success.
5 |
6 | ## Expectations for Contributors
7 | All contributors are expected to:
8 | a. Treat others with respect and dignity.
9 | b. Be considerate of differing viewpoints.
10 | c. Collaborate constructively with others.
11 | d. Refrain from demeaning or harassing others.
12 |
13 | ## Reporting Violations
14 | If you witness or experience any behavior that violates this Code of Conduct, please report it to the project maintainers. Reports can be made anonymously or by contacting a specific maintainer.
15 |
16 | ## Enforcement
17 | The project maintainers are responsible for enforcing this Code of Conduct. They will investigate any reported violations and take appropriate action, which may include:
18 | a. Issuing a warning.
19 | b. Requiring an apology or remediation.
20 | c. Temporarily suspending or permanently banning a contributor from the project.
21 |
22 | ## Maintainer Responsibilities
23 | Maintainers are responsible for:
24 | a. Upholding the Code of Conduct and setting a positive example for the community.
25 | b. Investigating and addressing reported violations in a fair and timely manner.
26 | c. Making decisions about merging code based on its merit alone, without bias or favoritism.
27 |
28 | ## Changes to the Code of Conduct
29 | This Code of Conduct is subject to change as the project evolves. Any updates will be communicated to all contributors and posted in the project repository.
30 |
31 | By participating in this open-source project, you agree to abide by this Code of Conduct. We appreciate your cooperation in creating a positive and productive environment for everyone involved.
32 |
--------------------------------------------------------------------------------
/internal/api/handler/stats.go:
--------------------------------------------------------------------------------
1 | package handler
2 |
3 | import (
4 | "errors"
5 |
6 | "github.com/prometheus/client_golang/prometheus"
7 | )
8 |
9 | var ErrFailedToRegisterStats = errors.New("failed to register stats collector")
10 |
11 | type Stats struct {
12 | apiTxSubmissions prometheus.Counter
13 | AvailableBlockHeaderServices prometheus.Gauge
14 | UnavailableBlockHeaderServices prometheus.Gauge
15 | }
16 |
17 | func NewStats() (*Stats, error) {
18 | p := &Stats{
19 | apiTxSubmissions: prometheus.NewCounter(prometheus.CounterOpts{
20 | Name: "api_submit_txs",
21 | Help: "Nr of txs submitted",
22 | }),
23 | AvailableBlockHeaderServices: prometheus.NewGauge(prometheus.GaugeOpts{
24 | Name: "arc_api_available_block_header_services",
25 | Help: "Current number of available block header services",
26 | }),
27 | UnavailableBlockHeaderServices: prometheus.NewGauge(prometheus.GaugeOpts{
28 | Name: "arc_api_unavailable_block_header_services",
29 | Help: "Current number of unavailable block header services",
30 | }),
31 | }
32 |
33 | err := registerStats(
34 | p.apiTxSubmissions,
35 | p.AvailableBlockHeaderServices,
36 | p.UnavailableBlockHeaderServices,
37 | )
38 | if err != nil {
39 | return nil, errors.Join(ErrFailedToRegisterStats, err)
40 | }
41 |
42 | return p, nil
43 | }
44 |
45 | func (s *Stats) Add(inc int) {
46 | s.apiTxSubmissions.Add(float64(inc))
47 | }
48 |
49 | func (s *Stats) UnregisterStats() {
50 | unregisterStats(
51 | s.apiTxSubmissions,
52 | s.AvailableBlockHeaderServices,
53 | s.UnavailableBlockHeaderServices,
54 | )
55 | }
56 |
57 | func registerStats(cs ...prometheus.Collector) error {
58 | for _, c := range cs {
59 | err := prometheus.Register(c)
60 | if err != nil {
61 | return errors.Join(ErrFailedToRegisterStats, err)
62 | }
63 | }
64 |
65 | return nil
66 | }
67 |
68 | func unregisterStats(cs ...prometheus.Collector) {
69 | for _, c := range cs {
70 | _ = prometheus.Unregister(c)
71 | }
72 | }
73 |
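A usage sketch under the assumption that the caller sits outside this package and imports it as handler (with log from the standard library); the server wiring around it is omitted.

    stats, err := handler.NewStats()
    if err != nil {
        // registering the same collectors twice ends up here
        log.Fatalf("stats setup failed: %v", err)
    }
    defer stats.UnregisterStats()

    stats.Add(3)                              // three transactions submitted
    stats.AvailableBlockHeaderServices.Set(2) // gauges are exported fields, set directly
    stats.UnavailableBlockHeaderServices.Set(0)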
--------------------------------------------------------------------------------
/internal/metamorph/collector_processor.go:
--------------------------------------------------------------------------------
1 | package metamorph
2 |
3 | import (
4 | "sync/atomic"
5 |
6 | "github.com/prometheus/client_golang/prometheus"
7 | )
8 |
9 | type prometheusCollector struct {
10 | processor ProcessorI
11 | channelMapSize *prometheus.Desc
12 | healthyPeerConnections *prometheus.Desc
13 | }
14 |
15 | var collectorLoaded = atomic.Bool{}
16 |
17 | // You must create a constructor for your prometheusCollector that
18 | // initializes every descriptor and returns a pointer to the prometheusCollector
19 | func newPrometheusCollector(p ProcessorI) error {
20 | if !collectorLoaded.CompareAndSwap(false, true) {
21 | return nil
22 | }
23 |
24 | c := &prometheusCollector{
25 | processor: p,
26 | channelMapSize: prometheus.NewDesc("arc_metamorph_processor_map_size",
27 | "Number of ResponseItems in the processor map",
28 | nil, nil,
29 | ),
30 | healthyPeerConnections: prometheus.NewDesc("arc_healthy_peers_count", "Number of healthy peer connections", nil, nil),
31 | }
32 |
33 | return prometheus.Register(c)
34 | }
35 |
36 | // Describe writes all descriptors to the prometheus desc channel.
37 | func (c *prometheusCollector) Describe(ch chan<- *prometheus.Desc) {
38 | // Update this section with each metric you create for a given prometheusCollector
39 | ch <- c.channelMapSize
40 | ch <- c.healthyPeerConnections
41 | }
42 |
43 | // Collect implements required collect function for all prometheus collectors
44 | func (c *prometheusCollector) Collect(ch chan<- prometheus.Metric) {
45 | stats := c.processor.GetProcessorMapSize()
46 |
47 | // Note that you can pass CounterValue, GaugeValue, or UntypedValue types here.
48 | ch <- prometheus.MustNewConstMetric(c.channelMapSize, prometheus.GaugeValue, float64(stats))
49 |
50 | healthyConnections := 0
51 |
52 | for _, peer := range c.processor.GetPeers() {
53 | if peer.Connected() {
54 | healthyConnections++
55 | }
56 | }
57 |
58 | ch <- prometheus.MustNewConstMetric(c.healthyPeerConnections, prometheus.GaugeValue, float64(healthyConnections))
59 | }
60 |
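The collectorLoaded guard above makes newPrometheusCollector effectively register-once. As a standalone sketch of the same idiom (independent of ProcessorI and not part of this file):

    // registerOnce runs register only for the first caller; later calls become no-ops,
    // mirroring the CompareAndSwap guard used by newPrometheusCollector above.
    var loaded atomic.Bool

    func registerOnce(register func() error) error {
        if !loaded.CompareAndSwap(false, true) {
            return nil
        }
        return register()
    }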
--------------------------------------------------------------------------------
/.goreleaser.yaml:
--------------------------------------------------------------------------------
1 | # Make sure to check the documentation at http://goreleaser.com
2 | # ---------------------------
3 | # General
4 | # ---------------------------
5 | version: 2
6 |
7 | snapshot:
8 | version_template: "{{ .Tag }}"
9 |
10 | # ---------------------------
11 | # Changelog
12 | # ---------------------------
13 | changelog:
14 | sort: asc
15 | groups:
16 | - title: Dependency updates
17 | regexp: '^.*?(.+)\(deps\)!?:.+$'
18 | order: 300
19 | - title: "New Features"
20 | regexp: '^.*?feat(\(.+\))??!?:.+$'
21 | order: 100
22 | - title: "Security updates"
23 | regexp: '^.*?sec(\(.+\))??!?:.+$'
24 | order: 150
25 | - title: "Bug fixes"
26 | regexp: '^.*?(fix)(\(.+\))??!?:.+$'
27 | order: 200
28 | - title: "Refactor"
29 | regexp: '^.*?(refactor)(\(.+\))??!?:.+$'
30 | order: 300
31 | - title: "Documentation updates"
32 | regexp: '^.*?docs?(\(.+\))??!?:.+$'
33 | order: 400
34 | - title: "Build process updates"
35 | regexp: '^.*?(build|ci)(\(.+\))??!?:.+$'
36 | order: 400
37 | - title: Other work
38 | order: 9999
39 |
40 |
41 | # ---------------------------
42 | # Builder
43 | # ---------------------------
44 | builds:
45 | - skip: true
46 |
47 | # ---------------------------
48 | # Github Release
49 | # ---------------------------
50 | release:
51 | prerelease: false
52 | name_template: "{{.Tag}}"
53 | header: |
54 | # 🚀 Release of {{ .ProjectName }} - {{ .Tag }}
55 |
56 | **Released on**: {{ .Date }}
57 |
58 | ## What's New
59 | Here are the latest updates, bug fixes, and features in this release:
60 |
61 | footer: |
62 | ## Full Changelog:
63 | You can find the full changelog here: https://github.com/bitcoin-sv/arc/compare/{{ .PreviousTag }}...{{ .Tag }}
64 |
65 | ## Docker Image:
66 | You can find the Docker image at [Docker Hub](https://hub.docker.com/r/bsvb/arc)
67 | or pull it by running `docker pull bsvb/arc:{{.Version}}`
68 |
69 | ### Thank you for using {{ .ProjectName }}! 🎉
70 |
--------------------------------------------------------------------------------