├── .dockerignore ├── .github ├── CONTRIBUTING.md ├── issue-template.md ├── pull_request_template.md └── workflows │ ├── bandit_scanner.yml │ ├── codeql_scanner.yml │ ├── docker.yml │ ├── functionals_test.yml │ ├── integrations_test.yml │ ├── licenses_scanner.yml │ ├── property_test.yml │ ├── pylint_scanner.yml │ ├── pytest_action.yml │ ├── ruff_scanner.yml │ ├── rust_test.yml │ └── units_test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Dockerfile ├── LICENSE ├── README.md ├── RELEASE.md ├── SECURITY.md ├── apiary.apib ├── counterparty-core ├── .coveragerc ├── counterpartycore │ ├── __init__.py │ ├── lib │ │ ├── __init__.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ ├── apiserver.py │ │ │ ├── apiv1.py │ │ │ ├── apiwatcher.py │ │ │ ├── caches.py │ │ │ ├── compose.py │ │ │ ├── composer.py │ │ │ ├── dbbuilder.py │ │ │ ├── healthz.py │ │ │ ├── migrations │ │ │ │ ├── 0001.create_and_populate_address_events.py │ │ │ │ ├── 0002.create_and_populate_parsed_events.py │ │ │ │ ├── 0003.create_and_populate_all_expirations.py │ │ │ │ ├── 0004.create_and_populate_assets_info.py │ │ │ │ ├── 0005.create_and_populate_events_count.py │ │ │ │ ├── 0006.create_and_populate_consolidated_tables.py │ │ │ │ ├── 0007.create_views.py │ │ │ │ ├── 0008.create_config_table.py │ │ │ │ ├── 0009.create_and_populate_transaction_types_count.py │ │ │ │ └── 0010.fix_bet_match_resolution_event_name.py │ │ │ ├── queries.py │ │ │ ├── routes.py │ │ │ ├── verbose.py │ │ │ └── wsgi.py │ │ ├── backend │ │ │ ├── __init__.py │ │ │ ├── bitcoind.py │ │ │ ├── electrs.py │ │ │ └── rsfetcher.py │ │ ├── cli │ │ │ ├── __init__.py │ │ │ ├── bootstrap.py │ │ │ ├── initialise.py │ │ │ ├── log.py │ │ │ ├── main.py │ │ │ ├── publickeys.py │ │ │ ├── server.py │ │ │ └── setup.py │ │ ├── config.py │ │ ├── exceptions.py │ │ ├── ledger │ │ │ ├── __init__.py │ │ │ ├── backendheight.py │ │ │ ├── balances.py │ │ │ ├── blocks.py │ │ │ ├── caches.py │ │ │ ├── currentstate.py │ │ │ ├── events.py │ │ │ ├── issuances.py │ │ │ ├── 
markets.py │ │ │ ├── migrations │ │ │ │ ├── 0001.initial_migration.sql │ │ │ │ ├── 0002.create_mempool_transactions_table.sql │ │ │ │ ├── 0003.add_indexes_in_sends_table.sql │ │ │ │ ├── 0005.add_max_mint_by_address.sql │ │ │ │ ├── 0006.fix_bet_match_resolution_event_name.sql │ │ │ │ └── 0007.add_mime_type_field.sql │ │ │ ├── other.py │ │ │ └── supplies.py │ │ ├── messages │ │ │ ├── __init__.py │ │ │ ├── attach.py │ │ │ ├── bet.py │ │ │ ├── broadcast.py │ │ │ ├── btcpay.py │ │ │ ├── burn.py │ │ │ ├── cancel.py │ │ │ ├── data │ │ │ │ ├── __init__.py │ │ │ │ ├── checkpoints.py │ │ │ │ ├── get_oldest_tx.json │ │ │ │ ├── mainnet_burns.csv │ │ │ │ └── rps_events.json │ │ │ ├── destroy.py │ │ │ ├── detach.py │ │ │ ├── dispense.py │ │ │ ├── dispenser.py │ │ │ ├── dividend.py │ │ │ ├── fairmint.py │ │ │ ├── fairminter.py │ │ │ ├── gas.py │ │ │ ├── issuance.py │ │ │ ├── move.py │ │ │ ├── order.py │ │ │ ├── rps.py │ │ │ ├── rpsresolve.py │ │ │ ├── send.py │ │ │ ├── sweep.py │ │ │ ├── utxo.py │ │ │ └── versions │ │ │ │ ├── __init__.py │ │ │ │ ├── enhancedsend.py │ │ │ │ ├── mpma.py │ │ │ │ └── send1.py │ │ ├── monitors │ │ │ ├── __init__.py │ │ │ ├── profiler.py │ │ │ ├── sentry.py │ │ │ ├── slack.py │ │ │ └── telemetry │ │ │ │ ├── clients │ │ │ │ ├── __init__.py │ │ │ │ ├── influxdb.py │ │ │ │ ├── interface.py │ │ │ │ └── local.py │ │ │ │ ├── collectors │ │ │ │ ├── __init__.py │ │ │ │ ├── base.py │ │ │ │ ├── influxdb.py │ │ │ │ └── interface.py │ │ │ │ ├── daemon.py │ │ │ │ ├── oneshot.py │ │ │ │ └── util.py │ │ ├── parser │ │ │ ├── __init__.py │ │ │ ├── blocks.py │ │ │ ├── check.py │ │ │ ├── deserialize.py │ │ │ ├── follow.py │ │ │ ├── gettxinfo.py │ │ │ ├── gettxinfolegacy.py │ │ │ ├── mempool.py │ │ │ ├── messagetype.py │ │ │ ├── p2sh.py │ │ │ ├── protocol.py │ │ │ └── utxosinfo.py │ │ └── utils │ │ │ ├── address.py │ │ │ ├── assetnames.py │ │ │ ├── base58.py │ │ │ ├── database.py │ │ │ ├── helpers.py │ │ │ ├── mpmaencoding.py │ │ │ ├── multisig.py │ │ │ ├── opcodes.py │ │ 
│ └── script.py │ ├── protocol_changes.json │ └── test │ │ ├── __init__.py │ │ ├── fixtures │ │ ├── __init__.py │ │ ├── decodedtxs.py │ │ ├── defaults.py │ │ ├── ledgerdb.py │ │ ├── test_public_key.asc │ │ ├── test_snapshot.sig │ │ └── test_snapshot.tar.gz │ │ ├── functionals │ │ ├── conftest.py │ │ ├── fixtures_test.py │ │ ├── multisig_scenarios_test.py │ │ ├── p2sh_scenarios_test.py │ │ └── taproot_scenarios_test.py │ │ ├── integrations │ │ ├── comparehashes_test.py │ │ ├── dockercompose_test.py │ │ ├── load_test.py │ │ ├── locustrunner.py │ │ ├── mainnet_test.py │ │ ├── rebuild_test.py │ │ ├── regtest │ │ │ ├── apidoc │ │ │ │ ├── apicache.json │ │ │ │ ├── blueprint-template.md │ │ │ │ └── group-compose.md │ │ │ ├── dreddhooks.py │ │ │ ├── genapidoc.py │ │ │ ├── property_test.py │ │ │ ├── propertytestnode.py │ │ │ ├── regtestcli.py │ │ │ ├── regtestnode.py │ │ │ ├── scenarios │ │ │ │ ├── __init__.py │ │ │ │ ├── scenario_10_orders.py │ │ │ │ ├── scenario_11_btcpay.py │ │ │ │ ├── scenario_12_send.py │ │ │ │ ├── scenario_13_cancel.py │ │ │ │ ├── scenario_14_sweep.py │ │ │ │ ├── scenario_15_destroy.py │ │ │ │ ├── scenario_16_fairminter.py │ │ │ │ ├── scenario_17_dispenser.py │ │ │ │ ├── scenario_18_utxo.py │ │ │ │ ├── scenario_19_mpma.py │ │ │ │ ├── scenario_1_fairminter.py │ │ │ │ ├── scenario_20_fairminter.py │ │ │ │ ├── scenario_21_fairminter.py │ │ │ │ ├── scenario_22_chaining.py │ │ │ │ ├── scenario_23_detach.py │ │ │ │ ├── scenario_24_dispenser.py │ │ │ │ ├── scenario_25_issuance.py │ │ │ │ ├── scenario_2_fairminter.py │ │ │ │ ├── scenario_3_fairminter.py │ │ │ │ ├── scenario_4_broadcast.py │ │ │ │ ├── scenario_5_dispenser.py │ │ │ │ ├── scenario_6_dispenser.py │ │ │ │ ├── scenario_7_utxo.py │ │ │ │ ├── scenario_8_atomicswap.py │ │ │ │ ├── scenario_9_issuance.py │ │ │ │ └── scenario_last_mempool.py │ │ │ ├── scenarios_test.py │ │ │ ├── taprootdata_test.py │ │ │ └── testp2sh.py │ │ ├── reparsetest.py │ │ ├── rsfetcher_test.py │ │ ├── shutdown_test.py │ │ └── 
testnet4_test.py │ │ ├── mocks │ │ ├── apis.py │ │ ├── bitcoind.py │ │ ├── conftest.py │ │ └── counterpartydbs.py │ │ └── units │ │ ├── api │ │ ├── apicaches_test.py │ │ ├── apiserver_test.py │ │ ├── apiv1_test.py │ │ ├── compose_test.py │ │ ├── composer_test.py │ │ ├── composertaproot_test.py │ │ ├── dbbuilder_test.py │ │ ├── healthz_test.py │ │ └── wsgi_test.py │ │ ├── backend │ │ ├── bitcoind_test.py │ │ ├── electrs_test.py │ │ └── rsfetcher_test.py │ │ ├── cli │ │ ├── bootstrap_test.py │ │ ├── log_test.py │ │ ├── main_test.py │ │ ├── server_test.py │ │ └── zmqpublisher_test.py │ │ ├── conftest.py │ │ ├── ledger │ │ ├── balances_test.py │ │ ├── caches_test.py │ │ ├── currentstate_test.py │ │ ├── events_test.py │ │ ├── issuances_test.py │ │ ├── ledgerblocks_test.py │ │ ├── markets_test.py │ │ ├── other_test.py │ │ └── supplies_test.py │ │ ├── messages │ │ ├── attach_test.py │ │ ├── bet_test.py │ │ ├── broadcast_test.py │ │ ├── burn_test.py │ │ ├── cancel_test.py │ │ ├── destroy_test.py │ │ ├── detach_test.py │ │ ├── dispense_test.py │ │ ├── dispenser_test.py │ │ ├── dividend_test.py │ │ ├── fairmint_test.py │ │ ├── fairminter_test.py │ │ ├── gas_test.py │ │ ├── issuance_test.py │ │ ├── move_test.py │ │ ├── order_test.py │ │ ├── send_test.py │ │ ├── sweep_test.py │ │ ├── utxo_test.py │ │ └── versions │ │ │ ├── enhancedsend_test.py │ │ │ └── mpma_test.py │ │ ├── monitors │ │ ├── clients_test.py │ │ ├── collectors_test.py │ │ ├── conftest.py │ │ ├── daemon_test.py │ │ ├── oneshot_test.py │ │ ├── profiler_test.py │ │ ├── sentry_test.py │ │ ├── slack_test.py │ │ ├── telemetry_test.py │ │ └── util_test.py │ │ ├── parser │ │ ├── blocks_test.py │ │ ├── deserialize_test.py │ │ ├── gettxinfo_test.py │ │ ├── gettxinfolegacy_test.py │ │ ├── mempool │ │ │ ├── conftest.py │ │ │ ├── mempool_base_test.py │ │ │ ├── mempool_complex_test.py │ │ │ ├── mempool_exception_test.py │ │ │ └── mempool_workflow_test.py │ │ ├── messagetype_test.py │ │ ├── p2sh_test.py │ │ └── 
protocol_test.py │ │ └── utils │ │ ├── address_test.py │ │ ├── assetnames_test.py │ │ ├── base58_test.py │ │ ├── database_test.py │ │ ├── helpers_test.py │ │ ├── multisig_test.py │ │ └── script_test.py ├── pyproject.toml ├── requirements.txt └── tools │ ├── benchmark.py │ ├── checklicences.py │ ├── comparebalances.py │ ├── compareledger.py │ ├── copyscenarios.py │ ├── dumprps.py │ ├── finddivergence.py │ ├── gennewcheckpoints.py │ ├── rebuild.sh │ ├── updatetxids.py │ ├── upgradesqlitepagesize.py │ └── xcpcli.py ├── counterparty-rs ├── Cargo.lock ├── Cargo.toml ├── README.md ├── build.rs ├── pyproject.toml ├── rustfmt.toml └── src │ ├── b58.rs │ ├── indexer │ ├── bitcoin_client.rs │ ├── block.rs │ ├── config.rs │ ├── constants.rs │ ├── database.rs │ ├── handlers │ │ ├── get_block.rs │ │ ├── mod.rs │ │ ├── new.rs │ │ ├── start.rs │ │ └── stop.rs │ ├── logging.rs │ ├── mod.rs │ ├── rpc_client.rs │ ├── stopper.rs │ ├── test_utils.rs │ ├── types │ │ ├── entry.rs │ │ ├── error.rs │ │ ├── mod.rs │ │ └── pipeline.rs │ ├── utils.rs │ └── workers │ │ ├── consumer.rs │ │ ├── extractor.rs │ │ ├── fetcher.rs │ │ ├── mod.rs │ │ ├── orderer.rs │ │ ├── producer.rs │ │ ├── reporter.rs │ │ └── writer.rs │ ├── lib.rs │ └── utils.rs ├── docker-compose.yml ├── dredd.yml ├── pylintrc ├── release-notes ├── defunct │ ├── counterparty-cli-changelog.md │ └── counterparty-lib-changelog.md ├── release-notes-v10.0.0.md ├── release-notes-v10.0.1.md ├── release-notes-v10.1.0.md ├── release-notes-v10.1.1.md ├── release-notes-v10.1.2.md ├── release-notes-v10.10.0.md ├── release-notes-v10.10.1.md ├── release-notes-v10.2.0.md ├── release-notes-v10.3.0.md ├── release-notes-v10.3.1.md ├── release-notes-v10.3.2.md ├── release-notes-v10.4.0.md ├── release-notes-v10.4.1.md ├── release-notes-v10.4.2.md ├── release-notes-v10.4.3.md ├── release-notes-v10.4.4.md ├── release-notes-v10.4.5.md ├── release-notes-v10.4.6.md ├── release-notes-v10.4.7.md ├── release-notes-v10.4.8.md ├── release-notes-v10.5.0.md 
├── release-notes-v10.6.0.md ├── release-notes-v10.6.1.md ├── release-notes-v10.7.0.md ├── release-notes-v10.7.1.md ├── release-notes-v10.7.2.md ├── release-notes-v10.7.3.md ├── release-notes-v10.7.4.md ├── release-notes-v10.8.0.md ├── release-notes-v10.9.0.md ├── release-notes-v10.9.1.md ├── release-notes-v11.0.0.md ├── release-notes-v9.61.2.md └── release-notes-v9.61.3.md └── ruff.toml /.dockerignore: -------------------------------------------------------------------------------- 1 | **/__pycache__ 2 | *.pyc -------------------------------------------------------------------------------- /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Security Issues 2 | 3 | * If you’ve identified a potential **security issue**, please contact the developers 4 | directly at . 5 | 6 | 7 | # Reporting an Issue 8 | 9 | * Check to see if the issue has already been reported. 10 | 11 | * If possible, list the exact steps required to reproduce the issue, including the exact version/commit being run, as well as the platform the software 12 | is running on. 13 | 14 | * Try to reproduce the issue with verbose logging enabled and then share the relevant log output. 15 | 16 | 17 | # Making a Pull Request 18 | 19 | * Make (almost) all pull requests against the `develop` branch. 20 | 21 | * All original code should follow [PEP8](https://www.python.org/dev/peps/pep-0008/). 22 | 23 | * Code contributions should be well‐commented. 24 | 25 | * Commit messages should be neatly formatted and descriptive, with a summary line. 26 | 27 | * Commits should be organized into logical units. 28 | 29 | * Verify that your fork passes all tests. 
30 | -------------------------------------------------------------------------------- /.github/issue-template.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: New Issue 3 | about: Report a Bug or Request a Feature 4 | title: '' 5 | labels: '' 6 | assignees: adamkrellenstein 7 | 8 | --- 9 | 10 | **Steps To Reproduce** 11 | If you think you've found a bug, list the steps to reproduce the unexpected behavior: 12 | 1. ... 13 | 14 | **Screenshots and Logfiles** 15 | If applicable, upload screenshots and log files to help explain your problem. Please run everything with the `--verbose` flag whenever possible. 16 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | * [ ] Double-check the spelling and grammar of all strings, code comments, etc. 2 | * [ ] Double-check that all code is deterministic that needs to be 3 | * [ ] Add tests to cover any new or revised logic 4 | * [ ] Ensure that the test suite passes 5 | * [ ] Update the project [release notes](release-notes/) 6 | * [ ] Update the project documentation, as appropriate, with a corresponding Pull Request in the [Documentation repository](https://github.com/CounterpartyXCP/Documentation) 7 | -------------------------------------------------------------------------------- /.github/workflows/bandit_scanner.yml: -------------------------------------------------------------------------------- 1 | name: Bandit Scanner 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Bandit: 13 | permissions: 14 | contents: read # for actions/checkout to fetch code 15 | security-events: write # for github/codeql-action/upload-sarif to upload SARIF results 16 | runs-on: ubuntu-latest 17 | steps: 18 | - uses: 
actions/checkout@v2 19 | - name: Bandit Scan 20 | uses: shundor/python-bandit-scan@9cc5aa4a006482b8a7f91134412df6772dbda22c 21 | with: # optional arguments 22 | exit_zero: true # optional, default is DEFAULT 23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information. 24 | excluded_paths: "*/test/*,*/counterparty-core/tools/*" # optional, default is DEFAULT 25 | skips: "B101" # optional, default is DEFAULT 26 | -------------------------------------------------------------------------------- /.github/workflows/codeql_scanner.yml: -------------------------------------------------------------------------------- 1 | name: Codeql Scanner 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Codeql: 13 | runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} 14 | timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | strategy: 20 | fail-fast: false 21 | matrix: 22 | language: [ 'python' ] 23 | steps: 24 | - name: Checkout repository 25 | uses: actions/checkout@v4 26 | - name: Initialize CodeQL 27 | uses: github/codeql-action/init@v3 28 | with: 29 | languages: ${{ matrix.language }} 30 | - name: Autobuild 31 | uses: github/codeql-action/autobuild@v3 32 | - name: Perform CodeQL Analysis 33 | uses: github/codeql-action/analyze@v3 34 | with: 35 | category: "/language:${{matrix.language}}" 36 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | on: 4 | release: 5 | types: [published] 6 | push: 7 | branches: "**" 8 | 9 | env: 10 | DOCKER_REPO: counterparty/counterparty 11 | DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} 12 | DOCKER_PASSWORD: ${{ 
secrets.DOCKER_PASSWORD }} 13 | 14 | jobs: 15 | Build: 16 | name: Build And Push Docker Image 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | - name: Install Docker 21 | run: | 22 | curl -fsSL https://get.docker.com -o get-docker.sh 23 | sudo sh get-docker.sh 24 | - name: Build, tag, login and push image 25 | run: | 26 | if [[ ${{ github.event_name }} == 'release' ]]; then 27 | export VERSION=v$(cat counterparty-core/counterpartycore/lib/config.py | grep '__version__ =' | awk -F '"' '{print $2}') 28 | elif [[ ${{ github.event_name }} == 'push' && ${{ github.ref }} == 'refs/heads/develop' ]]; then 29 | export VERSION=develop 30 | else 31 | export VERSION=${{ github.sha }} 32 | fi 33 | docker build -t $DOCKER_REPO:$VERSION . 34 | docker tag $DOCKER_REPO:$VERSION $DOCKER_REPO:latest 35 | docker login -u "$DOCKER_USERNAME" -p "$DOCKER_PASSWORD" 36 | docker push $DOCKER_REPO:$VERSION 37 | if [[ ${{ github.event_name }} == 'release' ]]; then 38 | docker push $DOCKER_REPO:latest 39 | echo "Image pushed: $DOCKER_REPO:latest" 40 | fi 41 | echo "Image pushed: $DOCKER_REPO:$VERSION" 42 | -------------------------------------------------------------------------------- /.github/workflows/functionals_test.yml: -------------------------------------------------------------------------------- 1 | name: Functionals Test 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Functionals: 13 | uses: ./.github/workflows/pytest_action.yml 14 | with: 15 | name: Functionals 16 | test-path: functionals 17 | secrets: inherit 18 | -------------------------------------------------------------------------------- /.github/workflows/integrations_test.yml: -------------------------------------------------------------------------------- 1 | name: Integrations Test 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow 
}}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Integrations: 13 | strategy: 14 | fail-fast: false 15 | # define the matrix 16 | matrix: 17 | 18 | # Units tests 19 | include: 20 | # Mainnet tests 21 | 22 | # bootstrap, run the server with --api-only and run 23 | # the locust tests 24 | - name: Mainnet API Load 25 | test-path: integrations/load_test.py 26 | os: Linux-Large-1 27 | only_on_develop: false 28 | no_cov_report: true 29 | 30 | # bootstrap, reparse 1000 blocks, 31 | # rollback 3 checkpoints and catchup 32 | #- name: Mainnet Bootstrap And Catchup 33 | # test-path: integrations/mainnet_test.py 34 | # os: Linux-Large-2 35 | # only_on_develop: true 36 | 37 | # Regtest tests 38 | 39 | # run scenarios on regtest 40 | # testing rollback, reparse, reorg, etc 41 | - name: Regtest Scenarios 42 | test-path: integrations/regtest/scenarios_test.py 43 | install_bitcoin: true 44 | 45 | - name: Taproot Data Envelope Test 46 | test-path: integrations/regtest/taprootdata_test.py 47 | install_bitcoin: true 48 | 49 | # Testnet4 tests 50 | 51 | # bootstrap, reparse 1000 blocks, 52 | # rollback 3 checkpoints and catchup 53 | - name: Testnet4 Bootstrap And Catchup 54 | test-path: integrations/testnet4_test.py 55 | 56 | - name: Testnet4 Start and Shutdown 57 | test-path: integrations/shutdown_test.py 58 | 59 | - name: Testnet4 Rebuild 60 | test-path: integrations/rebuild_test.py 61 | 62 | # Other tests 63 | 64 | # run the docker-compose tests 65 | - name: Docker Compose 66 | test-path: integrations/dockercompose_test.py 67 | 68 | # compare hashes between several servers 69 | # - name: Compare Hashes 70 | # test-path: integrations/comparehashes_test.py 71 | 72 | - name: RSFetcher Test 73 | test-path: integrations/rsfetcher_test.py 74 | 75 | # run pytest_action.yml for the matrix 76 | uses: ./.github/workflows/pytest_action.yml 77 | with: 78 | name: ${{ matrix.name }} 79 | test-path: ${{ matrix.test-path }} 80 | install_bitcoin: ${{ matrix.install_bitcoin || 
false }} 81 | only_on_develop: ${{ matrix.only_on_develop || false }} 82 | os: ${{ matrix.os || 'ubuntu-22.04' }} 83 | no_cov_report: ${{ matrix.no_cov_report || false }} 84 | secrets: inherit 85 | -------------------------------------------------------------------------------- /.github/workflows/licenses_scanner.yml: -------------------------------------------------------------------------------- 1 | name: Licenses Scanner 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Licenses: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: actions/setup-python@v3 17 | with: 18 | python-version: "3.11" 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install license_scanner sh 23 | - name: Analysing dependencies with licence_scanner 24 | run: | 25 | python counterparty-core/tools/checklicences.py 26 | - name: Upload SARIF 27 | uses: github/codeql-action/upload-sarif/@v2 28 | with: 29 | sarif_file: license_scanner.sarif -------------------------------------------------------------------------------- /.github/workflows/property_test.yml: -------------------------------------------------------------------------------- 1 | name: Property Test 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Property: 13 | uses: ./.github/workflows/pytest_action.yml 14 | with: 15 | name: Property Test 16 | test-path: integrations/regtest/property_test.py 17 | install_bitcoin: true 18 | secrets: inherit 19 | -------------------------------------------------------------------------------- /.github/workflows/pylint_scanner.yml: -------------------------------------------------------------------------------- 1 | name: Pylint Scanner 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 
8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Pylint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: actions/setup-python@v3 17 | with: 18 | python-version: "3.11" 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install --upgrade pylint pylint-sarif-unofficial 23 | cd counterparty-rs && pip install -e . && cd .. 24 | cd counterparty-core && pip install -e . && cd .. 25 | - name: Analysing the code with pylint 26 | run: | 27 | pylint2sarif $(git ls-files '*.py' | grep -v counterparty-rs/tests/ | grep -v counterparty-core/counterpartycore/test/ | grep -v counterparty-core/tools/) || true 28 | - name: Upload SARIF 29 | uses: github/codeql-action/upload-sarif/@v2 30 | with: 31 | sarif_file: pylint.sarif 32 | -------------------------------------------------------------------------------- /.github/workflows/pytest_action.yml: -------------------------------------------------------------------------------- 1 | name: Pytest Action 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | name: 7 | type: string 8 | default: "Pytest" 9 | python-version: 10 | type: string 11 | default: "3.11" 12 | os: 13 | type: string 14 | default: "ubuntu-22.04" 15 | test-path: 16 | type: string 17 | required: true 18 | install_bitcoin: 19 | type: boolean 20 | default: false 21 | only_on_develop: 22 | type: boolean 23 | default: false 24 | no_cov_report: 25 | type: boolean 26 | default: false 27 | 28 | jobs: 29 | pytest: 30 | name: ${{ inputs.name }} 31 | runs-on: ${{ inputs.os }} 32 | if: ${{ !inputs.only_on_develop || github.ref == 'refs/heads/develop' }} 33 | steps: 34 | - uses: actions/checkout@v4 35 | 36 | - name: Set up Python 37 | uses: actions/setup-python@v3 38 | with: 39 | python-version: ${{ inputs.python-version }} 40 | 41 | - name: Install Rust toolchain 42 | uses: actions-rs/toolchain@v1 43 | with: 44 | toolchain: stable 45 | profile: minimal 46 
| default: true 47 | 48 | - name: Install dependencies 49 | run: | 50 | pip install --upgrade pytest hatchling hatch pytest-cov coveralls 51 | 52 | - name: Install Bitcoin & Electrs 53 | if: inputs.install_bitcoin 54 | run: | 55 | wget https://bitcoincore.org/bin/bitcoin-core-28.0/bitcoin-28.0-x86_64-linux-gnu.tar.gz 56 | tar -xvf bitcoin-28.0-x86_64-linux-gnu.tar.gz 57 | sudo cp bitcoin-28.0/bin/bitcoin-cli /usr/local/bin/bitcoin-cli 58 | sudo cp bitcoin-28.0/bin/bitcoind /usr/local/bin/bitcoind 59 | sudo cp bitcoin-28.0/bin/bitcoin-wallet /usr/local/bin/bitcoin-wallet 60 | npm install dredd --global 61 | rustup toolchain install 1.83-x86_64-unknown-linux-gnu 62 | git clone https://github.com/mempool/electrs && cd electrs 63 | cargo install --path=. 64 | 65 | - name: Run tests 66 | run: | 67 | cd counterparty-core 68 | hatch run pytest counterpartycore/test/${{ inputs.test-path }} \ 69 | ${{ !inputs.no_cov_report && '-s -vv -x --cov=counterpartycore/lib --cov-report=term-missing --cov-report=' || '' }} 70 | if [ "${{ !inputs.no_cov_report }}" = "true" ]; then 71 | mv .coverage ../ 72 | fi 73 | 74 | - name: Upload coverage 75 | uses: codecov/codecov-action@v5 76 | with: 77 | token: ${{ secrets.CODECOV_TOKEN }} 78 | slug: CounterpartyXCP/counterparty-core -------------------------------------------------------------------------------- /.github/workflows/ruff_scanner.yml: -------------------------------------------------------------------------------- 1 | name: Ruff Scanner 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Ruff: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: chartboost/ruff-action@v1 17 | with: 18 | args: "format --check" 19 | version: 0.9.10 20 | - uses: chartboost/ruff-action@v1 21 | with: 22 | version: 0.9.10 23 | -------------------------------------------------------------------------------- 
/.github/workflows/rust_test.yml: -------------------------------------------------------------------------------- 1 | name: Rust Test 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Cargo: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - name: Install Rust toolchain 18 | uses: actions-rs/toolchain@v1 19 | with: 20 | toolchain: stable 21 | profile: minimal 22 | default: true 23 | 24 | - name: Run tests 25 | run: | 26 | cd counterparty-rs 27 | cargo test -------------------------------------------------------------------------------- /.github/workflows/units_test.yml: -------------------------------------------------------------------------------- 1 | name: Units Test 2 | 3 | on: 4 | push: 5 | branches: "**" 6 | 7 | concurrency: 8 | group: ${{ github.workflow }}-${{ github.ref }} 9 | cancel-in-progress: true 10 | 11 | jobs: 12 | Units: 13 | strategy: 14 | fail-fast: false 15 | matrix: 16 | test-path: ["units"] 17 | os: ["ubuntu-22.04", "macos-14"] 18 | python-version: ["3.10", "3.11", "3.12", "3.13"] 19 | uses: ./.github/workflows/pytest_action.yml 20 | with: 21 | name: Units (${{ matrix.os }} - Python ${{ matrix.python-version }}) 22 | test-path: ${{ matrix.test-path }} 23 | os: ${{ matrix.os || 'ubuntu-22.04' }} 24 | python-version: ${{ matrix.python-version || '3.11' }} 25 | secrets: inherit 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | env 2 | venv 3 | docs/_build/* 4 | target/ 5 | typings/ 6 | pyrightconfig.json 7 | .DS_Store 8 | test_dbs/ 9 | 10 | profile.txt 11 | 12 | # precompiled python 13 | *.pyc 14 | 15 | # files generated by py.test 16 | /counterpartycore/test/fixtures/scenarios/*.new.* 17 | .coverage 18 | 19 | # Compiled python modules. 
20 | *.pyc 21 | 22 | # Setuptools distribution folder. 23 | dist/ 24 | 25 | # Setuptools build folder. 26 | /build/ 27 | 28 | # Python egg metadata, regenerated from source files by setuptools. 29 | /*.egg-info 30 | 31 | # Virtualenv folders 32 | /bin/ 33 | /lib/ 34 | /include/ 35 | 36 | # Cache folders 37 | .cache 38 | 39 | .python-version 40 | 41 | .eggs 42 | .env 43 | .env.leave 44 | .venv/ 45 | test-results/ 46 | 47 | # cursor 48 | ._* 49 | 50 | .hypothesis/ 51 | .coverage.* -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/astral-sh/ruff-pre-commit 3 | rev: v0.9.10 4 | hooks: 5 | - id: ruff 6 | args: [ --fix ] 7 | - id: ruff-format 8 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # docker build -t counterparty . 2 | # docker run --rm counterparty counterparty-server -h 3 | 4 | FROM ubuntu:22.04 5 | 6 | RUN apt update 7 | # install dependencies 8 | RUN apt install -y python3 python3-dev python3-pip libleveldb-dev curl gnupg libclang-dev pkg-config libssl-dev 9 | 10 | # install rust 11 | RUN curl https://sh.rustup.rs -sSf | sh -s -- -y 12 | ENV PATH="/root/.cargo/bin:${PATH}" 13 | 14 | # install maturin 15 | RUN pip3 install maturin 16 | 17 | # copy README 18 | COPY README.md /README.md 19 | 20 | # install counterparty-rs 21 | COPY ./counterparty-rs /counterparty-rs 22 | WORKDIR /counterparty-rs 23 | RUN pip3 install . 24 | 25 | # install counterparty-core 26 | COPY ./counterparty-core /counterparty-core 27 | WORKDIR /counterparty-core 28 | RUN pip3 install . 
29 | 30 | ENTRYPOINT [ "counterparty-server"] 31 | CMD [ "start" ] 32 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2013-Present Counterparty Developers 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # Codebase 2 | - [ ] Update `VERSION_` variables in `lib/config.py` 3 | - [ ] Update `protocol_changes.json` (as necessary) 4 | - [ ] Update Counterparty package versions in the `requirements.txt` files 5 | - [ ] Update Counterparty Docker images versions in the `docker-compose.yml` files 6 | - [ ] Review all open pull requests 7 | - [ ] Write release notes 8 | - [ ] Add a checkpoint verified on all supported versions 9 | - [ ] Update the list of servers to compare hashes with (in `test/compare_hashes_test.py`) 10 | - [ ] Create pull request against `master` 11 | - [ ] Ensure all tests pass in CI 12 | - [ ] Merge PR into `master` 13 | - [ ] Tag and sign release, copying release notes from the codebase 14 | - [ ] Rebase `gh-pages` against `master` 15 | - [ ] Upload (signed) package to PyPi (INACTIVE) 16 | * `sudo python3 setup.py sdist build` 17 | * `sudo python3 setup.py sdist upload -r pypi` 18 | - [ ] Publish bootstrap files 19 | - [ ] Publish Docker images 20 | - [ ] Update documentation 21 | 22 | 23 | # Announcements 24 | 25 | - [ ] [Official Forums](https://forums.counterparty.io/t/new-version-announcements-counterparty-and-counterpartyd/363) 26 | - [ ] Mailing list 27 | - [ ] Telegram 28 | - [ ] Twitter 29 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Reporting a Vulnerability 4 | If you believe that you have discovered a security vulnerability in Counterparty Core or any related repositories, please e-mail admin@counterparty.io with a bug report and you will be answered promptly. 
5 | -------------------------------------------------------------------------------- /counterparty-core/.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | concurrency = multiprocessing,thread 3 | parallel = true 4 | sigterm = true 5 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/__init__.py: -------------------------------------------------------------------------------- 1 | from . import cli # noqa F401 2 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/lib/api/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/caches.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | 4 | from counterpartycore.lib import config 5 | from counterpartycore.lib.utils import database, helpers 6 | 7 | logger = logging.getLogger(config.LOGGER_NAME) 8 | 9 | 10 | class AddressEventsCache(metaclass=helpers.SingletonMeta): 11 | def __init__(self) -> None: 12 | start_time = time.time() 13 | logger.debug("Initialising address events cache...") 14 | self.cache_db = database.get_db_connection(":memory:", read_only=False, check_wal=False) 
15 | self.cache_db.execute("ATTACH DATABASE ? AS state_db", (config.STATE_DATABASE,)) 16 | self.cache_db.execute(""" 17 | CREATE TABLE address_events ( 18 | address TEXT, 19 | event_index INTEGER 20 | ); 21 | INSERT INTO address_events (address, event_index) SELECT address, event_index FROM state_db.address_events; 22 | CREATE INDEX address_events_address_idx ON address_events (address); 23 | CREATE INDEX address_events_event_index_idx ON address_events (event_index); 24 | """) 25 | 26 | duration = time.time() - start_time 27 | logger.debug("Address events cache initialised in %.2f seconds", duration) 28 | 29 | def insert(self, address, event_index): 30 | self.cache_db.execute("INSERT INTO address_events VALUES (?, ?)", (address, event_index)) 31 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/healthz.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | 4 | import flask 5 | 6 | from counterpartycore.lib import ( 7 | backend, 8 | config, 9 | exceptions, 10 | ledger, 11 | ) 12 | from counterpartycore.lib.api import composer 13 | from counterpartycore.lib.ledger.currentstate import CurrentState 14 | from counterpartycore.lib.utils import helpers 15 | 16 | logger = logging.getLogger(config.LOGGER_NAME) 17 | 18 | 19 | def check_last_parsed_block(db, blockcount): 20 | """Checks database to see if is caught up with backend.""" 21 | last_block = ledger.blocks.get_last_block(db) 22 | if last_block is None: 23 | raise exceptions.DatabaseError( 24 | f"{config.XCP_NAME} database is behind backend." 
25 | ) # No blocks in the database 26 | if time.time() - last_block["block_time"] < 60: 27 | return 28 | if CurrentState().current_block_index() + 1 < blockcount: 29 | raise exceptions.DatabaseError(f"{config.XCP_NAME} database is behind backend.") 30 | logger.trace("API Server - Database state check passed.") 31 | 32 | 33 | def healthz_light(db): 34 | latest_block_index = backend.bitcoind.getblockcount() 35 | check_last_parsed_block(db, latest_block_index) 36 | 37 | 38 | def healthz_heavy(db): 39 | composer.compose_transaction( 40 | db, 41 | name="send", 42 | params={ 43 | "source": config.UNSPENDABLE, 44 | "destination": config.UNSPENDABLE, 45 | "asset": config.XCP, 46 | "quantity": 600, 47 | }, 48 | construct_parameters={ 49 | "validate": False, 50 | "allow_unconfirmed_inputs": True, 51 | "exact_fee": 300, 52 | "inputs_set": "15d26ce17ef81cf6a12bf5fc0a62940eda3c1f82bd14adcc1e7b668fa3b67487:0:600:76a914818895f3dc2c178629d3d2d8fa3ec4a3f817982188ac", 53 | }, 54 | ) 55 | 56 | 57 | def healthz(db, check_type: str = "light"): 58 | try: 59 | if check_type == "heavy": 60 | healthz_light(db) 61 | healthz_heavy(db) 62 | else: 63 | healthz_light(db) 64 | except Exception as e: # pylint: disable=broad-except 65 | logger.exception(e) 66 | logger.error("Health check failed: %s", e) 67 | return False 68 | return True 69 | 70 | 71 | def handle_healthz_route(db, check_type: str = "light"): 72 | """ 73 | Health check route. 74 | :param check_type: Type of health check to perform. Options are 'light' and 'heavy' (e.g. light) 75 | """ 76 | msg, code = "Healthy", 200 77 | if not healthz(db, check_type): 78 | msg, code = "Unhealthy", 503 79 | result = {"result": msg, "success": code == 200} 80 | if code != 200: 81 | result["error"] = msg 82 | return flask.Response(helpers.to_json(result), code, mimetype="application/json") 83 | 84 | 85 | def check_server_health(db, check_type: str = "light"): 86 | """ 87 | Health check route. 88 | :param check_type: Type of health check to perform. 
Options are 'light' and 'heavy' (e.g. light) 89 | """ 90 | if not healthz(db, check_type): 91 | return {"status": "Unhealthy"} 92 | return {"status": "Healthy"} 93 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0001.create_and_populate_address_events.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0001.populate_address_events.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from counterpartycore.lib.api.apiwatcher import EVENTS_ADDRESS_FIELDS, update_address_events 9 | from yoyo import step 10 | 11 | logger = logging.getLogger(config.LOGGER_NAME) 12 | 13 | 14 | def dict_factory(cursor, row): 15 | fields = [column[0] for column in cursor.description] 16 | return dict(zip(fields, row)) 17 | 18 | 19 | def apply(db): 20 | start_time = time.time() 21 | logger.debug("Populating the `address_events` table...") 22 | 23 | if hasattr(db, "row_factory"): 24 | db.row_factory = dict_factory 25 | 26 | attached = ( 27 | db.execute( 28 | "SELECT COUNT(*) AS count FROM pragma_database_list WHERE name = ?", ("ledger_db",) 29 | ).fetchone()["count"] 30 | > 0 31 | ) 32 | if not attached: 33 | db.execute("ATTACH DATABASE ? 
AS ledger_db", (config.DATABASE,)) 34 | 35 | cursor = db.cursor() 36 | 37 | cursor.execute(""" 38 | CREATE TABLE address_events ( 39 | address TEXT, 40 | event_index INTEGER, 41 | block_index INTEGER 42 | ) 43 | """) 44 | 45 | event_names = list(EVENTS_ADDRESS_FIELDS.keys()) 46 | placeholders = ", ".join(["?"] * len(event_names)) 47 | 48 | sql = f""" 49 | SELECT event, bindings, message_index, block_index 50 | FROM ledger_db.messages WHERE event IN ({placeholders}) 51 | ORDER BY message_index 52 | """ # noqa S608 # nosec B608 53 | 54 | cursor.execute(sql, event_names) 55 | 56 | inserted = 0 57 | for event in cursor: 58 | update_address_events(db, event, no_cache=True) 59 | inserted += 1 60 | if inserted % 1000000 == 0: 61 | logger.trace(f"Inserted {inserted} address events") 62 | 63 | cursor.execute("CREATE INDEX address_events_address_idx ON address_events (address)") 64 | cursor.execute("CREATE INDEX address_events_event_index_idx ON address_events (event_index)") 65 | cursor.execute("CREATE INDEX address_events_block_index_idx ON address_events (block_index)") 66 | 67 | cursor.close() 68 | 69 | logger.debug("Populated `address_events` table in %.2f seconds", time.time() - start_time) 70 | 71 | 72 | def rollback(db): 73 | db.execute("DROP TABLE address_events") 74 | 75 | 76 | if not __name__.startswith("apsw_"): 77 | steps = [step(apply, rollback)] 78 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0002.create_and_populate_parsed_events.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0002.create_and_populate_parsed_events.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from yoyo import step 9 | 10 | logger = logging.getLogger(config.LOGGER_NAME) 11 | 12 | __depends__ = {"0001.create_and_populate_address_events"} 13 | 14 | 15 | def 
dict_factory(cursor, row): 16 | fields = [column[0] for column in cursor.description] 17 | return dict(zip(fields, row)) 18 | 19 | 20 | def apply(db): 21 | start_time = time.time() 22 | logger.debug("Populating the `parsed_events` table...") 23 | 24 | if hasattr(db, "row_factory"): 25 | db.row_factory = dict_factory 26 | 27 | attached = ( 28 | db.execute( 29 | "SELECT COUNT(*) AS count FROM pragma_database_list WHERE name = ?", ("ledger_db",) 30 | ).fetchone()["count"] 31 | > 0 32 | ) 33 | if not attached: 34 | db.execute("ATTACH DATABASE ? AS ledger_db", (config.DATABASE,)) 35 | 36 | sqls = [ 37 | """ 38 | CREATE TABLE parsed_events( 39 | event_index INTEGER, 40 | event TEXT, 41 | event_hash TEXT, 42 | block_index INTEGER 43 | ); 44 | """, 45 | """ 46 | INSERT INTO parsed_events (event_index, event, event_hash, block_index) 47 | SELECT message_index AS event_index, event, event_hash, block_index 48 | FROM ledger_db.messages 49 | """, 50 | """ 51 | CREATE UNIQUE INDEX parsed_events_event_index_idx ON parsed_events (event_index) 52 | """, 53 | """ 54 | CREATE INDEX parsed_events_event_idx ON parsed_events (event) 55 | """, 56 | """ 57 | CREATE INDEX parsed_events_block_index_idx ON parsed_events (block_index) 58 | """, 59 | ] 60 | for sql in sqls: 61 | db.execute(sql) 62 | 63 | logger.debug("Populated the `parsed_events` table in %.2f seconds", time.time() - start_time) 64 | 65 | 66 | def rollback(db): 67 | db.execute("DROP TABLE parsed_events") 68 | 69 | 70 | if not __name__.startswith("apsw_"): 71 | steps = [step(apply, rollback)] 72 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0003.create_and_populate_all_expirations.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0003.create_and_populate_all_expirations.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib 
import config 8 | from yoyo import step 9 | 10 | logger = logging.getLogger(config.LOGGER_NAME) 11 | 12 | __depends__ = {"0002.create_and_populate_parsed_events"} 13 | 14 | 15 | def dict_factory(cursor, row): 16 | fields = [column[0] for column in cursor.description] 17 | return dict(zip(fields, row)) 18 | 19 | 20 | def apply(db): 21 | start_time = time.time() 22 | logger.debug("Populating the `all_expirations` table...") 23 | 24 | if hasattr(db, "row_factory"): 25 | db.row_factory = dict_factory 26 | 27 | attached = ( 28 | db.execute( 29 | "SELECT COUNT(*) AS count FROM pragma_database_list WHERE name = ?", ("ledger_db",) 30 | ).fetchone()["count"] 31 | > 0 32 | ) 33 | if not attached: 34 | db.execute("ATTACH DATABASE ? AS ledger_db", (config.DATABASE,)) 35 | 36 | sqls = [ 37 | """ 38 | CREATE TABLE all_expirations( 39 | type TEXT, 40 | object_id TEXT, 41 | block_index INTEGER 42 | ); 43 | """, 44 | """ 45 | INSERT INTO all_expirations (object_id, block_index, type) 46 | SELECT order_hash AS object_id, block_index, 'order' AS type 47 | FROM ledger_db.order_expirations 48 | """, 49 | """ 50 | INSERT INTO all_expirations (object_id, block_index, type) 51 | SELECT order_match_id AS object_id, block_index, 'order_match' AS type 52 | FROM ledger_db.order_match_expirations 53 | """, 54 | """ 55 | INSERT INTO all_expirations (object_id, block_index, type) 56 | SELECT bet_hash AS object_id, block_index, 'bet' AS type 57 | FROM ledger_db.bet_expirations 58 | """, 59 | """ 60 | INSERT INTO all_expirations (object_id, block_index, type) 61 | SELECT bet_match_id AS object_id, block_index, 'bet_match' AS type 62 | FROM ledger_db.bet_match_expirations 63 | """, 64 | """ 65 | INSERT INTO all_expirations (object_id, block_index, type) 66 | SELECT rps_hash AS object_id, block_index, 'rps' AS type 67 | FROM ledger_db.rps_expirations 68 | """, 69 | """ 70 | INSERT INTO all_expirations (object_id, block_index, type) 71 | SELECT rps_match_id AS object_id, block_index, 'rps_match' AS 
type 72 | FROM ledger_db.rps_match_expirations 73 | """, 74 | """ 75 | CREATE INDEX all_expirations_type_idx ON all_expirations (type) 76 | """, 77 | """ 78 | CREATE INDEX all_expirations_block_index_idx ON all_expirations (block_index) 79 | """, 80 | ] 81 | for sql in sqls: 82 | db.execute(sql) 83 | 84 | logger.debug("Populated the `all_expirations` table in %.2f seconds", time.time() - start_time) 85 | 86 | 87 | def rollback(db): 88 | db.execute("DROP TABLE all_expirations") 89 | 90 | 91 | if not __name__.startswith("apsw_"): 92 | steps = [step(apply, rollback)] 93 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0005.create_and_populate_events_count.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0005.create_and_populate_events_counts.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from yoyo import step 9 | 10 | logger = logging.getLogger(config.LOGGER_NAME) 11 | 12 | __depends__ = {"0004.create_and_populate_assets_info"} 13 | 14 | 15 | def dict_factory(cursor, row): 16 | fields = [column[0] for column in cursor.description] 17 | return dict(zip(fields, row)) 18 | 19 | 20 | def apply(db): 21 | start_time = time.time() 22 | logger.debug("Populating the `events_count` table...") 23 | 24 | if hasattr(db, "row_factory"): 25 | db.row_factory = dict_factory 26 | 27 | attached = ( 28 | db.execute( 29 | "SELECT COUNT(*) AS count FROM pragma_database_list WHERE name = ?", ("ledger_db",) 30 | ).fetchone()["count"] 31 | > 0 32 | ) 33 | if not attached: 34 | db.execute("ATTACH DATABASE ? 
AS ledger_db", (config.DATABASE,)) 35 | 36 | db.execute(""" 37 | CREATE TABLE events_count( 38 | event TEXT PRIMARY KEY, 39 | count INTEGER 40 | ); 41 | """) 42 | 43 | db.execute(""" 44 | INSERT INTO events_count (event, count) 45 | SELECT event, COUNT(*) AS counter 46 | FROM ledger_db.messages 47 | GROUP BY event; 48 | """) 49 | 50 | db.execute("""CREATE INDEX events_count_count_idx ON events_count (count)""") 51 | 52 | logger.debug("Populated the `events_count` table in %.2f seconds", time.time() - start_time) 53 | 54 | 55 | def rollback(db): 56 | db.execute("DROP TABLE events_count") 57 | 58 | 59 | if not __name__.startswith("apsw_"): 60 | steps = [step(apply, rollback)] 61 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0008.create_config_table.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0008.create_config_table 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from counterpartycore.lib.utils import database 9 | from yoyo import step 10 | 11 | logger = logging.getLogger(config.LOGGER_NAME) 12 | 13 | __depends__ = {"0007.create_views"} 14 | 15 | 16 | def apply(db): 17 | start_time = time.time() 18 | logger.debug("Creating `config` table...") 19 | 20 | sql = """ 21 | CREATE TABLE config ( 22 | name TEXT PRIMARY KEY, 23 | value TEXT 24 | ) 25 | """ 26 | db.execute(sql) 27 | db.execute("CREATE INDEX config_config_name_idx ON config (name)") 28 | 29 | database.update_version(db) 30 | 31 | logger.debug("`config` table created in %.2f seconds", time.time() - start_time) 32 | 33 | 34 | def rollback(db): 35 | db.execute("DROP TABLE config") 36 | 37 | 38 | if not __name__.startswith("apsw_"): 39 | steps = [step(apply, rollback)] 40 | -------------------------------------------------------------------------------- 
/counterparty-core/counterpartycore/lib/api/migrations/0009.create_and_populate_transaction_types_count.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0009.create_and_populate_transaction_types_count.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from yoyo import step 9 | 10 | logger = logging.getLogger(config.LOGGER_NAME) 11 | 12 | __depends__ = {"0008.create_config_table"} 13 | 14 | 15 | def dict_factory(cursor, row): 16 | fields = [column[0] for column in cursor.description] 17 | return dict(zip(fields, row)) 18 | 19 | 20 | def apply(db): 21 | start_time = time.time() 22 | logger.debug("Populating the `transaction_types_count` table...") 23 | 24 | if hasattr(db, "row_factory"): 25 | db.row_factory = dict_factory 26 | 27 | attached = ( 28 | db.execute( 29 | "SELECT COUNT(*) AS count FROM pragma_database_list WHERE name = ?", ("ledger_db",) 30 | ).fetchone()["count"] 31 | > 0 32 | ) 33 | if not attached: 34 | db.execute("ATTACH DATABASE ? 
AS ledger_db", (config.DATABASE,)) 35 | 36 | db.execute(""" 37 | CREATE TABLE transaction_types_count( 38 | transaction_type TEXT PRIMARY KEY, 39 | count INTEGER 40 | ); 41 | """) 42 | 43 | db.execute(""" 44 | INSERT INTO transaction_types_count (transaction_type, count) 45 | SELECT transaction_type, COUNT(*) AS counter 46 | FROM ledger_db.transactions 47 | GROUP BY transaction_type; 48 | """) 49 | 50 | db.execute( 51 | """CREATE INDEX transaction_types_count_count_idx ON transaction_types_count (count)""" 52 | ) 53 | 54 | logger.debug( 55 | "Populated the `transaction_types_count` table in %.2f seconds", time.time() - start_time 56 | ) 57 | 58 | 59 | def rollback(db): 60 | db.execute("DROP TABLE transaction_types_count") 61 | 62 | 63 | if not __name__.startswith("apsw_"): 64 | steps = [step(apply, rollback)] 65 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/api/migrations/0010.fix_bet_match_resolution_event_name.py: -------------------------------------------------------------------------------- 1 | # 2 | # file: counterpartycore/lib/api/migrations/0010.fix_bet_match_resolution_event_name.py 3 | # 4 | import logging 5 | import time 6 | 7 | from counterpartycore.lib import config 8 | from yoyo import step 9 | 10 | logger = logging.getLogger(config.LOGGER_NAME) 11 | 12 | __depends__ = {"0008.create_config_table"} 13 | 14 | 15 | def dict_factory(cursor, row): 16 | fields = [column[0] for column in cursor.description] 17 | return dict(zip(fields, row)) 18 | 19 | 20 | def apply(db): 21 | start_time = time.time() 22 | logger.debug("Fix `BET_MATCH_RESOLUTION` event name...") 23 | 24 | db.execute( 25 | "UPDATE events_count SET event = 'BET_MATCH_RESOLUTION' WHERE event = 'BET_MATCH_RESOLUTON'" 26 | ) 27 | 28 | logger.debug( 29 | "`BET_MATCH_RESOLUTION` event name fixed in %.2f seconds", time.time() - start_time 30 | ) 31 | 32 | 33 | def rollback(db): 34 | pass 35 | 36 | 37 | if not
__name__.startswith("apsw_"): 38 | steps = [step(apply, rollback)] 39 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/backend/__init__.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.lib import config, exceptions 2 | 3 | from . import bitcoind, electrs 4 | 5 | 6 | def list_unspent(source, allow_unconfirmed_inputs): 7 | # first try with Bitcoin Core 8 | unspent_list = bitcoind.list_unspent(source, allow_unconfirmed_inputs) 9 | if len(unspent_list) > 0: 10 | return unspent_list 11 | 12 | # then try with Electrs 13 | if config.ELECTRS_URL is None: 14 | raise exceptions.ComposeError( 15 | "No UTXOs found with Bitcoin Core and Electrs is not configured, use the `inputs_set` parameter to provide UTXOs" 16 | ) 17 | return electrs.list_unspent(source, allow_unconfirmed_inputs) 18 | 19 | 20 | def search_pubkey(source, tx_hashes=None): 21 | # first search with Bitcoin Core 22 | if isinstance(tx_hashes, list) and len(tx_hashes) > 0: 23 | pubkey = bitcoind.search_pubkey_in_transactions(source, tx_hashes) 24 | if pubkey is not None: 25 | return pubkey 26 | # then search with Electrs 27 | if config.ELECTRS_URL is None: 28 | return None 29 | pubkey = electrs.search_pubkey(source) 30 | if pubkey is not None: 31 | return pubkey 32 | return None 33 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/cli/__init__.py: -------------------------------------------------------------------------------- 1 | from . import main # noqa F401 2 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import ( 2 | balances, # noqa F401 3 | blocks, # noqa F401 4 | caches, # noqa F401 5 | currentstate, # noqa F401 6 | events, # noqa F401 7 | issuances, # noqa F401 8 | markets, # noqa F401 9 | other, # noqa F401 10 | supplies, # noqa F401 11 | ) 12 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/backendheight.py: -------------------------------------------------------------------------------- 1 | import ctypes 2 | import logging 3 | import threading 4 | import time 5 | from multiprocessing import Value 6 | 7 | from counterpartycore.lib import backend, config 8 | 9 | logger = logging.getLogger(config.LOGGER_NAME) 10 | 11 | BACKEND_HEIGHT_REFRSH_INTERVAL = 3 12 | 13 | 14 | class BackendHeight(threading.Thread): 15 | def __init__(self): 16 | threading.Thread.__init__(self, name="BackendHeight") 17 | self.last_check = 0 18 | self.stop_event = threading.Event() 19 | self.shared_backend_height = Value(ctypes.c_ulong, 0) 20 | self.refresh() 21 | 22 | def run(self): 23 | if config.API_ONLY: 24 | return 25 | try: 26 | while not self.stop_event.is_set(): 27 | if time.time() - self.last_check > BACKEND_HEIGHT_REFRSH_INTERVAL: 28 | self.refresh() 29 | self.stop_event.wait(0.1) 30 | finally: 31 | logger.info("BackendHeight Thread stopped.") 32 | 33 | def refresh(self): 34 | if config.API_ONLY: 35 | return 36 | logger.trace("Updating backend height...") 37 | tip = backend.bitcoind.get_chain_tip() 38 | block_count = backend.bitcoind.getblockcount() 39 | value = int(tip * 10e8 + block_count) # let use only one shared value 40 | self.shared_backend_height.value = value 41 | self.last_check = time.time() 42 | 43 | def stop(self): 44 | self.stop_event.set() 45 | logger.info("Stopping BackendHeight thread...") 46 | self.join() 47 | -------------------------------------------------------------------------------- 
/counterparty-core/counterpartycore/lib/ledger/migrations/0002.create_mempool_transactions_table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE IF NOT EXISTS mempool_transactions( 2 | tx_index INTEGER UNIQUE, 3 | tx_hash TEXT UNIQUE, 4 | block_index INTEGER, 5 | block_hash TEXT, 6 | block_time INTEGER, 7 | source TEXT, 8 | destination TEXT, 9 | btc_amount INTEGER, 10 | fee INTEGER, 11 | data BLOB, 12 | supported BOOL DEFAULT 1, 13 | utxos_info TEXT, transaction_type TEXT); 14 | CREATE INDEX IF NOT EXISTS mempool_transactions_block_index_idx ON mempool_transactions (block_index); 15 | CREATE INDEX IF NOT EXISTS mempool_transactions_tx_index_idx ON mempool_transactions (tx_index); 16 | CREATE INDEX IF NOT EXISTS mempool_transactions_tx_hash_idx ON mempool_transactions (tx_hash); 17 | CREATE INDEX IF NOT EXISTS mempool_transactions_block_index_tx_index_idx ON mempool_transactions (block_index, tx_index); 18 | CREATE INDEX IF NOT EXISTS mempool_transactions_tx_index_tx_hash_block_index_idx ON mempool_transactions (tx_index, tx_hash, block_index); 19 | CREATE INDEX IF NOT EXISTS mempool_transactions_source_idx ON mempool_transactions (source); 20 | 21 | CREATE VIEW IF NOT EXISTS all_transactions AS 22 | SELECT 23 | tx_index, 24 | tx_hash, 25 | block_index, 26 | block_hash, 27 | block_time, 28 | source, 29 | destination, 30 | btc_amount, 31 | fee, 32 | data, 33 | supported, 34 | utxos_info, 35 | transaction_type, 36 | FALSE as confirmed 37 | FROM mempool_transactions 38 | UNION ALL 39 | SELECT 40 | tx_index, 41 | tx_hash, 42 | block_index, 43 | block_hash, 44 | block_time, 45 | source, 46 | destination, 47 | btc_amount, 48 | fee, 49 | data, 50 | supported, 51 | utxos_info, 52 | transaction_type, 53 | TRUE as confirmed 54 | FROM transactions; -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/migrations/0003.add_indexes_in_sends_table.sql: 
-------------------------------------------------------------------------------- 1 | CREATE INDEX IF NOT EXISTS sends_send_type ON sends (send_type); 2 | CREATE INDEX IF NOT EXISTS sends_source_address ON sends (source_address); 3 | CREATE INDEX IF NOT EXISTS sends_destination_address ON sends (destination_address); -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/migrations/0005.add_max_mint_by_address.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE fairminters ADD COLUMN max_mint_per_address INTEGER; -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/migrations/0006.fix_bet_match_resolution_event_name.sql: -------------------------------------------------------------------------------- 1 | 2 | DROP TRIGGER IF EXISTS block_update_messages; 3 | 4 | UPDATE messages SET event = 'BET_MATCH_RESOLUTION' WHERE event = 'BET_MATCH_RESOLUTON'; 5 | 6 | CREATE TRIGGER IF NOT EXISTS block_update_messages 7 | BEFORE UPDATE ON messages BEGIN 8 | SELECT RAISE(FAIL, "UPDATES NOT ALLOWED"); 9 | END; -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/ledger/migrations/0007.add_mime_type_field.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE fairminters ADD COLUMN mime_type TEXT DEFAULT "text/plain"; 2 | ALTER TABLE issuances ADD COLUMN mime_type TEXT DEFAULT "text/plain"; 3 | ALTER TABLE broadcasts ADD COLUMN mime_type TEXT DEFAULT "text/plain"; -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/messages/__init__.py: -------------------------------------------------------------------------------- 1 | # Each message gets identified by its first byte. 
2 | # The only exception is send.version1 whose message ID of 0 spans the first four bytes. 3 | # 4 | # Used IDs for messages: 5 | # 6 | # 0 send.version1 7 | # 1 send.enhanced_send 8 | # 10 order 9 | # 11 btcpay 10 | # 12 dispenser 11 | # 20 issuance 12 | # 21 issuance.subasset 13 | # 30 broadcast 14 | # 40 bet 15 | # 50 dividend 16 | # 60 burn 17 | # 70 cancel 18 | # 80 rps 19 | # 81 rpsresolve 20 | # 90 fairminter 21 | # 100 utxo attach and detach 22 | # 110 destroy 23 | # 24 | # Allocate each new type of message within the "logical" 10 number boundary 25 | # Only allocate a 10 number boundary if it makes sense 26 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/messages/data/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/lib/messages/data/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/messages/rps.py: -------------------------------------------------------------------------------- 1 | """ 2 | Transaction 1: rps (Open the game) 3 | source: address used to play the game 4 | wager: amount to bet 5 | move_random_hash: sha256(sha256(move + random)) (stored as bytes, 16 bytes random) 6 | possible_moves: arbitrary odd number >= 3 7 | expiration: how many blocks the game is valid 8 | 9 | Matching conditions: 10 | - tx0_possible_moves = tx1_possible_moves 11 | - tx0_wager = tx1_wager 12 | 13 | Transaction 2: rpsresolve (Resolve the game) 14 | source: same address as first transaction 15 | random: 16 bytes random 16 | move: the move number 17 | rps_match_id: matching id 18 | """ 19 | 20 | import json 21 | import logging 22 | import os 23 | 24 | from counterpartycore.lib import config, ledger 25 | from counterpartycore.lib.parser import
protocol 26 | 27 | logger = logging.getLogger(config.LOGGER_NAME) 28 | 29 | # possible_moves wager move_random_hash expiration 30 | ID = 80 31 | 32 | CURR_DIR = os.path.dirname(os.path.realpath(__file__)) 33 | with open(os.path.join(CURR_DIR, "data", "rps_events.json"), encoding="utf-8") as f: 34 | RPS_EVENTS = json.load(f) 35 | 36 | 37 | def replay_events(db, key): 38 | if protocol.is_test_network(): 39 | return 40 | events = RPS_EVENTS.get(key) 41 | if events: 42 | ledger.events.replay_events(db, events) 43 | 44 | 45 | def parse(db, tx): 46 | logger.debug( 47 | "Replay RPS events for transaction %(tx_hash)s", 48 | { 49 | "tx_hash": tx["tx_hash"], 50 | }, 51 | ) 52 | replay_events(db, tx["tx_hash"]) 53 | 54 | 55 | def expire(db, block_index): 56 | logger.trace( 57 | "Replay RPS events for block %(block_index)s", 58 | { 59 | "block_index": block_index if block_index != config.MEMPOOL_BLOCK_INDEX else "mempool", 60 | }, 61 | ) 62 | replay_events(db, str(block_index)) 63 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/messages/rpsresolve.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from counterpartycore.lib import config 4 | 5 | from . 
# --- counterpartycore/lib/messages/rpsresolve.py ---
import logging

from counterpartycore.lib import config

from . import rps

logger = logging.getLogger(config.LOGGER_NAME)

# move random rps_match_id
ID = 81


def parse(db, tx):
    """Parse an rpsresolve transaction by delegating to rps.parse (event replay)."""
    rps.parse(db, tx)


# --- counterpartycore/lib/monitors/sentry.py ---
import os

import sentry_sdk

from counterpartycore.lib.monitors.telemetry.collectors.base import TelemetryCollectorBase
from counterpartycore.lib.utils import database

environment = os.environ.get("SENTRY_ENVIRONMENT", "development")
release = os.environ.get("SENTRY_RELEASE", config.__version__)


def before_send(event, _hint):
    """Enrich a Sentry event with node telemetry tags before it is sent.

    Bug fixes:
    - the read-only DB connection is now closed even if collection raises;
    - `TelemetryCollectorBase.collect()` returns None when no last block is
      available yet; previously that crashed with a TypeError on subscripting.
    """
    db = database.get_connection(read_only=True)
    try:
        data = TelemetryCollectorBase(db).collect()
    finally:
        db.close()

    # No telemetry available yet: send the event unmodified instead of crashing.
    if data is None:
        return event

    event["tags"] = event.get("tags", [])

    event["tags"].append(["core_version", data["version"]])
    event["tags"].append(["docker", data["dockerized"]])
    event["tags"].append(["network", data["network"]])
    event["tags"].append(["force_enabled", data["force_enabled"]])

    event["extra"] = event.get("extra", {})
    event["extra"]["last_block"] = data["last_block"]

    return event


def before_send_transaction(event, _hint):
    """Drop the RedirectToRpcV1 transaction; pass every other transaction through."""
    if event.get("transaction") == "RedirectToRpcV1":
        return None
    return event


def init():
    """Initialize the Sentry SDK. No-op when SENTRY_DSN is not set."""
    dsn = os.environ.get("SENTRY_DSN")
    if not dsn:
        return

    sample_rate = float(os.environ.get("SENTRY_SAMPLE_RATE", 0.01))

    logger.info("Initializing Sentry with %s and sample rate of %s...", dsn, sample_rate)

    sentry_sdk.init(
        dsn=dsn,
        environment=environment,
        release=release,
        traces_sample_rate=sample_rate,
        before_send=before_send,
        before_send_transaction=before_send_transaction,
    )


# --- counterpartycore/lib/monitors/slack.py ---
import requests

from counterpartycore.lib.utils import helpers


def send_slack_message(message):
    """Post `message` to the Slack webhook configured in SLACK_HOOK.

    Returns True on HTTP 200; False when the hook is unset or the post fails.
    """
    webhook_url = os.environ.get("SLACK_HOOK")
    if not webhook_url:
        return False

    # Append the current commit hash to the message when available.
    final_message = message
    current_commit = helpers.get_current_commit_hash()
    if current_commit:
        final_message = f"{message}\n{current_commit}"

    response = requests.post(
        webhook_url,
        json={"text": final_message},
        timeout=10,
    )

    if response.status_code != 200:
        logger.error("Error sending message: %s, %s", response.status_code, response.text)
        return False

    return True
| ) 33 | 34 | # Check if the request was successful 35 | if response.status_code != 200: 36 | logger.error("Error sending message: %s, %s", response.status_code, response.text) 37 | return False 38 | 39 | return True 40 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/clients/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/lib/monitors/telemetry/clients/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/clients/influxdb.py: -------------------------------------------------------------------------------- 1 | import influxdb_client 2 | from counterpartycore.lib import config 3 | from counterpartycore.lib.monitors.telemetry.util import ID 4 | from influxdb_client.client.write_api import SYNCHRONOUS 5 | 6 | from .interface import TelemetryClientI 7 | 8 | 9 | class TelemetryClientInfluxDB(TelemetryClientI): 10 | def __init__(self): 11 | # UUID for life of process 12 | 13 | self.__id = ID().id 14 | 15 | self.__influxdb_client = influxdb_client.InfluxDBClient( 16 | url=config.INFLUX_DB_URL, 17 | token=config.INFLUX_DB_TOKEN, 18 | org=config.INFLUX_DB_ORG, 19 | ) 20 | 21 | self.__write_api = self.__influxdb_client.write_api(write_options=SYNCHRONOUS) 22 | 23 | def send(self, data): 24 | assert data["__influxdb"] 25 | 26 | tags = data["__influxdb"]["tags"] 27 | fields = data["__influxdb"]["fields"] 28 | 29 | point = influxdb_client.Point("node-heartbeat") 30 | 31 | point.tag("id", self.__id) 32 | 33 | for tag in tags: 34 | point.tag(tag, data[tag]) 35 | 36 | for field in fields: 37 | point.field(field, data[field]) 38 | 39 | self.__write_api.write( 40 | bucket=config.INFLUX_DB_BUCKET, 
org=config.INFLUX_DB_ORG, record=point 41 | ) 42 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/clients/interface.py: -------------------------------------------------------------------------------- 1 | class TelemetryClientI: 2 | def send(self, data): 3 | raise NotImplementedError() 4 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/clients/local.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from counterpartycore.lib.monitors.telemetry.clients.interface import TelemetryClientI 4 | 5 | # IMPLEMENTATIONS 6 | 7 | 8 | class TelemetryClientLocal(TelemetryClientI): 9 | def __init__(self): 10 | self.logger = logging.getLogger(__name__) 11 | 12 | def send(self, data): 13 | self.logger.info(data) 14 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/collectors/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/lib/monitors/telemetry/collectors/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/collectors/base.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | 4 | from counterpartycore.lib import config, ledger 5 | from counterpartycore.lib.monitors.telemetry import util 6 | from counterpartycore.lib.monitors.telemetry.collectors.interface import TelemetryCollectorI 7 | 8 | logger = logging.getLogger(config.LOGGER_NAME) 9 | 10 | 11 | # DEFAULT IMPLEMENTATION 12 | class 
# DEFAULT IMPLEMENTATION
class TelemetryCollectorKwargs(TelemetryCollectorI):
    """Trivial collector returning the keyword arguments it was constructed with."""

    def __init__(self, **kwargs):
        self.static_attrs = kwargs

    def collect(self):
        """Return the static attributes unchanged."""
        return self.static_attrs

    def close(self):
        """Nothing to release."""


class TelemetryCollectorBase(TelemetryCollectorKwargs):
    """Collector that augments the static attributes with live node data."""

    def __init__(self, db, **kwargs):
        super().__init__(**kwargs)
        self.db = db

    def collect(self):
        """Build the telemetry payload; return None when no last block exists."""
        block_index = ledger.events.last_message(self.db)["block_index"]
        cursor = self.db.cursor()
        last_block = cursor.execute(
            "SELECT * FROM blocks where block_index = ?", [block_index]
        ).fetchone()

        if not last_block:
            return None

        return {
            "version": util.get_version(),
            "uptime": int(util.get_uptime()),
            "dockerized": util.is_docker(),
            "network": util.get_network(),
            "force_enabled": util.is_force_enabled(),
            "platform": util.get_system(),
            "last_block": last_block,
            **self.static_attrs,
        }

    def is_running_in_docker(self):
        """
        Checks if the current process is running inside a Docker container.
        Returns:
            bool: True if running inside a Docker container, False otherwise.
        """
        return (
            os.path.exists("/.dockerenv")
            or "DOCKER_HOST" in os.environ
            or "KUBERNETES_SERVICE_HOST" in os.environ
        )

    def close(self):
        """Nothing to release; the DB connection is owned by the caller."""
61 | """ 62 | return ( 63 | os.path.exists("/.dockerenv") 64 | or "DOCKER_HOST" in os.environ 65 | or "KUBERNETES_SERVICE_HOST" in os.environ 66 | ) 67 | 68 | def close(self): 69 | pass 70 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/collectors/influxdb.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.lib.monitors.telemetry.collectors.base import TelemetryCollectorBase 2 | 3 | 4 | class TelemetryCollectorInfluxDB(TelemetryCollectorBase): 5 | def collect(self): 6 | data = super().collect() 7 | 8 | data = data | data["last_block"] 9 | 10 | if data is None: 11 | return None 12 | 13 | data["__influxdb"] = { 14 | "tags": [], 15 | "fields": [ 16 | "network", 17 | "platform", 18 | "force_enabled", 19 | "dockerized", 20 | "version", 21 | "uptime", 22 | "block_hash", 23 | "block_index", 24 | "ledger_hash", 25 | "txlist_hash", 26 | "messages_hash", 27 | ], 28 | } 29 | 30 | return data 31 | # Collect data and send to InfluxDB 32 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/collectors/interface.py: -------------------------------------------------------------------------------- 1 | class TelemetryCollectorI: 2 | def collect(self): 3 | raise NotImplementedError() 4 | 5 | def close(self): 6 | raise NotImplementedError() 7 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/lib/monitors/telemetry/daemon.py: -------------------------------------------------------------------------------- 1 | import threading # noqa: I001 2 | import time 3 | import logging 4 | 5 | 6 | from counterpartycore.lib.monitors.telemetry.collectors.interface import TelemetryCollectorI 7 | from counterpartycore.lib.monitors.telemetry.clients.interface import TelemetryClientI 8 | 9 | 10 | 
# --- counterpartycore/lib/monitors/telemetry/daemon.py ---
import threading  # noqa: I001
import time
import logging

from counterpartycore.lib.monitors.telemetry.collectors.interface import TelemetryCollectorI
from counterpartycore.lib.monitors.telemetry.clients.interface import TelemetryClientI

from counterpartycore.lib import config

DEFAULT_INTERVAL = 60

logger = logging.getLogger(config.LOGGER_NAME)


class TelemetryDaemon:
    """Background thread that periodically collects telemetry and ships it."""

    def __init__(
        self,
        collector: TelemetryCollectorI,
        client: TelemetryClientI,
        interval=DEFAULT_INTERVAL,
    ):
        # Daemon thread so it never blocks interpreter shutdown.
        self.thread = threading.Thread(target=self._run)
        self.thread.daemon = True
        self.client = client
        self.collector = collector
        self.interval = interval  # must be greater than 0.5
        self.is_running = False

    def start(self):
        """Start the polling thread."""
        self.is_running = True
        self.thread.start()

    def _run(self):
        """Poll in 0.5s steps; collect and send once per `interval` seconds."""
        logger.info("Starting Telemetry Daemon thread...")
        last_run = time.time()
        while self.is_running:
            try:
                if time.time() - last_run < self.interval:
                    time.sleep(0.5)
                    continue
                data = self.collector.collect()
                if data:
                    self.client.send(data)
                last_run = time.time()
            except Exception as e:  # pylint: disable=broad-except
                logger.error("Error in telemetry daemon: %s", e)
                time.sleep(0.5)

    def stop(self):
        """Stop the polling thread and release collector resources.

        Bug fixes:
        - join the thread only if it is alive (joining a never-started
          thread raises RuntimeError);
        - close the collector only AFTER the thread has exited, so `_run`
          cannot call `collect()` on a closed collector.
        """
        logger.info("Stopping Telemetry Daemon thread...")
        self.is_running = False
        if self.thread.is_alive():
            self.thread.join()
        self.collector.close()
# --- counterpartycore/lib/monitors/telemetry/oneshot.py ---
import logging
import time

from counterpartycore.lib import config
from counterpartycore.lib.monitors.telemetry.clients.influxdb import TelemetryClientInfluxDB
from counterpartycore.lib.monitors.telemetry.collectors.influxdb import (
    TelemetryCollectorInfluxDB,
)
from counterpartycore.lib.utils.database import LedgerDBConnectionPool
from counterpartycore.lib.utils.helpers import SingletonMeta
from sentry_sdk import capture_exception

logger = logging.getLogger(config.LOGGER_NAME)


class TelemetryOneShot(metaclass=SingletonMeta):
    """Singleton helper that collects telemetry once and pushes it to InfluxDB."""

    def __init__(self):
        logger.debug("Initializing TelemetryOneShot")
        self.client = TelemetryClientInfluxDB()

    def send(self, data, retry=0):
        """Send `data`, retrying up to 10 times with a 2 second pause.

        Re-raises the last error once the retry budget is exhausted.
        """
        try:
            self.client.send(data)
        except Exception as e:  # pylint: disable=broad-except
            if retry < 10:
                # Consistency fix: use lazy %-style formatting like every
                # other logging call in the monitors package.
                logger.trace("Error in telemetry one shot: %s. Retrying in 2 seconds...", e)
                time.sleep(2)
                self.send(data, retry=retry + 1)
            else:
                raise e

    def submit(self):
        """Collect telemetry from the ledger DB and send it; never raises."""
        try:
            with LedgerDBConnectionPool().connection() as ledger_db:
                collector = TelemetryCollectorInfluxDB(db=ledger_db)
                data = collector.collect()
                collector.close()
                if data:
                    self.send(data)
        except Exception as e:  # pylint: disable=broad-except
            capture_exception(e)
            logger.warning("Error in telemetry one shot: %s", e)


# --- counterpartycore/lib/monitors/telemetry/util.py ---
import os
import platform
from uuid import uuid4

import appdirs

start_time = time.time()


def get_system():
    """Name of the host OS (e.g. 'Linux', 'Darwin', 'Windows')."""
    return platform.system()


def get_version():
    """Running counterparty-core version string."""
    return config.__version__


def get_uptime():
    """Seconds elapsed since this module was first imported."""
    return time.time() - start_time


def is_docker():
    """
    Checks if the current process is running inside a Docker container.
    Returns:
        bool: True if running inside a Docker container, False otherwise.
    """
    return (
        os.path.exists("/.dockerenv")
        or "DOCKER_HOST" in os.environ
        or "KUBERNETES_SERVICE_HOST" in os.environ
    )


def get_network():
    """Return the configured network name: TESTNET4, TESTNET3 or MAINNET."""
    for network in ("TESTNET4", "TESTNET3"):
        if __read_config_with_default(network, False):
            return network
    return "MAINNET"


def is_force_enabled():
    """True when the node runs with force mode enabled."""
    return __read_config_with_default("FORCE", False)


def __read_config_with_default(key, default):
    # getattr tolerates config attributes that are absent on some networks.
    return getattr(config, key, default)


class ID:
    """Stable anonymous node identifier, persisted in the user state directory."""

    def __init__(self):
        # If the id file exists, read the id from it;
        # otherwise create a new id and write it to the file.
        state_dir = appdirs.user_state_dir(
            appauthor=config.XCP_NAME, appname=config.APP_NAME, roaming=True
        )
        if not os.path.isdir(state_dir):
            os.makedirs(state_dir, mode=0o755)
        node_uid_filepath = os.path.join(state_dir, ".counterparty-node-uuid")

        # Migrate the legacy location (home directory) if present.
        node_uid_old_filepath = os.path.join(os.path.expanduser("~"), ".counterparty-node-uuid")
        if os.path.exists(node_uid_old_filepath):
            os.rename(node_uid_old_filepath, node_uid_filepath)

        if os.path.exists(node_uid_filepath):
            # Reuse the previously generated identifier.
            with open(node_uid_filepath, encoding="utf-8") as f:
                user_id = f.read()
        else:
            # First run: generate a fresh UUID and persist it.
            user_id = str(uuid4())
            with open(node_uid_filepath, "w", encoding="utf-8") as f:
                f.write(user_id)

        self.id = user_id
# --- counterpartycore/lib/parser/deserialize.py ---
from counterparty_rs import indexer  # pylint: disable=no-name-in-module

from counterpartycore.lib import config
from counterpartycore.lib.ledger.currentstate import CurrentState
from counterpartycore.lib.utils.helpers import SingletonMeta


class Deserializer(metaclass=SingletonMeta):
    """Thin singleton wrapper around the Rust `indexer.Deserializer`."""

    def __init__(self):
        # Build the backend RPC URL; the scheme depends on the SSL setting.
        scheme = "https" if config.BACKEND_SSL else "http"
        rpc_address = f"{scheme}://{config.BACKEND_CONNECT}:{config.BACKEND_PORT}"
        deserializer_config = {
            "rpc_address": rpc_address,
            "rpc_user": config.BACKEND_USER,
            "rpc_password": config.BACKEND_PASSWORD,
            "db_dir": config.FETCHER_DB,
            "log_file": config.FETCHER_LOG,
            "json_format": config.JSON_LOGS,
            "only_write_in_reorg_window": True,
            "network": config.NETWORK_NAME,
            "prefix": config.PREFIX,
            "enable_all_protocol_changes": config.ENABLE_ALL_PROTOCOL_CHANGES,
        }
        self.deserializer = indexer.Deserializer(deserializer_config)

    def parse_transaction(self, tx_hex, block_index, parse_vouts=False):
        """Deserialize a single raw transaction at the given block height."""
        return self.deserializer.parse_transaction(tx_hex, block_index, parse_vouts)

    def parse_block(self, block_hex, block_index, parse_vouts=False):
        """Deserialize a full raw block at the given block height."""
        return self.deserializer.parse_block(block_hex, block_index, parse_vouts)


def deserialize_tx(tx_hex, parse_vouts=False, block_index=None):
    """Deserialize a raw transaction, defaulting to the current block height."""
    height = block_index or CurrentState().current_block_index()
    return Deserializer().parse_transaction(tx_hex, height, parse_vouts)


def deserialize_block(block_hex, parse_vouts=False, block_index=None):
    """Deserialize a raw block, defaulting to the current block height."""
    height = block_index or CurrentState().current_block_index()
    return Deserializer().parse_block(block_hex, height, parse_vouts)
# --- counterpartycore/lib/parser/utxosinfo.py ---
def is_utxo_format(value):
    """Return True when `value` looks like "<64-hex-char txid>:<vout>"."""
    if not isinstance(value, str):
        return False
    parts = value.split(":")
    if len(parts) != 2:
        return False
    txid, vout = parts
    # The output index must be numeric, with no leading zeros.
    if not vout.isnumeric():
        return False
    if str(int(vout)) != vout:
        return False
    # The transaction id must be a 64-character hexadecimal string.
    try:
        int(txid, 16)
    except ValueError:
        return False
    return len(txid) == 64


def parse_utxos_info(utxos_info):
    """Split a `utxos_info` string into (sources, destination, outputs_count, op_return_output).

    Supports two encodings:
    - new format: "<sources> <destination> <outputs_count> <op_return_output>",
      recognized when there are four fields and the last one is not a UTXO;
    - old format: "<source> ... <destination>" (counts are returned as None).
    """
    fields = utxos_info.split(" ")

    # New format.
    if len(fields) == 4 and not is_utxo_format(fields[-1]):
        sources = [source for source in fields[0].split(",") if source]
        destination = fields[1] or None
        outputs_count = int(fields[2])
        op_return_output = int(fields[3]) if fields[3] != "" else None
        return sources, destination, outputs_count, op_return_output

    # Old format: everything but the last field is a source.
    return fields[:-1], fields[-1], None, None


def get_destination_from_utxos_info(utxos_info):
    """Destination UTXO, or None."""
    return parse_utxos_info(utxos_info)[1]


def get_sources_from_utxos_info(utxos_info):
    """List of source UTXOs."""
    return parse_utxos_info(utxos_info)[0]


def get_outputs_count_from_utxos_info(utxos_info):
    """Number of outputs (new format only), else None."""
    return parse_utxos_info(utxos_info)[2]


def get_op_return_output_from_utxos_info(utxos_info):
    """Index of the OP_RETURN output (new format only), else None."""
    return parse_utxos_info(utxos_info)[3]
# --- counterpartycore/lib/utils/base58.py ---
import binascii

from counterparty_rs import b58  # pylint: disable=no-name-in-module
from counterpartycore.lib import exceptions


def base58_check_encode(original, version):
    """Base58Check-encode the hex string `original` prefixed with `version`."""
    return b58.b58_encode(version + binascii.unhexlify(original))


def base58_check_decode(s, version):
    """Decode a Base58Check string, verify its version byte, return the payload.

    Raises:
        exceptions.Base58Error: the string is not valid base58.
        exceptions.VersionByteError: the version byte does not match.
    """
    try:
        decoded = bytes(b58.b58_decode(s))
    except ValueError as e:
        raise exceptions.Base58Error("invalid base58 string") from e

    if decoded[0] != ord(version):
        raise exceptions.VersionByteError("incorrect version byte")

    return decoded[1:]


# --- counterpartycore/lib/utils/multisig.py ---
def is_multisig(address):
    """Check if the address is multi-signature (underscore-separated form)."""
    return len(address.split("_")) > 1


def test_array(signatures_required, pubs, signatures_possible):
    """Check if multi-signature data is valid; raise on any inconsistency."""
    try:
        signatures_required = int(signatures_required)
        signatures_possible = int(signatures_possible)
    except (ValueError, TypeError) as e:
        raise exceptions.MultiSigAddressError("Signature values not integers.") from e
    if not 1 <= signatures_required <= 3:
        raise exceptions.MultiSigAddressError("Invalid signatures_required.")
    if not 2 <= signatures_possible <= 3:
        raise exceptions.MultiSigAddressError("Invalid signatures_possible.")
    # Underscores are the address separator and cannot appear inside components.
    for pubkey in pubs:
        if "_" in pubkey:
            raise exceptions.MultiSigAddressError("Invalid characters in pubkeys/pubkeyhashes.")
    if signatures_possible != len(pubs):
        raise exceptions.InputError(
            "Incorrect number of pubkeys/pubkeyhashes in multi-signature address."
        )


def construct_array(signatures_required, pubs, signatures_possible):
    """Create a multi-signature address."""
    test_array(signatures_required, pubs, signatures_possible)
    return "_".join([str(signatures_required)] + sorted(pubs) + [str(signatures_possible)])


def extract_array(address):
    """Extract (required, pubs, possible) from a multi-signature address."""
    assert is_multisig(address)
    parts = address.split("_")
    signatures_required, pubs, signatures_possible = parts[0], sorted(parts[1:-1]), parts[-1]
    test_array(signatures_required, pubs, signatures_possible)
    return int(signatures_required), pubs, int(signatures_possible)


# --- counterpartycore/lib/utils/script.py ---
from counterparty_rs import utils  # pylint: disable=no-name-in-module
from counterpartycore.lib import config
from counterpartycore.lib.utils import opcodes


def script_to_asm(scriptpubkey):
    """Disassemble a scriptPubKey (hex string or bytes) into an ASM list."""
    try:
        if isinstance(scriptpubkey, str):
            scriptpubkey = binascii.unhexlify(scriptpubkey)
        asm = utils.script_to_asm(scriptpubkey)
        if asm[-1] == opcodes.OP_CHECKMULTISIG:  # noqa: F405
            # Decode the m-of-n counters of a bare multisig script as integers.
            asm[-2] = int.from_bytes(asm[-2], "big")
            asm[0] = int.from_bytes(asm[0], "big")
        return asm
    except BaseException as e:
        raise exceptions.DecodeError("invalid script") from e


def _script_to_address(scriptpubkey, use_legacy=False):
    """Convert a scriptPubKey (hex string or bytes) to an address string."""
    if isinstance(scriptpubkey, str):
        scriptpubkey = binascii.unhexlify(scriptpubkey)
    try:
        # Bug fix: the old `bytes(scriptpubkey, "utf-8") if isinstance(..., str)`
        # branch was dead code — `scriptpubkey` is always bytes at this point
        # because any hex string was unhexlified just above.
        script = bytes(scriptpubkey)
        if use_legacy:
            return utils.script_to_address_legacy(script, config.NETWORK_NAME)
        return utils.script_to_address(script, config.NETWORK_NAME)
    except BaseException as e:
        raise exceptions.DecodeError("scriptpubkey decoding error") from e


def script_to_address(scriptpubkey):
    return _script_to_address(scriptpubkey, use_legacy=False)


def script_to_address_legacy(scriptpubkey):
    return _script_to_address(scriptpubkey, use_legacy=True)
isinstance(scriptpubkey, str) else bytes(scriptpubkey) 27 | ) # noqa: E721 28 | if use_legacy: 29 | return utils.script_to_address_legacy(script, config.NETWORK_NAME) 30 | return utils.script_to_address(script, config.NETWORK_NAME) 31 | except BaseException as e: 32 | raise exceptions.DecodeError("scriptpubkey decoding error") from e 33 | 34 | 35 | def script_to_address(scriptpubkey): 36 | return _script_to_address(scriptpubkey, use_legacy=False) 37 | 38 | 39 | def script_to_address_legacy(scriptpubkey): 40 | return _script_to_address(scriptpubkey, use_legacy=True) 41 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/test/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/fixtures/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/test/fixtures/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/fixtures/test_public_key.asc: -------------------------------------------------------------------------------- 1 | -----BEGIN PGP PUBLIC KEY BLOCK----- 2 | 3 | mDMEZjgGuRYJKwYBBAHaRw8BAQdATAz6X4a0383lQWrw/4htgUk/5D/nMSvJBQpJ 4 | iG2YDGK0K2NvdW50ZXJwYXJ0eSA8Y291bnRlcnBhcnR5QGNvdW50ZXJwYXJ0eS5p 5 | bz6IkwQTFgoAOxYhBMpYppUY9vsWsYSNuu+xER+hVzfbBQJmOAa5AhsDBQsJCAcC 6 | AiICBhUKCQgLAgQWAgMBAh4HAheAAAoJEO+xER+hVzfbCVABAKSg2lavD6DeYpfc 7 | PDk7zxL4a12kPeIQuyojG3oJvMbjAQCuFp0eou+E1FKj75O8EKX0zqEqmjp9CYyr 8 | 
3Fj7P3qLD7g4BGY4BrkSCisGAQQBl1UBBQEBB0Bx0+u60U00tTZqlJaH+eQ069vM 9 | Fi4vSJgWtiBYpSk/RwMBCAeIeAQYFgoAIBYhBMpYppUY9vsWsYSNuu+xER+hVzfb 10 | BQJmOAa5AhsMAAoJEO+xER+hVzfbAYoA/36LXdXnFlv/g2OysHuFybnyK7V3YvPs 11 | OO6sNAIj3AOYAPwIBc3HFrorfhhKlK44rNvWlvtADAatPvaN6ty5Hkq0Cg== 12 | =WOlL 13 | -----END PGP PUBLIC KEY BLOCK----- 14 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/fixtures/test_snapshot.sig: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/test/fixtures/test_snapshot.sig -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/fixtures/test_snapshot.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/test/fixtures/test_snapshot.tar.gz -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/functionals/conftest.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.test.mocks.conftest import * # noqa F403 2 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/functionals/fixtures_test.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.lib.parser import protocol 2 | from counterpartycore.test.fixtures import ledgerdb 3 | from counterpartycore.test.mocks.counterpartydbs import ProtocolChangesDisabled 4 | 5 | 6 | def test_ledger_db(ledger_db): 7 | cursor = ledger_db.cursor() 8 | burns = cursor.execute("SELECT * FROM 
burns").fetchall() 9 | transactions = cursor.execute("SELECT * FROM transactions").fetchall() 10 | 11 | tx_count = len([tx for tx in ledgerdb.UNITTEST_FIXTURE if tx[0] != "mine_empty_blocks"]) 12 | burn_count = len([tx for tx in ledgerdb.UNITTEST_FIXTURE if tx[0] == "burn"]) 13 | 14 | assert len(transactions) == tx_count 15 | assert len(burns) == burn_count 16 | 17 | 18 | def test_ledger_db_2(ledger_db): 19 | cursor = ledger_db.cursor() 20 | burns = cursor.execute("SELECT * FROM burns").fetchall() 21 | transactions = cursor.execute("SELECT * FROM transactions").fetchall() 22 | 23 | tx_count = len([tx for tx in ledgerdb.UNITTEST_FIXTURE if tx[0] != "mine_empty_blocks"]) 24 | burn_count = len([tx for tx in ledgerdb.UNITTEST_FIXTURE if tx[0] == "burn"]) 25 | 26 | assert len(transactions) == tx_count 27 | assert len(burns) == burn_count 28 | 29 | 30 | def test_state_db(state_db): 31 | cursor = state_db.cursor() 32 | balances = cursor.execute( 33 | "SELECT asset, SUM(quantity) AS quantity FROM balances GROUP BY asset" 34 | ).fetchall() 35 | assert balances == [ 36 | {"asset": "A160361285792733729", "quantity": 50}, 37 | {"asset": "A95428959342453541", "quantity": 100000000}, 38 | {"asset": "CALLABLE", "quantity": 1000}, 39 | {"asset": "DIVIDEND", "quantity": 100}, 40 | {"asset": "DIVISIBLE", "quantity": 100000000000}, 41 | {"asset": "FREEFAIRMIN", "quantity": 10}, 42 | {"asset": "LOCKED", "quantity": 1000}, 43 | {"asset": "LOCKEDPREV", "quantity": 1000}, 44 | {"asset": "MAXI", "quantity": 9223372036854775807}, 45 | {"asset": "NODIVISIBLE", "quantity": 1000}, 46 | {"asset": "PARENT", "quantity": 100000000}, 47 | {"asset": "PAYTOSCRIPT", "quantity": 1000}, 48 | {"asset": "QAIDFAIRMIN", "quantity": 20}, 49 | {"asset": "RAIDFAIRMIN", "quantity": 20}, 50 | {"asset": "TAIDFAIRMIN", "quantity": 1}, 51 | {"asset": "TESTDISP", "quantity": 900}, 52 | {"asset": "XCP", "quantity": 603314652282}, 53 | ] 54 | 55 | 56 | def test_state_db_2(state_db): 57 | cursor = state_db.cursor() 58 
| balances = cursor.execute( 59 | "SELECT asset, SUM(quantity) AS quantity FROM balances GROUP BY asset" 60 | ).fetchall() 61 | assert len(balances) == 17 62 | 63 | 64 | def test_protocol_changes_disabled(): 65 | assert protocol.enabled("multisig_addresses") 66 | 67 | with ProtocolChangesDisabled(["multisig_addresses"]): 68 | assert not protocol.enabled("multisig_addresses") 69 | 70 | assert protocol.enabled("multisig_addresses") 71 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/functionals/p2sh_scenarios_test.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.test.functionals.multisig_scenarios_test import check_standard_scenario 2 | 3 | 4 | def test_p2sh_scenario_1(empty_ledger_db, bitcoind_mock, defaults): 5 | check_standard_scenario( 6 | empty_ledger_db, 7 | bitcoind_mock, 8 | defaults, 9 | defaults["addresses"][0], 10 | defaults["p2sh_addresses"][0], 11 | "ee0c7f0249a0a5c4e89ef6a3d21c78ac744b66556390eef577d5f23b7e85a95a", 12 | ) 13 | 14 | 15 | def test_p2sh_scenario_2(empty_ledger_db, bitcoind_mock, defaults): 16 | check_standard_scenario( 17 | empty_ledger_db, 18 | bitcoind_mock, 19 | defaults, 20 | defaults["p2sh_addresses"][0], 21 | defaults["addresses"][0], 22 | "30c27533c7fa68f60793cd472a33b89ea8363f32a7d3c14e28b6e43f0249896c", 23 | ) 24 | 25 | 26 | def test_p2sh_scenario_3(empty_ledger_db, bitcoind_mock, defaults): 27 | check_standard_scenario( 28 | empty_ledger_db, 29 | bitcoind_mock, 30 | defaults, 31 | defaults["p2sh_addresses"][0], 32 | defaults["p2sh_addresses"][1], 33 | "65f18d6c2dcccf233a3c20719c766e05bdfdbc10ca8818006cf076f1368a4a22", 34 | ) 35 | 36 | 37 | def test_p2sh_scenario_4(empty_ledger_db, bitcoind_mock, defaults): 38 | check_standard_scenario( 39 | empty_ledger_db, 40 | bitcoind_mock, 41 | defaults, 42 | defaults["p2sh_addresses"][0], 43 | defaults["p2tr_addresses"][0], 44 | 
"49481efecf705d45fca78fc950df39e165eaaf6d27f7ee3ddd4b0e07ac00d679", 45 | ) 46 | 47 | 48 | def test_p2sh_scenario_5(empty_ledger_db, bitcoind_mock, defaults): 49 | check_standard_scenario( 50 | empty_ledger_db, 51 | bitcoind_mock, 52 | defaults, 53 | defaults["p2tr_addresses"][0], 54 | defaults["p2sh_addresses"][0], 55 | "c54754424db5d9bae72e705a7b7421c08eec192cfc901d257c82166ef52342bb", 56 | ) 57 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/functionals/taproot_scenarios_test.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.test.functionals.multisig_scenarios_test import check_standard_scenario 2 | 3 | 4 | def test_taproot_scenario_1(empty_ledger_db, bitcoind_mock, defaults): 5 | check_standard_scenario( 6 | empty_ledger_db, 7 | bitcoind_mock, 8 | defaults, 9 | defaults["p2tr_addresses"][0], 10 | defaults["addresses"][0], 11 | "6f488077d790fa369c53bccd1d09a548201fad6c56f48cf150f6c2f0e7062a01", 12 | ) 13 | 14 | 15 | def test_taproot_scenario_2(empty_ledger_db, bitcoind_mock, defaults): 16 | check_standard_scenario( 17 | empty_ledger_db, 18 | bitcoind_mock, 19 | defaults, 20 | defaults["addresses"][0], 21 | defaults["p2tr_addresses"][0], 22 | "4330fdfa070eb72f65742a23fe9e68bbf2e67c55d28ad4b85ad11288f2eeb7ab", 23 | ) 24 | 25 | 26 | def test_taproot_scenario_3(empty_ledger_db, bitcoind_mock, defaults): 27 | check_standard_scenario( 28 | empty_ledger_db, 29 | bitcoind_mock, 30 | defaults, 31 | defaults["p2tr_addresses"][0], 32 | defaults["p2tr_addresses"][1], 33 | "1da2ffb3052684b760c5974e8008479be1b76c8f35fc069fbb1985f02377ffe4", 34 | ) 35 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/load_test.py: -------------------------------------------------------------------------------- 1 | import time 2 | from io import StringIO 3 | 4 | from 
counterpartycore.test.integrations import reparsetest 5 | from counterpartycore.test.integrations.locustrunner import run_locust 6 | 7 | 8 | def test_load(): 9 | sh_counterparty_server, db_file, _api_url = reparsetest.prepare("mainnet") 10 | sh_counterparty_server( 11 | "bootstrap", 12 | "--bootstrap-url", 13 | "https://storage.googleapis.com/counterparty-bootstrap/counterparty.db.v11.0.0.zst", 14 | ) 15 | 16 | try: 17 | out = StringIO() 18 | server_process = sh_counterparty_server( 19 | "start", 20 | "--api-only", 21 | "--backend-connect", 22 | "api.counterparty.io", 23 | "--backend-port", 24 | "8332", 25 | "--backend-ssl", 26 | "--wsgi-server", 27 | "gunicorn", 28 | _bg=True, 29 | _out=out, 30 | _err_to_out=True, 31 | ) 32 | 33 | while "API.Watcher - Catch up completed" not in out.getvalue(): 34 | print("Waiting for server to be ready...") 35 | time.sleep(1) 36 | 37 | env = run_locust(db_file) 38 | 39 | print(env.stats.serialize_errors()) 40 | assert env.stats.total.num_failures == 0 41 | assert env.stats.total.avg_response_time < 1500 # ms 42 | assert env.stats.total.get_response_time_percentile(0.95) < 2000 # ms 43 | finally: 44 | print(out.getvalue()) 45 | server_process.terminate() 46 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/mainnet_test.py: -------------------------------------------------------------------------------- 1 | from reparsetest import bootstrap_reparse_rollback_and_catchup 2 | 3 | 4 | def test_mainnet(): 5 | bootstrap_reparse_rollback_and_catchup("mainnet") 6 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/rebuild_test.py: -------------------------------------------------------------------------------- 1 | from reparsetest import prepare 2 | 3 | 4 | def test_rebuild(): 5 | sh_counterparty_server, _db_file, _api_url = prepare("testnet4") 6 | 
    sh_counterparty_server("rebuild", "--rebuild-state-db")
-------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/apidoc/group-compose.md: --------------------------------------------------------------------------------
**Notes about fee calculation**

To calculate the fees required for a transaction, we do not know the final size of the transaction before signing it.
So the composer injects fake script_sig and witnesses into the transaction before calculating the adjusted vsize.

Two remarks:

1. This only works for standard scripts.

1. The size of DER signatures can vary by a few bytes and it is impossible to predict it. The composer uses a fixed size of 70 bytes, so there may be a discrepancy of a few satoshis with the fees requested with `sat_per_vbyte` (for example, if a DER signature is 72 bytes with `sat_per_vbyte=2` there will be an error of 4 sats in the calculated fees).

**Note about transaction chaining**

If you make several transactions in the same block, you need to chain them using `inputs_set=<txid>:<vout>`; otherwise, you can't guarantee the final order of the transactions.
**Deprecated parameters**

The following parameters are deprecated in the new composer and will no longer be supported in a future version:

- `fee_per_kb`: Use `sat_per_vbyte` instead
- `fee_provided`: Use `max_fee` instead
- `unspent_tx_hash`: Use `inputs_set` instead
- `dust_return_pubkey`: Use `multisig_pubkey` instead
- `return_psbt`: Use `verbose` instead
- `regular_dust_size`: Automatically calculated
- `multisig_dust_size`: Automatically calculated
- `extended_tx_info`: API v1 only, use API v2 instead
- `old_style_api`: API v1 only, use API v2 instead
- `p2sh_pretx_txid`: Ignored, P2SH disabled
- `segwit`: Ignored, Segwit automatically detected
-------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/dreddhooks.py: --------------------------------------------------------------------------------
import dredd_hooks as hooks


@hooks.before_each
def my_before_all_hook(transaction):
    if "/compose" in transaction["fullPath"]:
        transaction["fullPath"] = transaction["fullPath"].replace(
            "exclude_utxos_with_balances=False", "exclude_utxos_with_balances=True"
        )

    return transaction
-------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/scenarios/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CounterpartyXCP/counterparty-core/cfe5baff2080dfa5a72617e1c5afde1fce7dd377/counterparty-core/counterpartycore/test/integrations/regtest/scenarios/__init__.py -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/scenarios/scenario_15_destroy.py: 
-------------------------------------------------------------------------------- 1 | SCENARIO = [ 2 | { 3 | "title": "Destroy some XCP", 4 | "transaction": "destroy", 5 | "source": "$ADDRESS_4", 6 | "params": { 7 | "asset": "XCP", 8 | "quantity": 1, 9 | "tag": "destroy", 10 | }, 11 | "set_variables": { 12 | "DESTROY_1_TX_HASH": "$TX_HASH", 13 | }, 14 | "controls": [ 15 | { 16 | "url": "blocks/$BLOCK_INDEX/events?event_name=ASSET_DESTRUCTION,DEBIT", 17 | "result": [ 18 | { 19 | "event": "ASSET_DESTRUCTION", 20 | "event_index": "$EVENT_INDEX_4", 21 | "params": { 22 | "asset": "XCP", 23 | "block_index": "$BLOCK_INDEX", 24 | "quantity": 1, 25 | "source": "$ADDRESS_4", 26 | "status": "valid", 27 | "tag": "64657374726f79", 28 | "tx_hash": "$TX_HASH", 29 | "tx_index": "$TX_INDEX", 30 | }, 31 | "tx_hash": "$TX_HASH", 32 | }, 33 | { 34 | "event": "DEBIT", 35 | "event_index": "$EVENT_INDEX_3", 36 | "params": { 37 | "action": "destroy", 38 | "address": "$ADDRESS_4", 39 | "asset": "XCP", 40 | "block_index": "$BLOCK_INDEX", 41 | "event": "$TX_HASH", 42 | "quantity": 1, 43 | "tx_index": "$TX_INDEX", 44 | "utxo": None, 45 | "utxo_address": None, 46 | }, 47 | "tx_hash": "$TX_HASH", 48 | }, 49 | ], 50 | } 51 | ], 52 | }, 53 | ] 54 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/scenarios/scenario_24_dispenser.py: -------------------------------------------------------------------------------- 1 | SCENARIO = [ 2 | { 3 | "title": "Create Dispenser XCP", 4 | "transaction": "dispenser", 5 | "source": "$ADDRESS_8", 6 | "params": { 7 | "asset": "XCP", 8 | "give_quantity": 1, 9 | "escrow_quantity": 5000, 10 | "mainchainrate": 1, # 1 BTC for 1 XCP 11 | "status": 0, 12 | "exact_fee": 0, 13 | }, 14 | "set_variables": { 15 | "DISPENSER_XCP_TX_HASH": "$TX_HASH", 16 | "DISPENSER_XCP_TX_INDEX": "$TX_INDEX", 17 | "DISPENSER_XCP_BLOCK_INDEX": "$BLOCK_INDEX", 18 | }, 19 | "controls": [], 20 | }, 21 | { 
22 | "title": "Dispense with send: get 900 XCP", 23 | "transaction": "send", 24 | "source": "$ADDRESS_2", 25 | "params": { 26 | "destination": "$ADDRESS_8", 27 | "quantity": 3000, 28 | "asset": "BTC", 29 | "exact_fee": 0, 30 | }, 31 | "controls": [ 32 | { 33 | "url": "blocks/$BLOCK_INDEX/events?event_name=DISPENSE", 34 | "result": [ 35 | { 36 | "event": "DISPENSE", 37 | "event_index": "$EVENT_INDEX_6", 38 | "params": { 39 | "asset": "XCP", 40 | "block_index": "$BLOCK_INDEX", 41 | "btc_amount": 3000, 42 | "destination": "$ADDRESS_2", 43 | "dispense_index": 0, 44 | "dispense_quantity": 3000, 45 | "dispenser_tx_hash": "$DISPENSER_XCP_TX_HASH", 46 | "source": "$ADDRESS_8", 47 | "tx_hash": "$TX_HASH", 48 | "tx_index": "$TX_INDEX", 49 | }, 50 | "tx_hash": "$TX_HASH", 51 | }, 52 | ], 53 | } 54 | ], 55 | }, 56 | { 57 | "title": "Dispense with send no_dispense: get 0 XCP", 58 | "transaction": "send", 59 | "source": "$ADDRESS_2", 60 | "params": { 61 | "destination": "$ADDRESS_1", 62 | "quantity": 3000, 63 | "asset": "BTC", 64 | "no_dispense": True, 65 | }, 66 | "controls": [ 67 | { 68 | "url": "blocks/$BLOCK_INDEX/events?event_name=DISPENSE", 69 | "result": [], 70 | } 71 | ], 72 | }, 73 | ] 74 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/regtest/scenarios/scenario_4_broadcast.py: -------------------------------------------------------------------------------- 1 | SCENARIO = [ 2 | { 3 | "title": "Broadcast something", 4 | "transaction": "broadcast", 5 | "source": "$ADDRESS_1", 6 | "params": { 7 | "timestamp": 4003903983, 8 | "value": 999, 9 | "fee_fraction": 0.0, 10 | "text": "Hello, world!", 11 | }, 12 | "set_variables": { 13 | "BROADCAST_HASH": "$TX_HASH", 14 | }, 15 | "controls": [ 16 | { 17 | "url": "blocks/$BLOCK_INDEX/events?event_name=BROADCAST", 18 | "result": [ 19 | { 20 | "event": "BROADCAST", 21 | "event_index": "$EVENT_INDEX_3", 22 | "params": { 23 | "block_index": 
"$BLOCK_INDEX", 24 | "fee_fraction_int": 0, 25 | "locked": False, 26 | "source": "$ADDRESS_1", 27 | "status": "valid", 28 | "text": "Hello, world!", 29 | "mime_type": "text/plain", 30 | "timestamp": 4003903983, 31 | "tx_hash": "$TX_HASH", 32 | "tx_index": "$TX_INDEX", 33 | "value": 999.0, 34 | }, 35 | "tx_hash": "$TX_HASH", 36 | } 37 | ], 38 | }, 39 | ], 40 | }, 41 | { 42 | "title": "Broadcast dispenser price", 43 | "transaction": "broadcast", 44 | "source": "$ADDRESS_6", 45 | "params": { 46 | "timestamp": 4003903983, 47 | "value": 66600, 48 | "fee_fraction": 0.0, 49 | "text": "price-USD", 50 | }, 51 | "set_variables": { 52 | "BROADCAST_HASH": "$TX_HASH", 53 | }, 54 | "controls": [ 55 | { 56 | "url": "blocks/$BLOCK_INDEX/events?event_name=BROADCAST", 57 | "result": [ 58 | { 59 | "event": "BROADCAST", 60 | "event_index": "$EVENT_INDEX_3", 61 | "params": { 62 | "block_index": "$BLOCK_INDEX", 63 | "fee_fraction_int": 0, 64 | "locked": False, 65 | "source": "$ADDRESS_6", 66 | "status": "valid", 67 | "text": "price-USD", 68 | "mime_type": "text/plain", 69 | "timestamp": 4003903983, 70 | "tx_hash": "$TX_HASH", 71 | "tx_index": "$TX_INDEX", 72 | "value": 66600.0, 73 | }, 74 | "tx_hash": "$TX_HASH", 75 | } 76 | ], 77 | }, 78 | ], 79 | }, 80 | ] 81 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/shutdown_test.py: -------------------------------------------------------------------------------- 1 | import random 2 | import socket 3 | import time 4 | from io import StringIO 5 | 6 | from counterpartycore.lib.cli import server 7 | from counterpartycore.lib.cli.initialise import initialise_log_and_config 8 | from counterpartycore.lib.cli.main import arg_parser 9 | from counterpartycore.test.integrations import reparsetest 10 | 11 | 12 | def is_port_in_used(port): 13 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 14 | try: 15 | s.bind(("127.0.0.1", port)) 16 | return False 17 | except 
socket.error: 18 | return True 19 | finally: 20 | s.close() 21 | 22 | 23 | def test_shutdown(): 24 | reparsetest.prepare("testnet4") 25 | 26 | try: 27 | parser = arg_parser(no_config_file=True) 28 | args = parser.parse_args( 29 | [ 30 | "--testnet4", 31 | "--data-dir", 32 | reparsetest.DATA_DIR, 33 | "--cache-dir", 34 | reparsetest.DATA_DIR, 35 | "start", 36 | "--backend-connect", 37 | "testnet4.counterparty.io", 38 | "--backend-port", 39 | "48332", 40 | "--backend-ssl", 41 | "--wsgi-server", 42 | "gunicorn", 43 | ] 44 | ) 45 | 46 | log_stream = StringIO() 47 | initialise_log_and_config(args, log_stream=log_stream) 48 | 49 | # Let it have at least 10 seconds 50 | # to start essential components 51 | test_duration = random.randint(10, 60) # noqa S311 52 | start_time = time.time() 53 | 54 | print("Test duration: ", test_duration) 55 | 56 | counterparty_server = server.CounterpartyServer(args, log_stream) 57 | counterparty_server.start() 58 | while time.time() - start_time < test_duration: 59 | counterparty_server.join(1) 60 | 61 | finally: 62 | print("Shutting down server...") 63 | counterparty_server.stop() 64 | 65 | logs = log_stream.getvalue() 66 | 67 | assert "Ledger.Main - Shutting down..." in logs 68 | assert "Ledger.Main - Asset Conservation Checker thread stopped." in logs 69 | assert "Ledger.BackendHeight - BackendHeight Thread stopped." in logs 70 | assert "Ledger.Main - API Server v1 thread stopped." in logs 71 | assert "Ledger.Main - API Server process stopped." in logs 72 | assert "Ledger.Main - Shutdown complete." 
in logs 73 | 74 | assert not is_port_in_used(44000) 75 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/integrations/testnet4_test.py: -------------------------------------------------------------------------------- 1 | from reparsetest import bootstrap_reparse_rollback_and_catchup 2 | 3 | 4 | def test_testnet4(): 5 | bootstrap_reparse_rollback_and_catchup("testnet4") 6 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/mocks/apis.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from counterpartycore.lib import config 3 | from counterpartycore.lib.api import apiserver, apiv1 4 | 5 | 6 | @pytest.fixture() 7 | def apiv1_app(): 8 | app = apiv1.create_app() 9 | app.config.update( 10 | { 11 | "TESTING": True, 12 | } 13 | ) 14 | yield app 15 | 16 | 17 | @pytest.fixture() 18 | def apiv2_app(ledger_db, state_db, monkeypatch, current_block_index): 19 | monkeypatch.setattr( 20 | "counterpartycore.lib.backend.bitcoind.getblockcount", lambda: current_block_index 21 | ) 22 | monkeypatch.setattr("counterpartycore.lib.backend.bitcoind.get_blocks_behind", lambda: 0) 23 | 24 | app = apiserver.init_flask_app() 25 | app.config.update( 26 | { 27 | "TESTING": True, 28 | } 29 | ) 30 | config.DISABLE_API_CACHE = True 31 | yield app 32 | 33 | 34 | def rpc_call(client, method, params): 35 | import json 36 | 37 | headers = {"content-type": "application/json"} 38 | payload = { 39 | "method": method, 40 | "params": params, 41 | "jsonrpc": "2.0", 42 | "id": 0, 43 | } 44 | return client.post("/", data=json.dumps(payload), headers=headers, auth=("rpc", "rpc")) 45 | 46 | 47 | @pytest.fixture() 48 | def apiv1_client(apiv1_app, ledger_db, state_db): 49 | def call(method, params): 50 | return rpc_call(apiv1_app.test_client(), method, params) 51 | 52 | return call 53 | 54 | 55 | @pytest.fixture() 
56 | def apiv2_client(apiv2_app, ledger_db, state_db): 57 | return apiv2_app.test_client() 58 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/api/apicaches_test.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from counterpartycore.lib.api import apiwatcher, caches, queries 4 | 5 | 6 | def test_address_events_cache(state_db, defaults, ledger_db): 7 | cache = caches.AddressEventsCache() 8 | 9 | cache_count = cache.cache_db.execute("SELECT COUNT(*) AS count FROM address_events").fetchone()[ 10 | "count" 11 | ] 12 | state_db_count = state_db.execute("SELECT COUNT(*) AS count FROM address_events").fetchone()[ 13 | "count" 14 | ] 15 | 16 | assert cache_count == state_db_count 17 | 18 | send_event = { 19 | "event": "SEND", 20 | "message_index": 9999999, 21 | "block_index": 9999999, 22 | "bindings": json.dumps( 23 | { 24 | "asset": "XCP", 25 | "quantity": 100000000, 26 | "source": defaults["addresses"][0], 27 | "destination": defaults["addresses"][1], 28 | } 29 | ), 30 | } 31 | 32 | ledger_db.execute( 33 | """ 34 | INSERT INTO messages (message_index, block_index, event, bindings) 35 | VALUES (:message_index, :block_index, :event, :bindings) 36 | """, 37 | send_event, 38 | ) 39 | 40 | apiwatcher.update_address_events(state_db, send_event) 41 | 42 | cache_count_after = cache.cache_db.execute( 43 | "SELECT COUNT(*) AS count FROM address_events" 44 | ).fetchone()["count"] 45 | state_db_count_after = state_db.execute( 46 | "SELECT COUNT(*) AS count FROM address_events" 47 | ).fetchone()["count"] 48 | 49 | assert cache_count_after == state_db_count_after 50 | assert cache_count_after == cache_count + 2 51 | 52 | result = queries.get_events_by_addresses( 53 | ledger_db, 54 | addresses=f"{defaults['addresses'][0]},{defaults['addresses'][1]}", 55 | event_name="SEND", 56 | ) 57 | assert result.result == [ 58 | { 59 | "event_index": 9999999, 
60 | "event": "SEND", 61 | "params": { 62 | "asset": "XCP", 63 | "quantity": 100000000, 64 | "source": defaults["addresses"][0], 65 | "destination": defaults["addresses"][1], 66 | }, 67 | "tx_hash": None, 68 | "block_index": 9999999, 69 | } 70 | ] 71 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/api/healthz_test.py: -------------------------------------------------------------------------------- 1 | import bitcoin as bitcoinlib 2 | from counterpartycore.lib import config 3 | from counterpartycore.lib.ledger.currentstate import CurrentState 4 | 5 | 6 | def set_mainnet_network(monkeypatch, block_index=400000): 7 | config.NETWORK_NAME = "mainnet" 8 | config.UNSPENDABLE = config.UNSPENDABLE_MAINNET 9 | bitcoinlib.SelectParams("mainnet") 10 | config.ADDRESSVERSION = config.ADDRESSVERSION_MAINNET 11 | CurrentState().set_current_block_index(block_index) 12 | CurrentState().last_update = 0 13 | 14 | 15 | def restore_network(): 16 | config.NETWORK_NAME = "regtest" 17 | config.UNSPENDABLE = config.UNSPENDABLE_REGTEST 18 | bitcoinlib.SelectParams("regtest") 19 | config.ADDRESSVERSION = config.ADDRESSVERSION_REGTEST 20 | 21 | 22 | def test_healthz_light(apiv2_client, monkeypatch, current_block_index): 23 | set_mainnet_network(monkeypatch) 24 | assert apiv2_client.get("/healthz").json == {"result": {"status": "Healthy"}} 25 | assert apiv2_client.get("/healthz?check_type=heavy").json == {"result": {"status": "Healthy"}} 26 | restore_network() 27 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/api/wsgi_test.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | from counterpartycore.lib.api import wsgi 4 | 5 | 6 | def test_lazy_logger(caplog, test_helpers): 7 | lazy_logger = wsgi.LazyLogger() 8 | assert lazy_logger.last_message is None 9 | assert 
lazy_logger.last_print == 0 10 | assert lazy_logger.message_delay == 10 11 | 12 | with test_helpers.capture_log(caplog, "Coucou"): 13 | lazy_logger.debug("Coucou") 14 | assert lazy_logger.last_message == "Coucou" 15 | assert lazy_logger.last_print > 0 16 | last_print = lazy_logger.last_print 17 | 18 | caplog.clear() 19 | with test_helpers.capture_log(caplog, "Coucou", not_in=True): 20 | lazy_logger.debug("Coucou") 21 | assert lazy_logger.last_message == "Coucou" 22 | assert lazy_logger.last_print == last_print 23 | 24 | lazy_logger.message_delay = 0.1 25 | time.sleep(0.2) 26 | 27 | caplog.clear() 28 | with test_helpers.capture_log(caplog, "Coucou"): 29 | lazy_logger.debug("Coucou") 30 | assert lazy_logger.last_print > last_print 31 | last_print = lazy_logger.last_print 32 | 33 | with test_helpers.capture_log(caplog, "Hello", not_in=True): 34 | lazy_logger.debug("Hello") 35 | assert lazy_logger.last_print == last_print 36 | 37 | time.sleep(0.2) 38 | with test_helpers.capture_log(caplog, "Hello"): 39 | lazy_logger.debug("Hello") 40 | assert lazy_logger.last_print > last_print 41 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/cli/bootstrap_test.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from counterpartycore.lib.cli.bootstrap import verify_signature 4 | 5 | 6 | def test_verify_signature(): 7 | dir = os.path.dirname(os.path.abspath(__file__)) 8 | public_key_path = os.path.join(dir, "..", "..", "fixtures", "test_public_key.asc") 9 | signature_path = os.path.join(dir, "..", "..", "fixtures", "test_snapshot.sig") 10 | snapshot_path = os.path.join(dir, "..", "..", "fixtures", "test_snapshot.tar.gz") 11 | other_path = os.path.join(dir, "..", "..", "fixtures", "rawtransactions.db") 12 | with open(public_key_path, "rb") as f: 13 | public_key_data = f.read() 14 | 15 | assert verify_signature(public_key_data, signature_path, 
snapshot_path) 16 | assert not verify_signature(public_key_data, signature_path, other_path) 17 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/cli/log_test.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from counterpartycore.lib import config 4 | from counterpartycore.lib.cli.log import CustomFilter 5 | from counterpartycore.lib.ledger.currentstate import CurrentState 6 | 7 | logger = logging.getLogger(config.LOGGER_NAME) 8 | 9 | 10 | def test_quiet_mode(test_helpers, caplog, monkeypatch): 11 | monkeypatch.setattr(config, "QUIET", True) 12 | monkeypatch.setattr(CurrentState, "ledger_state", lambda self: "Catching Up") 13 | 14 | CurrentState().state["CATCHING_UP"] = True 15 | 16 | with test_helpers.capture_log(caplog, "test urgent message"): 17 | logger.urgent("test urgent message") 18 | 19 | caplog.at_level(6, logger=config.LOGGER_NAME) 20 | caplog.clear() 21 | logger.propagate = True 22 | 23 | logger.info("test info message") 24 | assert not CustomFilter().filter(caplog.records[-1]) 25 | 26 | logger.debug("test debug message") 27 | assert not CustomFilter().filter(caplog.records[-1]) 28 | 29 | logger.trace("test trace message") 30 | assert not CustomFilter().filter(caplog.records[-1]) 31 | 32 | logger.urgent("test urgent message") 33 | assert CustomFilter().filter(caplog.records[-1]) 34 | 35 | logger.warning("test warning message") 36 | assert CustomFilter().filter(caplog.records[-1]) 37 | 38 | logger.error("test error message") 39 | assert CustomFilter().filter(caplog.records[-1]) 40 | 41 | CurrentState().state["CATCHING_UP"] = False 42 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/cli/main_test.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.lib import cli 2 | 3 | 4 | def 
test_argparser(): 5 | parser = cli.main.arg_parser(no_config_file=True, app_name="counterparty-test") 6 | args = parser.parse_args( 7 | [ 8 | "--regtest", 9 | "--data-dir=datadir", 10 | "--wsgi-server=waitress", 11 | "--gunicorn-workers=2", 12 | "--no-telemetry", 13 | "--electrs-url=http://localhost:3002", 14 | "start", 15 | "-vv", 16 | ] 17 | ) 18 | assert vars(args) == { 19 | "help": False, 20 | "verbose": 2, 21 | "quiet": False, 22 | "mainnet": True, 23 | "testnet3": False, 24 | "testnet4": False, 25 | "regtest": True, 26 | "api_limit_rows": 1000, 27 | "backend_name": "addrindex", 28 | "backend_connect": "localhost", 29 | "backend_port": None, 30 | "backend_user": "rpc", 31 | "backend_password": "rpc", 32 | "backend_ssl": False, 33 | "backend_ssl_no_verify": False, 34 | "backend_poll_interval": 3.0, 35 | "skip_asset_conservation_check": False, 36 | "p2sh_dust_return_pubkey": None, 37 | "rpc_host": "127.0.0.1", 38 | "rpc_port": None, 39 | "rpc_user": "rpc", 40 | "rpc_password": "rpc", 41 | "rpc_no_allow_cors": False, 42 | "rpc_batch_size": 20, 43 | "api_host": "127.0.0.1", 44 | "api_port": None, 45 | "api_user": None, 46 | "api_password": None, 47 | "api_no_allow_cors": False, 48 | "requests_timeout": 20, 49 | "force": False, 50 | "no_confirm": False, 51 | "data_dir": "datadir", 52 | "cache_dir": None, 53 | "log_file": False, 54 | "api_log_file": False, 55 | "no_log_files": False, 56 | "max_log_file_size": 41943040, 57 | "max_log_file_rotations": 20, 58 | "log_exclude_filters": None, 59 | "log_include_filters": None, 60 | "utxo_locks_max_addresses": 1000, 61 | "utxo_locks_max_age": 3.0, 62 | "no_mempool": False, 63 | "no_telemetry": True, 64 | "enable_zmq_publisher": False, 65 | "zmq_publisher_port": None, 66 | "db_connection_pool_size": 20, 67 | "json_logs": False, 68 | "wsgi_server": "waitress", 69 | "waitress_threads": 10, 70 | "gunicorn_workers": 2, 71 | "gunicorn_threads_per_worker": 2, 72 | "bootstrap_url": None, 73 | "electrs_url": "http://localhost:3002", 
74 | "refresh_state_db": False, 75 | "rebuild_state_db": False, 76 | "action": "start", 77 | "config_file": None, 78 | "catch_up": "normal", 79 | "api_only": False, 80 | "profile": False, 81 | "enable_all_protocol_changes": False, 82 | } 83 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/cli/server_test.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, patch 2 | 3 | import pytest 4 | from counterpartycore.lib import config 5 | from counterpartycore.lib.cli.server import rebuild 6 | 7 | 8 | @pytest.fixture 9 | def rebuild_mock_dependencies(): 10 | """Fixture to simulate external dependencies""" 11 | with ( 12 | patch("counterpartycore.lib.monitors.slack.send_slack_message") as mock_slack, 13 | patch("counterpartycore.lib.cli.bootstrap.clean_data_dir") as mock_clean, 14 | patch("counterpartycore.lib.cli.server.start_all") as mock_start_all, 15 | ): 16 | yield {"slack": mock_slack, "clean_data_dir": mock_clean, "start_all": mock_start_all} 17 | 18 | 19 | def test_rebuild_success(rebuild_mock_dependencies): 20 | """Test the case where rebuild executes successfully""" 21 | # Arrange 22 | args = MagicMock() 23 | 24 | # Act 25 | rebuild(args) 26 | 27 | # Assert 28 | rebuild_mock_dependencies["slack"].assert_any_call("Starting new rebuild.") 29 | rebuild_mock_dependencies["clean_data_dir"].assert_called_once_with(config.DATA_DIR) 30 | rebuild_mock_dependencies["start_all"].assert_called_once_with(args, stop_when_ready=True) 31 | rebuild_mock_dependencies["slack"].assert_called_with("Rebuild complete.") 32 | assert rebuild_mock_dependencies["slack"].call_count == 2 33 | 34 | 35 | def test_rebuild_exception(rebuild_mock_dependencies): 36 | """Test the case where an exception occurs during rebuild""" 37 | # Arrange 38 | args = MagicMock() 39 | rebuild_mock_dependencies["clean_data_dir"].side_effect = Exception("Test 
error") 40 | 41 | # Act & Assert 42 | with pytest.raises(Exception, match="Test error"): 43 | rebuild(args) 44 | 45 | # Verify that appropriate Slack messages were sent 46 | rebuild_mock_dependencies["slack"].assert_any_call("Starting new rebuild.") 47 | rebuild_mock_dependencies["slack"].assert_any_call("Rebuild failed: Test error") 48 | assert rebuild_mock_dependencies["slack"].call_count == 2 49 | 50 | 51 | def test_rebuild_start_all_exception(rebuild_mock_dependencies): 52 | """Test the case where start_all raises an exception""" 53 | # Arrange 54 | args = MagicMock() 55 | rebuild_mock_dependencies["start_all"].side_effect = Exception("Start failed") 56 | 57 | # Act & Assert 58 | with pytest.raises(Exception, match="Start failed"): 59 | rebuild(args) 60 | 61 | # Verify that appropriate messages were sent 62 | rebuild_mock_dependencies["slack"].assert_any_call("Starting new rebuild.") 63 | rebuild_mock_dependencies["slack"].assert_any_call("Rebuild failed: Start failed") 64 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/cli/zmqpublisher_test.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | 4 | import zmq.green as zmq 5 | from counterpartycore.lib import config 6 | from counterpartycore.lib.cli import log 7 | 8 | context = zmq.Context() 9 | 10 | 11 | def test_zmqpublisher_test(test_helpers, caplog): 12 | config.ENABLE_ZMQ_PUBLISHER = True 13 | config.ZMQ_PUBLISHER_PORT = 44001 14 | 15 | log.ZmqPublisher() 16 | 17 | block_index = 1000 18 | event_index = 2000 19 | event_name = "ANICEEVENT" 20 | bindings = {"key": "value", "counter": 1} 21 | 22 | socket = context.socket(zmq.SUB) 23 | socket.connect(f"tcp://localhost:{config.ZMQ_PUBLISHER_PORT}") 24 | socket.setsockopt(zmq.RCVHWM, 0) 25 | socket.setsockopt(zmq.SUBSCRIBE, b"") 26 | time.sleep(0.1) 27 | # bindings["counter"] += 1 28 | with 
test_helpers.capture_log(caplog, f"ANICEEVENT [key=value counter={bindings['counter']}]"): 29 | log.log_event(block_index, event_index, event_name, bindings) 30 | 31 | while True: 32 | _event_name, event = socket.recv_multipart() 33 | event = json.loads(event.decode("utf-8")) 34 | print(event) 35 | assert event["block_index"] == block_index 36 | assert event["event_index"] == event_index 37 | assert event["event"] == event_name 38 | assert event["params"]["key"] == "value" 39 | assert event["params"]["counter"] == bindings["counter"] 40 | time.sleep(1) 41 | bindings["counter"] += 1 42 | with test_helpers.capture_log( 43 | caplog, f"ANICEEVENT [key=value counter={bindings['counter']}]" 44 | ): 45 | log.log_event(block_index, event_index, event_name, bindings) 46 | if bindings["counter"] == 10: 47 | break 48 | 49 | socket.close(linger=0) 50 | context.term() 51 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/conftest.py: -------------------------------------------------------------------------------- 1 | from counterpartycore.test.mocks.conftest import * # noqa F403 2 | -------------------------------------------------------------------------------- /counterparty-core/counterpartycore/test/units/ledger/ledgerblocks_test.py: -------------------------------------------------------------------------------- 1 | import apsw 2 | from counterpartycore.lib.ledger import blocks 3 | 4 | 5 | def test_blocks_functions(ledger_db, current_block_index): 6 | last_block = blocks.get_block(ledger_db, current_block_index) 7 | assert ( 8 | last_block["block_hash"] 9 | == "4b77f641741ee30f3d9b517b30e0c59b7622b6a4a6b4342348b6ceae2d351da6" 10 | ) 11 | assert last_block["block_index"] == current_block_index 12 | assert ( 13 | last_block["ledger_hash"] 14 | == "3b2bef139d2b964e8833e42546016ba32481541b18d8c64cbc899e440d386f1f" 15 | ) 16 | assert ( 17 | last_block["txlist_hash"] 18 | == 
def test_no_blocks_table(empty_ledger_db):
    """last_db_index must fall back to 0 when the blocks table is absent or empty."""
    # A fresh in-memory DB has no blocks table at all.
    dummy_db = apsw.Connection(":memory:")
    assert blocks.last_db_index(dummy_db) == 0

    # An initialised ledger whose blocks table has been emptied behaves the same.
    empty_ledger_db.execute("""PRAGMA foreign_keys=OFF""")
    empty_ledger_db.execute("""DELETE FROM blocks""")
    assert blocks.last_db_index(empty_ledger_db) == 0


import pytest
from counterpartycore.lib import exceptions
from counterpartycore.lib.ledger import other


def test_get_oracle_price(ledger_db):
    """Exercise oracle last-price lookups driven by valid broadcasts."""
    broadcasts = ledger_db.execute("SELECT * FROM broadcasts WHERE status = 'valid'").fetchall()
    first = broadcasts[0]

    # One block after the broadcast, the oracle reports that broadcast's value.
    assert other.get_oracle_last_price(ledger_db, first["source"], first["block_index"] + 1) == (
        first["value"],
        first["fee_fraction_int"],
        "",
        first["block_index"],
    )

    # An unknown order_by value is rejected.
    with pytest.raises(exceptions.InvalidArgument, match="Invalid order_by parameter"):
        other.get_broadcasts_by_source(ledger_db, first["source"], order_by="DEC")

    assert len(other.get_pending_bet_matches(ledger_db, first["source"])) == 0

    # At the broadcast's own block there is no earlier price yet.
    assert other.get_oracle_last_price(ledger_db, first["source"], first["block_index"]) == (
        None,
        None,
        None,
        None,
    )

    # With text 'part1-part2' the portion after the dash comes back as the
    # third tuple element (the trigger is dropped so the row can be edited).
    ledger_db.execute("DROP TRIGGER block_update_broadcasts")
    ledger_db.execute(
        "UPDATE broadcasts SET text = 'part1-part2' WHERE tx_index = ?",
        (first["tx_index"],),
    )
    assert other.get_oracle_last_price(ledger_db, first["source"], first["block_index"] + 1) == (
        first["value"],
        first["fee_fraction_int"],
        "part2",
        first["block_index"],
    )
def get_open_order(ledger_db):
    """Return the most recent order whose latest state is still 'open'."""
    return ledger_db.execute(
        """
        SELECT * FROM
        (SELECT tx_hash, status, source, MAX(rowid) FROM orders GROUP BY tx_hash)
        WHERE status='open' ORDER BY tx_hash DESC LIMIT 1
        """
    ).fetchone()


def test_compose(ledger_db, defaults):
    """compose() builds a cancel for an open offer and rejects invalid inputs."""
    open_order = get_open_order(ledger_db)

    # Happy path: a cancel message targeting an open order.
    assert cancel.compose(ledger_db, open_order["source"], open_order["tx_hash"]) == (
        open_order["source"],
        [],
        b"F\xfd\xcd]\xdf\x084\xb1\xf6\xe7\xd7\xe4\xb9^\x92=\xd5\x1a:\xd4\xdaW\x95\xc0\xf5\xf2q\xa5\x1f\xc3\xab\xb4.",
    )

    # Unknown offer hash is rejected.
    with pytest.raises(exceptions.ComposeError, match="no open offer with that hash"):
        cancel.compose(ledger_db, defaults["addresses"][1], "bet_hash")

    # A source that does not own the offer is rejected.
    with pytest.raises(exceptions.ComposeError, match="incorrect source address"):
        cancel.compose(ledger_db, "addresses", open_order["tx_hash"])

    closed_bet = ledger_db.execute(
        "SELECT * FROM bets WHERE source = ? ORDER BY rowid DESC LIMIT 1",
        (defaults["addresses"][1],),
    ).fetchone()

    # Offers that are no longer open cannot be cancelled.
    with pytest.raises(exceptions.ComposeError, match="offer not open"):
        cancel.compose(ledger_db, closed_bet["source"], closed_bet["tx_hash"])


def test_parse_cancel_order(ledger_db, blockchain_mock, test_helpers, current_block_index):
    """parse() records a valid cancel and flags the target order as cancelled."""
    open_order = get_open_order(ledger_db)
    tx = blockchain_mock.dummy_tx(ledger_db, open_order["source"])
    message = b"\xfd\xcd]\xdf\x084\xb1\xf6\xe7\xd7\xe4\xb9^\x92=\xd5\x1a:\xd4\xdaW\x95\xc0\xf5\xf2q\xa5\x1f\xc3\xab\xb4."

    cancel.parse(ledger_db, tx, message)

    # Expected ledger rows after parsing the cancel.
    expected_cancel = {
        "block_index": tx["block_index"],
        "offer_hash": open_order["tx_hash"],
        "source": open_order["source"],
        "status": "valid",
        "tx_hash": tx["tx_hash"],
        "tx_index": tx["tx_index"],
    }
    expected_order = {
        "status": "cancelled",
        "tx_hash": open_order["tx_hash"],
        "block_index": current_block_index,
    }
    test_helpers.check_records(
        ledger_db,
        [
            {"table": "cancels", "values": expected_cancel},
            {"table": "orders", "values": expected_order},
        ],
    )
@pytest.fixture(autouse=True)
def reset_config_attributes():
    """Snapshot selected config attributes and restore them after each test."""
    watched = [
        "TESTNET3",
        "TESTNET4",
        "FORCE",
        "INFLUX_DB_URL",
        "INFLUX_DB_TOKEN",
        "INFLUX_DB_ORG",
        "INFLUX_DB_BUCKET",
        "LOGGER_NAME",
        "XCP_NAME",
        "APP_NAME",
    ]
    # Only attributes that exist before the test are snapshotted (and restored).
    saved = {name: getattr(config, name) for name in watched if hasattr(config, name)}

    yield

    # Put back every attribute that existed before the test ran.
    for name, value in saved.items():
        setattr(config, name, value)
class TestTelemetryCollectorBase:
    """Unit tests for TelemetryCollectorBase.collect()."""

    @patch("counterpartycore.lib.monitors.telemetry.util.config")
    @patch("counterpartycore.lib.monitors.telemetry.collectors.base.ledger")
    def test_collect(self, mock_ledger, mock_config):
        """collect() must look up the last message's block and report node metadata."""
        mock_db = MagicMock()
        mock_ledger.events.last_message.return_value = {"block_index": 12345}
        mock_config.__version__ = "1.2.3"
        mock_config.TESTNET3 = False
        mock_config.TESTNET4 = False
        mock_config.FORCE = False

        collector = TelemetryCollectorBase(mock_db)
        time.sleep(0.1)  # let uptime become strictly positive
        data = collector.collect()
        print("\n\n\n", data)

        # The collector must query the block referenced by the last message.
        mock_ledger.events.last_message.assert_called_with(mock_db)
        mock_db.cursor().execute.assert_called_with(
            "SELECT * FROM blocks where block_index = ?", [12345]
        )

        assert data["version"] == "1.2.3"
        assert data["uptime"] > 0
        assert data["network"] == "MAINNET"
        assert isinstance(data["dockerized"], bool)
        assert not data["force_enabled"]

    @patch("counterpartycore.lib.monitors.telemetry.collectors.base.ledger")
    @patch("counterpartycore.lib.monitors.telemetry.collectors.base.os.path.exists")
    def test_collect_with_docker(self, mock_exists, mock_ledger):
        """When the docker marker path appears to exist, collect() reports dockerized."""
        mock_db = MagicMock()
        mock_exists.return_value = True
        mock_ledger.events.last_message.return_value = {"block_index": 12345}

        collector = TelemetryCollectorBase(mock_db)
        data = collector.collect()

        assert data["dockerized"] == True  # noqa: E712
def test_id_mocked():
    """util.ID should be instantiable and expose an `id` attribute.

    Real ID generation depends on the file system, so __init__ is stubbed
    out and the attribute is assigned by hand.
    """
    with patch.object(util.ID, "__init__", return_value=None):
        id_obj = util.ID()
        id_obj.id = "test-id"
        assert id_obj.id == "test-id"


from unittest import mock

import pytest

from counterpartycore.lib.ledger.currentstate import CurrentState


@pytest.fixture
def mock_db():
    """Provide a mocked database connection together with its mocked cursor."""
    db = mock.MagicMock()
    cursor = mock.MagicMock()
    db.cursor.return_value = cursor
    return db, cursor


@pytest.fixture
def mock_current_state():
    """Patch CurrentState.set_parsing_mempool and yield the patched method."""
    with mock.patch.object(CurrentState, "set_parsing_mempool") as patched:
        yield patched


@pytest.fixture
def mock_deserialize():
    """Patch deserialize.deserialize_tx."""
    with mock.patch(
        "counterpartycore.lib.parser.deserialize.deserialize_tx"
    ) as patched:
        yield patched


@pytest.fixture
def mock_blocks():
    """Patch blocks.list_tx and blocks.parse_block; yield both mocks."""
    with (
        mock.patch("counterpartycore.lib.parser.blocks.list_tx") as list_tx,
        mock.patch("counterpartycore.lib.parser.blocks.parse_block") as parse_block,
    ):
        yield list_tx, parse_block


@pytest.fixture
def mock_ledger_blocks():
    """Patch ledger.blocks.get_transaction."""
    with mock.patch("counterpartycore.lib.ledger.blocks.get_transaction") as patched:
        yield patched


@pytest.fixture
def mock_backend_bitcoind():
    """Patch backend.bitcoind.getrawmempool."""
    with mock.patch("counterpartycore.lib.backend.bitcoind.getrawmempool") as patched:
        yield patched
def test_enabled():
    """protocol.enabled() behaviour for known, unknown and force-enabled changes."""
    assert protocol.enabled("numeric_asset_names")

    # Outside regtest, an unknown protocol change must raise KeyError.
    config.REGTEST = False
    try:
        with pytest.raises(KeyError, match="foobar"):
            protocol.enabled("foobar")
    finally:
        # Restore the flag even if the assertion fails, so a failure here
        # cannot leak a mutated config into later tests.
        config.REGTEST = True

    # A disabled change is reported as such.
    with ProtocolChangesDisabled(["numeric_asset_names"]):
        assert not protocol.enabled("numeric_asset_names")

    # With ENABLE_ALL_PROTOCOL_CHANGES set, any name is reported enabled.
    config.ENABLE_ALL_PROTOCOL_CHANGES = True
    try:
        assert protocol.enabled("barbaz")
    finally:
        config.ENABLE_ALL_PROTOCOL_CHANGES = False
'finance'] 16 | classifiers = [ 17 | "Development Status :: 5 - Production/Stable", 18 | "Intended Audience :: Developers", 19 | "Intended Audience :: Financial and Insurance Industry", 20 | "License :: OSI Approved :: MIT License", 21 | "Natural Language :: English", 22 | "Operating System :: Microsoft :: Windows", 23 | "Operating System :: POSIX", 24 | "Programming Language :: Python :: 3 :: Only", 25 | "Topic :: Office/Business :: Financial", 26 | "Topic :: Software Development :: Libraries :: Python Modules", 27 | "Topic :: System :: Distributed Computing" 28 | ] 29 | 30 | [tool.hatch.metadata] 31 | allow-direct-references = true 32 | 33 | [tool.hatch.metadata.hooks.requirements_txt] 34 | files = ["requirements.txt"] 35 | 36 | [project.urls] 37 | "Latest release" = "https://github.com/CounterpartyXCP/counterparty-core/releases/latest" 38 | "Documentation" = "https://docs.counterparty.io/" 39 | "Source code" = "https://github.com/CounterpartyXCP/" 40 | "Home Page" = "https://counterparty.io/" 41 | 42 | [tool.hatch.version] 43 | path = "counterpartycore/lib/config.py" 44 | 45 | [tool.hatch.build.targets.wheel] 46 | include = ["counterpartycore"] 47 | 48 | [tool.hatch.envs.default] 49 | pre-install-commands = [ 50 | "pip install -e ../counterparty-rs", 51 | ] 52 | 53 | [project.scripts] 54 | counterparty-server = "counterpartycore:lib.cli.main.main" 55 | 56 | [tool.license_scanner] 57 | allowed-licences = [ 58 | 'Apache license', 59 | 'Apache license 2.0', 60 | 'BSD 2-clause license', 61 | 'BSD 3-clause license', 62 | 'BSD license', 63 | 'GNU general public license v2 (gplv2)', 64 | 'GNU lesser general public license', 65 | 'GNU lesser general public license v2 (lgplv2)', 66 | 'GNU lesser general public license v3 (lgplv3)', 67 | 'ISC license (iscl)', 'MIT license', 68 | 'Mozilla public license 2.0 (mpl 2.0)', 69 | 'Python software foundation license', 70 | 'The Unlicense (Unlicense)', 71 | 'Public domain', 72 | 'Creative Commons Zero, CC-0', 73 | 'Zope Public 
License v2', 74 | 'UNKNOWN', 75 | ] 76 | allowed-packages = [ 77 | 'counterparty-core', 'counterparty-rs', 78 | 'maturin', 'apsw', 79 | ] 80 | -------------------------------------------------------------------------------- /counterparty-core/requirements.txt: -------------------------------------------------------------------------------- 1 | apsw==3.49.0.0 2 | appdirs==1.4.4 3 | python-dateutil==2.8.2 4 | Flask-HTTPAuth==4.8.0 5 | Flask==3.0.0 6 | prettytable==3.9.0 7 | json-rpc==1.15.0 8 | pycryptodome==3.20.0 9 | ripemd-hash==1.0.1 10 | safe-pysha3==1.0.4 11 | pytest==7.4.4 12 | pytest-cov==4.1.0 13 | python-bitcoinlib==0.12.2 14 | requests==2.32.2 15 | tendo==0.3.0 16 | xmltodict==0.13.0 17 | cachetools==5.3.2 18 | bitstring==4.1.4 19 | Werkzeug==3.0.6 20 | itsdangerous==2.1.2 21 | arc4==0.4.0 22 | halo==0.0.31 23 | termcolor==2.4.0 24 | sentry-sdk==2.22.0 25 | docstring_parser==0.16 26 | psutil==5.9.8 27 | influxdb-client==1.42.0 28 | python-gnupg==0.5.2 29 | pyzmq==26.3.0 30 | gevent==24.11.1 31 | JSON-log-formatter==1.0 32 | yoyo-migrations==8.2.0 33 | gunicorn==23.0.0 34 | waitress==3.0.1 35 | hypothesis==6.116.0 36 | bitcoin-utils==0.7.1 37 | pyzstd==0.16.2 38 | dredd_hooks==0.2.0 39 | sh==2.0.6 40 | PyYAML==6.0.2 41 | multiprocessing-logging==0.3.4 42 | locust==2.32.8 43 | pygit2==1.17.0 44 | cbor2==5.6.5 45 | counterparty-rs==11.0.0 46 | -------------------------------------------------------------------------------- /counterparty-core/tools/comparebalances.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | 4 | import apsw 5 | 6 | LAST_BLOCK = 885705 7 | 8 | db_v10 = apsw.Connection( 9 | "/home/ouziel/.local/share/counterparty/counterparty.db", flags=apsw.SQLITE_OPEN_READONLY 10 | ) 11 | db_v11 = apsw.Connection( 12 | "/home/ouziel/.local/share/counterparty/v11/counterparty.db", flags=apsw.SQLITE_OPEN_READONLY 13 | ) 14 | 15 | 16 | sql = f"SELECT * FROM balances WHERE block_index <= 
#!/usr/bin/python3
"""Promote freshly generated scenario fixtures (*.new.*) over the old ones."""

import glob
import os
import sys

CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
SCENARIOS_DIR = os.path.join(CURRENT_DIR, "..", "counterpartycore/test/fixtures/scenarios")

# Either spelling of the flag triggers a dry run.
dryrun = any(flag in sys.argv for flag in ("--dry-run", "--dryrun"))

for new_fixture_path in glob.glob(os.path.join(SCENARIOS_DIR, "*.new.*")):
    old_fixture_path = new_fixture_path.replace(".new.", ".")
    print(f"Move {new_fixture_path} to {old_fixture_path}")
    if dryrun:
        continue
    os.replace(new_fixture_path, old_fixture_path)
#!/usr/bin/env python3
"""Walk backwards from START_BLOCK to find the first block where two
Counterparty servers disagree on the ledger hash, then diff that block's events."""

import difflib
import json
import time

SERVER_1 = "https://dev.counterparty.io:4000/"
SERVER_2 = "http://localhost:4000/"

START_BLOCK = 872150


def compare_strings(string1, string2):
    """Print a unified diff of the two strings and return the number of diff lines.

    Returns 0 when the strings are identical (nothing is printed in that case).
    """
    diff = list(difflib.unified_diff(string1.splitlines(1), string2.splitlines(1), n=0))
    if len(diff):
        print("\nDifferences:")
        print("\n".join(diff))
    return len(diff)


def get_hashes(server, block_index):
    """Fetch a block's (ledger_hash, txlist_hash) pair from a server's v2 API."""
    import sh  # local import: the module stays importable without `sh` installed

    result = json.loads(sh.curl(f"{server}v2/blocks/{block_index}").strip())["result"]
    return result["ledger_hash"], result["txlist_hash"]


def get_events(server, block_index):
    """Fetch all events of a block from a server's v2 API."""
    import sh  # local import: the module stays importable without `sh` installed

    result = json.loads(sh.curl(f"{server}v2/blocks/{block_index}/events").strip())["result"]
    return result


def main():
    """Scan backwards until both servers agree, then diff the first bad block."""
    block_index = START_BLOCK
    hashes_1 = get_hashes(SERVER_1, block_index)
    hashes_2 = get_hashes(SERVER_2, block_index)
    while hashes_1[0] != hashes_2[0]:
        print(f"Block {block_index} NOK")
        time.sleep(0.1)  # be gentle with the servers
        block_index -= 1
        hashes_1 = get_hashes(SERVER_1, block_index)
        hashes_2 = get_hashes(SERVER_2, block_index)

    print(f"Block {block_index} OK")

    block_index += 1
    print(f"First bad block: {block_index}")
    events_1 = get_events(SERVER_1, block_index)
    events_2 = get_events(SERVER_2, block_index)

    compare_strings(json.dumps(events_1, indent=4), json.dumps(events_2, indent=4))


if __name__ == "__main__":
    # Guarding the scan lets the helpers above be imported and tested
    # without hammering the two servers as a side effect of import.
    main()
# Print a message prefixed with a timestamp.
log_message() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1"
}

# Run one full rebuild cycle: update the branch, reinstall counterparty-rs,
# then invoke `counterparty-server rebuild` pinned to the latest commit.
perform_rebuild_cycle() {
    local repo_dir=$1
    local branch=$2

    log_message "Starting rebuild cycle for branch: $branch..."

    # Bring the working tree up to date on the requested branch.
    git checkout "$branch"
    git pull -f origin "$branch":"$branch"

    # Reinstall the Rust extension module if it is present.
    if [ -d "$repo_dir/counterparty-rs" ]; then
        cd "$repo_dir/counterparty-rs"
        pip install -e .
        cd "$repo_dir"
    fi

    LAST_COMMIT=$(python3 -c "from counterpartycore.lib.utils import helpers; print(helpers.get_current_commit_hash(not_from_env=True))")
    log_message "Last commit hash: $LAST_COMMIT"

    log_message "Running rebuild..."
    # Stop the whole script if the rebuild fails.
    if ! CURRENT_COMMIT="$LAST_COMMIT" counterparty-server rebuild; then
        log_message "Error: Rebuild command failed. Stopping script."
        exit 1
    fi

    log_message "Rebuild cycle completed."
}
#!/usr/bin/python3
"""Update transaction ids in vectors.py after a fixture regeneration.

Parses the git diff (or a given commit) of the unittest fixture SQL, maps old
tx hashes to new ones, then rewrites every occurrence — both as plain hex and
as the escaped byte-literal form — in the vectors file.
"""

import binascii
import os
import re
import sys

COMMIT = "8906a8188ba841599f66627157e29a270ca838cf"
UNITTEST_FIXTURE_SQL = "counterpartycore/test/fixtures/scenarios/unittest_fixture.sql"
UNITTEST_VECTORS_PY = "counterpartycore/test/fixtures/vectors.py"

# Named groups reconstructed to match the m.group("change"/"tx_index"/"tx_hash")
# lookups below (the group names were lost in a previous mangling of this line).
REGEX = r"^(?P<change>[+-])INSERT INTO transactions VALUES\((?P<tx_index>\d+),'(?P<tx_hash>.+?)',"


def to_literal_byte_string(h):
    """Render a hex string as the body of a Python byte literal.

    Printable ASCII bytes (0x20-0x7e) are emitted verbatim; every other byte
    becomes a ``\\xNN`` escape with a two-digit lowercase hex code.

    Args:
        h: hex-encoded string (e.g. ``"41ff"``).

    Returns:
        str: the literal representation (e.g. ``"A\\xff"``).

    Raises:
        binascii.Error: if ``h`` is not valid hexadecimal.
    """
    # Build via a list + join instead of repeated string concatenation.
    parts = []
    for byte in binascii.unhexlify(h):
        if 32 <= byte <= 126:
            parts.append(chr(byte))
        else:
            parts.append(f"\\x{byte:02x}")
    return "".join(parts)


def main():
    """Parse the fixture diff and rewrite tx ids in the vectors file."""
    dryrun = "--dry-run" in sys.argv or "--dryrun" in sys.argv
    args = [a for a in sys.argv if a not in [__file__, "--dry-run", "--dryrun"]]

    diffcmd = f"git --no-pager diff {UNITTEST_FIXTURE_SQL}"
    if len(args) == 1:
        commit = args[0]
        diffcmd = f"git --no-pager show {commit} {UNITTEST_FIXTURE_SQL}"
    elif len(args) > 1:
        raise Exception("Too many arguments")

    old_txid_map = {}
    new_txid_map = {}

    with os.popen(diffcmd) as diff:  # noqa: S605
        lines = diff.readlines()

    # '+' lines carry the new hashes, '-' lines the old ones; key by tx_index.
    for line in lines:
        m = re.match(REGEX, line)
        if m:
            if m.group("change") == "+":
                new_txid_map[m.group("tx_index")] = m.group("tx_hash")
            else:
                old_txid_map[m.group("tx_index")] = m.group("tx_hash")

    with open(UNITTEST_VECTORS_PY, "r") as f:
        filedata = f.read()

    for tx_index, old_txid in sorted(old_txid_map.items(), key=lambda kv: kv[0]):
        new_txid = new_txid_map[tx_index]
        print(f"{old_txid} -> {new_txid}")
        # Replace both the raw hex form and the escaped byte-literal form.
        filedata = filedata.replace(old_txid, new_txid)
        filedata = filedata.replace(
            to_literal_byte_string(old_txid), to_literal_byte_string(new_txid)
        )

    if not dryrun:
        assert filedata
        with open(UNITTEST_VECTORS_PY, "w") as f:
            f.write(filedata)
    else:
        print("DRYRUN")


if __name__ == "__main__":
    # Guarding the script body keeps the module importable (for testing)
    # without shelling out to git or touching the vectors file.
    main()
39 | .exit 40 | """, 41 | "utf-8", 42 | ) 43 | ) 44 | print(output) 45 | assert pdump.wait() == 0 46 | 47 | print("preparing new DB...") 48 | ppre = Popen(["sqlite3", tmpfile], stdin=PIPE, stdout=PIPE, stderr=PIPE) # noqa: S603, S607 49 | output = ppre.communicate( 50 | bytes( 51 | f""" 52 | PRAGMA journal_mode = OFF; 53 | PRAGMA synchronous = OFF; 54 | PRAGMA page_size={PAGE_SIZE}; 55 | """, 56 | "utf-8", 57 | ) 58 | ) 59 | print(output) 60 | assert ppre.wait() == 0 61 | 62 | print("loading .sql dump into DB...") 63 | fsqlfile = os.open(sqlfile, os.O_RDONLY) 64 | pload = Popen(["sqlite3", tmpfile], stdin=fsqlfile, stdout=PIPE, stderr=PIPE) # noqa: S603, S607 65 | print(pload.communicate()) 66 | assert pload.wait() == 0 67 | 68 | print("finalizing new DB...") 69 | ppost = Popen(["sqlite3", tmpfile], stdin=PIPE, stdout=PIPE, stderr=PIPE) # noqa: S603, S607 70 | output = ppost.communicate( 71 | bytes( 72 | """ 73 | PRAGMA journal_mode = ON; 74 | PRAGMA synchronous = NORMAL; 75 | """, 76 | "utf-8", 77 | ) 78 | ) 79 | print(output) 80 | assert ppost.wait() == 0 81 | 82 | print("replacing old DB with new DB...") 83 | os.remove(dbfile) 84 | os.rename(tmpfile, dbfile) 85 | 86 | print("done!") 87 | -------------------------------------------------------------------------------- /counterparty-rs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "counterparty-rs" 3 | version = "11.0.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | [lib] 8 | name = "counterparty_rs" 9 | crate-type = ["cdylib"] 10 | 11 | [dependencies] 12 | ring = "0.17.13" 13 | pyo3 = { version = "0.21.2", features = ["extension-module", "abi3-py37"] } 14 | bs58 = { version = "0.5.0", features = ["check"] } 15 | # secp256k1 = "0.25.0" 16 | bip32 = { version = "0.5.1", features = ["secp256k1-ffi"] } 17 | ripemd = "0.1.3" 18 | sha256 = "1.5.0" 19 | bitcoin = 
"0.32.4" 20 | par-map = "0.1.4" 21 | tracing = "0.1.40" 22 | crossbeam-channel = "0.5.13" 23 | thiserror = "1.0.58" 24 | derive_more = "0.99.17" 25 | tracing-subscriber = { version = "0.3.18", features = [ 26 | "env-filter", 27 | "fmt", 28 | "chrono", 29 | "json", 30 | ] } 31 | rocksdb = "0.22.0" 32 | rand = "0.8.5" 33 | serde = { version = "1.0.214", features = ["derive"] } 34 | serde_json = "1.0.132" 35 | uuid = { version = "1.11.0", features = ["v4", "fast-rng"] } 36 | colored = "2.1.0" 37 | rust-crypto = "0.2.36" 38 | time = "0.3.37" 39 | hex = "0.4" 40 | lazy_static = "1.4" 41 | reqwest = { version = "0.12.12", features = ["json", "blocking"]} 42 | base64 = "0.21" 43 | env_logger = "0.10" 44 | log = "0.4" 45 | serde_cbor = "0.11" 46 | 47 | [build-dependencies] 48 | vergen = { version = "8.3.1", features = [ 49 | "build", 50 | "cargo", 51 | "git", 52 | "gitcl", 53 | "rustc", 54 | ] } 55 | pyo3-build-config = "0.20.3" 56 | 57 | [dev-dependencies] 58 | quickcheck = "1.0.3" 59 | quickcheck_macros = "1.0.0" 60 | -------------------------------------------------------------------------------- /counterparty-rs/README.md: -------------------------------------------------------------------------------- 1 | # counterparty_rs 2 | 3 | Rust and pyo3-based speed-ups for `counterparty-core`. 4 | 5 | This is a rust-based python wheel that wraps [rust-bitcoin](https://docs.rs/bitcoin/latest/bitcoin/). 
6 | 7 | -------------------------------------------------------------------------------- /counterparty-rs/build.rs: -------------------------------------------------------------------------------- 1 | // build.rs 2 | use std::error::Error; 3 | use vergen::EmitBuilder; 4 | 5 | fn main() -> Result<(), Box> { 6 | pyo3_build_config::add_extension_module_link_args(); 7 | 8 | // Emit the instructions 9 | EmitBuilder::builder() 10 | .all_build() 11 | .all_git() 12 | .all_cargo() 13 | .emit()?; 14 | Ok(()) 15 | } 16 | -------------------------------------------------------------------------------- /counterparty-rs/pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["maturin>=0.14,<0.15"] 3 | build-backend = "maturin" 4 | 5 | [project] 6 | name = "counterparty-rs" 7 | dynamic = ["version"] 8 | repository = "https://github.com/CounterpartyXCP/counterparty-core" 9 | requires-python = ">=3.10" 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Programming Language :: Rust", 13 | "Programming Language :: Python :: Implementation :: CPython", 14 | "Programming Language :: Python :: Implementation :: PyPy", 15 | ] 16 | dependencies = [ 17 | "python-bitcoinlib==0.12.2", 18 | ] 19 | 20 | 21 | [tool.maturin] 22 | strip = true 23 | 24 | [tool.hatch.version] 25 | path = "../counterparty-core/counterpartycore/lib/config.py" 26 | -------------------------------------------------------------------------------- /counterparty-rs/rustfmt.toml: -------------------------------------------------------------------------------- 1 | match_arm_blocks = true 2 | -------------------------------------------------------------------------------- /counterparty-rs/src/b58.rs: -------------------------------------------------------------------------------- 1 | use pyo3::exceptions::PyValueError; 2 | use pyo3::prelude::*; 3 | 4 | #[pyfunction] 5 | pub fn b58_encode(decoded: &[u8]) -> String { 6 | 
bs58::encode(decoded).with_check().into_string() 7 | } 8 | 9 | #[pyfunction] 10 | fn b58_encode_list(decoded_list: Vec>) -> Vec { 11 | decoded_list.iter().map(|x| b58_encode(x)).collect() 12 | } 13 | 14 | #[pyfunction] 15 | fn b58_decode(encoded: &str) -> PyResult> { 16 | let decoded = bs58::decode(encoded).with_check(None).into_vec(); 17 | 18 | match decoded { 19 | Ok(s) => Ok(s), 20 | Err(_) => Err(PyValueError::new_err("Bad input")), 21 | } 22 | } 23 | 24 | #[pyfunction] 25 | fn b58_decode_list(encoded_list: Vec) -> PyResult>> { 26 | let mut decoded_list = Vec::new(); 27 | for encoded in encoded_list { 28 | let decoded = b58_decode(&encoded)?; 29 | decoded_list.push(decoded); 30 | } 31 | Ok(decoded_list) 32 | } 33 | 34 | pub fn register_b58_module(parent_module: &Bound<'_, PyModule>) -> PyResult<()> { 35 | let m = PyModule::new_bound(parent_module.py(), "b58")?; 36 | m.add_function(wrap_pyfunction!(b58_encode, &m)?)?; 37 | m.add_function(wrap_pyfunction!(b58_encode_list, &m)?)?; 38 | m.add_function(wrap_pyfunction!(b58_decode, &m)?)?; 39 | m.add_function(wrap_pyfunction!(b58_decode_list, &m)?)?; 40 | parent_module.add_submodule(&m)?; 41 | Ok(()) 42 | } 43 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/constants.rs: -------------------------------------------------------------------------------- 1 | // pub const CP_HEIGHT: u32 = 278270; 2 | pub const CP_HEIGHT: u32 = 800000; 3 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/handlers/get_block.rs: -------------------------------------------------------------------------------- 1 | use crossbeam_channel::{select, Receiver}; 2 | 3 | use crate::indexer::{block::Block, stopper::Stopper, types::error::Error}; 4 | 5 | pub fn new(stopper: Stopper, rx: Receiver>) -> Result, Error> { 6 | let (_, done) = stopper.subscribe()?; 7 | select! 
{ 8 | recv(done) -> _ => Err(Error::Stopped), 9 | recv(rx) -> result => Ok(result?) 10 | } 11 | } 12 | 13 | pub fn new_non_blocking( 14 | stopper: Stopper, 15 | rx: Receiver>, 16 | ) -> Result>, Error> { 17 | let (_, done) = stopper.subscribe()?; 18 | select! { 19 | recv(done) -> _ => Err(Error::Stopped), 20 | recv(rx) -> result => Ok(Some(result?)), 21 | default() => Ok(None) 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/handlers/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod get_block; 2 | pub mod new; 3 | pub mod start; 4 | pub mod stop; 5 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/handlers/new.rs: -------------------------------------------------------------------------------- 1 | use std::cmp::max; 2 | 3 | use crossbeam_channel::bounded; 4 | use tracing::{debug, info}; 5 | 6 | use crate::indexer::{ 7 | bitcoin_client::BitcoinClient, config::Config, database::Database, logging::setup_logging, 8 | stopper::Stopper, types::error::Error, Indexer, 9 | }; 10 | 11 | pub fn new(config: Config) -> Result { 12 | setup_logging(&config); 13 | 14 | info!("Indexer initializing..."); 15 | let parallelism = std::thread::available_parallelism()?; 16 | let stopper = Stopper::new(); 17 | let client = BitcoinClient::new(&config, stopper.clone(), parallelism.into())?; 18 | let handles = client.start()?; 19 | debug!("Connecting to database: {}", config.db_dir); 20 | let db = Database::new(config.db_dir.to_string())?; 21 | debug!("Connected"); 22 | let chan = bounded(64); 23 | debug!("Initialized"); 24 | 25 | Ok(Indexer { 26 | config, 27 | parallelism: max(parallelism.into(), 4), 28 | stopper, 29 | client, 30 | db, 31 | chan, 32 | handles, 33 | }) 34 | } 35 | -------------------------------------------------------------------------------- 
/counterparty-rs/src/indexer/handlers/stop.rs:
--------------------------------------------------------------------------------
use std::thread::JoinHandle;

use crossbeam_channel::unbounded;
use tracing::debug;

use crate::indexer::{
    config::Config,
    stopper::Stopper,
    types::{error::Error, pipeline::ChanOut},
    workers::{consumer, new_worker_pool},
};

/// Stop the indexer: signal all workers, join their threads, and surface any
/// worker panic as `Error::GracefulExitFailure`. When the caller is not
/// consuming blocks itself, a temporary consumer is attached first so
/// producers blocked on a full output channel can drain and exit.
///
/// NOTE(review): generic parameters on this signature (and on `ChanOut`)
/// appear to have been stripped by the dump this file was recovered from —
/// confirm against the repository before relying on the exact types.
pub fn new(
    handles: &mut Vec<JoinHandle<Result<(), Error>>>,
    config: Config,
    stopper: Stopper,
    chan: ChanOut,
) -> Result<(), Error> {
    if stopper.stopped()? {
        return Err(Error::Stopped);
    }

    debug!("Stopping...");
    let mut consumer_handle = None;
    let consumer_stopper = Stopper::new();
    let (tx, _) = unbounded::<()>();
    if !config.consume_blocks {
        // Drain chan's output side while the real workers wind down.
        let mut handles = new_worker_pool(
            "Consumer".into(),
            1,
            chan.1,
            tx,
            consumer_stopper.clone(),
            consumer::new(),
        )?;
        consumer_handle = Some(handles.remove(0));
    }

    stopper.stop()?;
    for handle in handles.drain(..) {
        if let Err(e) = handle.join() {
            // join() yields Box<dyn Any> on panic; recover a readable message
            // whether the payload was a String or a &str.
            let error_message = if let Some(s) = e.downcast_ref::<String>() {
                s.clone()
            } else if let Some(&s) = e.downcast_ref::<&str>() {
                s.into()
            } else {
                "unknown error".to_string()
            };

            return Err(Error::GracefulExitFailure(error_message));
        }
    }

    // The temporary consumer is stopped last, after every producer has exited.
    if let Some(handle) = consumer_handle {
        consumer_stopper.stop()?;
        handle.join().ok();
    }

    debug!("Stopped.");
    Ok(())
}
--------------------------------------------------------------------------------
/counterparty-rs/src/indexer/stopper.rs:
--------------------------------------------------------------------------------
use super::{types::error::Error, utils::Broadcaster};
use crossbeam_channel::Receiver;
use std::sync::{Arc, Mutex};
use uuid::Uuid;

/// Channel endpoint that becomes ready when the stopper fires.
pub type Done = Receiver<()>;

/// Cloneable, one-shot shutdown signal. `stop()` flips a shared flag and
/// broadcasts `()` to every subscriber; subsequent calls are no-ops.
#[derive(Clone)]
pub struct Stopper {
    broadcaster: Broadcaster<()>,
    is_stopped: Arc<Mutex<bool>>,
}

impl Stopper {
    pub fn new() -> Self {
        Stopper {
            broadcaster: Broadcaster::new(),
            is_stopped: Arc::new(Mutex::new(false)),
        }
    }

    /// Mark as stopped and notify all subscribers. Idempotent: a second call
    /// returns Ok(()) without broadcasting again.
    pub fn stop(&self) -> Result<(), Error> {
        let mut stopped_guard = self.is_stopped.lock()?;
        if *stopped_guard {
            return Ok(());
        }
        *stopped_guard = true;
        // The mutex guard is still held across the broadcast.
        self.broadcaster.broadcast(())
    }

    /// Register for the stop signal. Fails with `Error::Stopped` if the
    /// stopper has already fired, so callers never wait on a signal that
    /// will not come.
    pub fn subscribe(&self) -> Result<(Uuid, Done), Error> {
        let stopped = self.is_stopped.lock()?;
        if *stopped {
            return Err(Error::Stopped);
        }
        self.broadcaster.subscribe()
    }

    pub fn unsubscribe(&self, id: Uuid) -> Result<(), Error> {
        self.broadcaster.unsubscribe(id)
    }

    /// Whether `stop()` has been called.
    pub fn stopped(&self) -> Result<bool, Error> {
        let stopped = self.is_stopped.lock()?;
        Ok(*stopped)
    }
}
--------------------------------------------------------------------------------
/counterparty-rs/src/indexer/test_utils.rs:
-------------------------------------------------------------------------------- 1 | #![allow(clippy::expect_used, clippy::unwrap_used)] 2 | use std::thread; 3 | 4 | use bitcoin::{hashes::Hash, BlockHash}; 5 | use crossbeam_channel::{unbounded, Receiver, Sender}; 6 | 7 | use super::{stopper::Stopper, types::error::Error}; 8 | 9 | pub fn test_worker(mut worker_fn: F, input_data: I) -> Vec 10 | where 11 | T: Send + 'static, 12 | U: Send + 'static, 13 | F: FnMut(Receiver>, Sender>, Stopper) -> Result<(), Error> 14 | + Clone 15 | + Send 16 | + 'static, 17 | I: IntoIterator, 18 | { 19 | let (tx, rx) = unbounded::>(); 20 | let (out_tx, out_rx) = unbounded::>(); 21 | let stopper = Stopper::new(); 22 | let stopper_clone = stopper.clone(); 23 | 24 | let handle = thread::spawn(move || worker_fn(rx, out_tx, stopper_clone)); 25 | 26 | let mut input_count = 0; 27 | for item in input_data { 28 | tx.send(Box::new(item)).expect("Failed to send input"); 29 | input_count += 1; 30 | } 31 | 32 | let mut output_data = Vec::with_capacity(input_count); 33 | 34 | for _ in 0..input_count { 35 | if let Ok(data) = out_rx.recv() { 36 | output_data.push(*data); 37 | } 38 | } 39 | 40 | stopper.stop().unwrap(); 41 | handle 42 | .join() 43 | .expect("Worker thread paniced") 44 | .expect("Worker returned error"); 45 | 46 | output_data 47 | } 48 | 49 | pub fn test_sha256_hash(i: u32) -> [u8; 32] { 50 | [i as u8; 32] 51 | } 52 | 53 | pub fn test_h160_hash(i: u32) -> [u8; 20] { 54 | [i as u8; 20] 55 | } 56 | 57 | pub fn test_block_hash(i: u32) -> BlockHash { 58 | BlockHash::from_slice(&test_sha256_hash(i)).unwrap() 59 | } 60 | 61 | #[macro_export] 62 | macro_rules! 
new_test_db { 63 | () => {{ 64 | use std::{fs, path::Path}; 65 | 66 | let db_dir = "test_dbs"; 67 | let dir_path = Path::new(db_dir); 68 | if !dir_path.exists() { 69 | fs::create_dir_all(dir_path).expect("Failed to create test database directory"); 70 | } 71 | 72 | let file = Path::new(file!()).file_name().unwrap().to_str().unwrap(); 73 | let line = line!(); 74 | let test_id = format!("{}_{}", file, line).replace(".", "_"); 75 | let db_path_str = format!("{}/test_{}", db_dir, test_id); 76 | let db_path = Path::new(&db_path_str); 77 | 78 | if db_path.exists() { 79 | fs::remove_dir_all(db_path).unwrap(); 80 | } 81 | fs::create_dir_all(db_path).expect("Failed to create test database directory"); 82 | 83 | Database::new(db_path_str) 84 | }}; 85 | } 86 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/types/error.rs: -------------------------------------------------------------------------------- 1 | use crossbeam_channel::SendError; 2 | use pyo3::exceptions::PyException; 3 | use pyo3::PyErr; 4 | use std::sync; 5 | use thiserror::Error; 6 | 7 | #[derive(Error, Debug)] 8 | pub enum Error { 9 | #[error("IO error: {0}")] 10 | IO(#[from] std::io::Error), 11 | #[error("RocksDB error: {0}")] 12 | RocksDB(#[from] rocksdb::Error), 13 | #[error("TryFromSlice error: {0}")] 14 | TryFromSlice(#[from] std::array::TryFromSliceError), 15 | #[error("Recv error: {0}")] 16 | Recv(#[from] crossbeam_channel::RecvError), 17 | #[error("Send error: {0}")] 18 | Send(String), 19 | #[error("KeyParse error: {0}")] 20 | KeyParse(String), 21 | #[error("ValueParse error: {0}")] 22 | ValueParse(String), 23 | #[error("RocksDBIter error: {0}")] 24 | RocksDBIter(String), 25 | #[error("RocksDBColumnFamily error: {0}")] 26 | RocksDBColumnFamily(String), 27 | #[error("U32Conversion error: {0}")] 28 | U32Conversion(String), 29 | #[error("Stopped error")] 30 | Stopped, 31 | #[error("GracefulExitFailure: {0}")] 32 | GracefulExitFailure(String), 33 
| #[error("Block not yet written: {0}")] 34 | BlockNotWritten(u32), 35 | #[error("No hash match found!")] 36 | NoHashMatchFound, 37 | #[error("OpertionCancelled error: {0}")] 38 | OperationCancelled(String), 39 | #[error("Sync error: {0}")] 40 | Sync(String), 41 | #[error("OrderInvariantError: Essential ordering constraint violated between {0} and {1}")] 42 | OrderInvariant(u32, u32), 43 | #[error("Serde JSON error: {0}")] 44 | Serde(#[from] serde_json::Error), 45 | #[error("ParseVout error: {0}")] 46 | ParseVout(String), 47 | #[error("Bitcoin RPC error: {0}")] 48 | BitcoinRpc(String), 49 | #[error("Database error: {0}")] 50 | Database(String), 51 | #[error("System error: {0}")] 52 | System(String), 53 | } 54 | 55 | impl From> for Error { 56 | fn from(value: SendError) -> Self { 57 | Error::Send(value.to_string()) 58 | } 59 | } 60 | 61 | impl From> for Error { 62 | fn from(value: sync::PoisonError) -> Self { 63 | Error::Sync(value.to_string()) 64 | } 65 | } 66 | 67 | impl From for PyErr { 68 | fn from(value: Error) -> PyErr { 69 | PyException::new_err(value.to_string()) 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/types/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod entry; 2 | pub mod error; 3 | pub mod pipeline; 4 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/workers/consumer.rs: -------------------------------------------------------------------------------- 1 | use crossbeam_channel::{select, Receiver, Sender}; 2 | 3 | use crate::indexer::{stopper::Stopper, types::error::Error}; 4 | 5 | pub fn new() -> impl Fn(Receiver, Sender, Stopper) -> Result<(), Error> + Clone { 6 | move |rx, _, stopper| { 7 | let (_, done) = stopper.subscribe()?; 8 | loop { 9 | select! 
{ 10 | recv(done) -> _ => return Ok(()), 11 | recv(rx) -> result => { 12 | if result.is_err() { 13 | return Ok(()) 14 | } 15 | } 16 | } 17 | } 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/workers/extractor.rs: -------------------------------------------------------------------------------- 1 | use crossbeam_channel::{select, Receiver, Sender}; 2 | 3 | use crate::indexer::{ 4 | config::Config, 5 | stopper::Stopper, 6 | types::{error::Error, pipeline::Transition}, 7 | }; 8 | 9 | pub fn new( 10 | config: Config, 11 | ) -> impl Fn(Receiver>, Sender>, Stopper) -> Result<(), Error> + Clone 12 | where 13 | T: Transition, Config, ()>, 14 | { 15 | move |rx, tx, stopper| { 16 | let (_, done) = stopper.subscribe()?; 17 | loop { 18 | select! { 19 | recv(done) -> _ => return Ok(()), 20 | recv(rx) -> result => { 21 | let data = match result { 22 | Ok(data) => data, 23 | Err(_) => return Ok(()), 24 | }; 25 | let (_, s) = data.transition(config.clone())?; 26 | if tx.send(s).is_err() { 27 | return Ok(()); 28 | }; 29 | } 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/workers/fetcher.rs: -------------------------------------------------------------------------------- 1 | use bitcoin::BlockHash; 2 | use crossbeam_channel::{select, Receiver, Sender}; 3 | 4 | use crate::indexer::{ 5 | bitcoin_client::BitcoinRpc, 6 | stopper::Stopper, 7 | types::{ 8 | error::Error, 9 | pipeline::{BlockHasEntries, HasHeight, Transition}, 10 | }, 11 | utils::with_retry, 12 | }; 13 | 14 | pub fn new( 15 | client: C, 16 | ) -> impl Fn(Receiver>, Sender>, Stopper) -> Result<(), Error> + Clone 17 | where 18 | T: HasHeight + Transition, (BlockHash, Box), ()>, 19 | B: BlockHasEntries, 20 | C: BitcoinRpc, 21 | { 22 | move |rx, tx, stopper| { 23 | let (_, done) = stopper.subscribe()?; 24 | loop { 25 | select! 
{ 26 | recv(done) -> _ => return Ok(()), 27 | recv(rx) -> result => { 28 | let data = match result { 29 | Ok(data) => data, 30 | Err(_) => return Ok(()), 31 | }; 32 | 33 | let height = data.get_height(); 34 | let hash = with_retry( 35 | stopper.clone(), 36 | || client.get_block_hash(height), 37 | format!("Error fetching block hash for height {}", height), 38 | )?; 39 | 40 | let block = with_retry( 41 | stopper.clone(), 42 | || client.get_block(&hash), 43 | format!("Error fetching block for hash {}", &hash), 44 | )?; 45 | 46 | let (_, s) = data.transition((hash, block))?; 47 | if tx.send(s).is_err() { 48 | return Ok(()); 49 | }; 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/workers/mod.rs: -------------------------------------------------------------------------------- 1 | use std::thread::{self, JoinHandle}; 2 | 3 | use crossbeam_channel::{Receiver, Sender}; 4 | use tracing::{debug, error}; 5 | 6 | use super::{stopper::Stopper, types::error::Error}; 7 | 8 | pub mod consumer; 9 | pub mod extractor; 10 | pub mod fetcher; 11 | pub mod orderer; 12 | pub mod producer; 13 | pub mod reporter; 14 | pub mod writer; 15 | 16 | pub fn new_worker_pool( 17 | name: String, 18 | n: usize, 19 | rx: Receiver, 20 | tx: Sender, 21 | stopper: Stopper, 22 | f: F, 23 | ) -> Result>>, Error> 24 | where 25 | R: Send + 'static, 26 | T: Send + 'static, 27 | F: FnMut(Receiver, Sender, Stopper) -> Result<(), Error> + Clone + Send + 'static, 28 | { 29 | let mut handles = Vec::new(); 30 | for i in 0..n { 31 | let rx = rx.clone(); 32 | let tx = tx.clone(); 33 | let mut f = f.clone(); 34 | let stopper_clone = stopper.clone(); 35 | let stopper_clone_1 = stopper.clone(); 36 | let name = name.clone(); 37 | 38 | handles.push(thread::spawn(move || { 39 | if let Err(e) = f(rx, tx, stopper_clone) { 40 | if !stopper_clone_1.stopped()? 
{ 41 | error!("{}-{} worker exited with error: {}", name, i, e); 42 | stopper_clone_1.stop()?; 43 | return Err(e); 44 | } 45 | } 46 | 47 | debug!("{}-{} worker exited.", name, i); 48 | Ok(()) 49 | })); 50 | } 51 | 52 | debug!("{} {} workers started", n, name); 53 | Ok(handles) 54 | } 55 | -------------------------------------------------------------------------------- /counterparty-rs/src/indexer/workers/writer.rs: -------------------------------------------------------------------------------- 1 | use crossbeam_channel::{Receiver, RecvTimeoutError, Sender}; 2 | use std::time::Duration; 3 | use tracing::debug; 4 | 5 | use crate::indexer::{ 6 | config::Config, 7 | database::DatabaseOps, 8 | stopper::Stopper, 9 | types::{ 10 | entry::ToEntry, 11 | error::Error, 12 | pipeline::{HasHeight, PipelineDataBatch, Transition}, 13 | }, 14 | utils::in_reorg_window, 15 | }; 16 | 17 | pub fn new( 18 | db: D, 19 | config: Config, 20 | start_height: u32, 21 | reorg_window: u32, 22 | max_num_entries: usize, 23 | ) -> impl FnMut(Receiver>, Sender>>, Stopper) -> Result<(), Error> + Clone 24 | where 25 | T: HasHeight + Transition, (), Vec>>, 26 | D: DatabaseOps, 27 | { 28 | move |rx, tx, stopper| { 29 | let (_, done) = stopper.subscribe()?; 30 | let mut height = start_height - 1; 31 | let mut target_height = start_height; 32 | loop { 33 | if done.try_recv().is_ok() { 34 | return Ok(()); 35 | } 36 | let mut entries = Vec::new(); 37 | let mut batch = Vec::new(); 38 | 39 | while entries.len() < max_num_entries { 40 | match rx.recv_timeout(Duration::from_secs(1)) { 41 | Ok(data) => { 42 | height = data.get_height(); 43 | target_height = data.get_target_height(); 44 | let (mut new_entries, data_out) = data.transition(())?; 45 | entries.append(&mut new_entries); 46 | batch.push(data_out); 47 | } 48 | Err(RecvTimeoutError::Timeout) => break, 49 | Err(RecvTimeoutError::Disconnected) => return Ok(()), 50 | } 51 | } 52 | 53 | if !batch.is_empty() { 54 | let num_entries = entries.len(); 55 | // 
height + 1 because we need one before to satisfy the check 56 | let in_reorg_window_b = in_reorg_window(height + 1, target_height, reorg_window); 57 | let min_index_height = if in_reorg_window_b { 58 | Some(height - reorg_window) 59 | } else { 60 | None 61 | }; 62 | 63 | if !config.only_write_in_reorg_window || in_reorg_window_b { 64 | debug!( 65 | "Writing batch of length {} with max height {} and target height {}", 66 | batch.len(), 67 | height, 68 | target_height 69 | ); 70 | db.write_batch(|batch| { 71 | db.put_entries(batch, min_index_height, &entries)?; 72 | db.put_max_block_height(batch, height) 73 | })?; 74 | } 75 | 76 | let pipeline_batch = PipelineDataBatch { batch, num_entries }; 77 | if tx.send(Box::new(pipeline_batch)).is_err() { 78 | return Ok(()); 79 | } 80 | } 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /counterparty-rs/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod b58; 2 | mod indexer; 3 | mod utils; 4 | 5 | use b58::register_b58_module; 6 | 7 | use indexer::register_indexer_module; 8 | use pyo3::prelude::*; 9 | use pyo3::types::PyString; 10 | use utils::register_utils_module; 11 | 12 | #[pymodule] 13 | fn counterparty_rs(m: &Bound<'_, PyModule>) -> PyResult<()> { 14 | register_b58_module(m)?; 15 | register_utils_module(m)?; 16 | register_indexer_module(m)?; 17 | 18 | m.add( 19 | "__version__", 20 | PyString::new_bound(m.py(), env!("VERGEN_GIT_DESCRIBE")), 21 | )?; 22 | m.add( 23 | "__sha__", 24 | PyString::new_bound(m.py(), env!("VERGEN_GIT_SHA")), 25 | )?; 26 | m.add( 27 | "__target__", 28 | PyString::new_bound(m.py(), env!("VERGEN_CARGO_TARGET_TRIPLE")), 29 | )?; 30 | m.add( 31 | "__build_date__", 32 | PyString::new_bound(m.py(), env!("VERGEN_BUILD_DATE")), 33 | )?; 34 | Ok(()) 35 | } 36 | -------------------------------------------------------------------------------- /release-notes/defunct/counterparty-cli-changelog.md: 
-------------------------------------------------------------------------------- 1 | # ChangeLog for `counterparty-cli` Pre-v9.61.2 2 | 3 | * master (unreleased) 4 | * Added indexd arguments 5 | * removed backend-name argument 6 | * v1.1.4 (2017/10/26) 7 | * Added enhanced send arguments support. 8 | * v1.1.3 (2017/05/01) 9 | * Added `vacuum` command to server CLI. 10 | * v1.1.2 (2016/07/11) 11 | * Added P2SH support (to match counterparty-lib 9.55.0) 12 | * added `get_tx_info` command 13 | * added `--disable-utxo-locks` to `compose_transaction` to disable the locking of selected UTXOs for when the 'user' doesn't intend to broadcast the TX (straight away) 14 | * Peg dependency versions in `setup.py` 15 | * Added `debug_config` argument to print config to CLI. 16 | * Added `--quiet` flag to `bootstrap` command 17 | * Logging improvements 18 | * Removed `rps` and `rpsresolve` commands 19 | * Updated `README.md` 20 | * v1.1.1 (2015/04/20) 21 | * Fix `broadcast` command 22 | * Cleaner, Commented-out Default Config Files 23 | * Support new configuration parameter: `no-check-asset-conservation`, `rpc-batch-size`, `requests-timeout` 24 | * v1.1.0 (2015/03/31) 25 | * Code reorganisation 26 | * Remove `market` command 27 | * Add `getrows` command 28 | * Add `clientapi` module 29 | * Rename `get_running_info` to `getinfo` 30 | * Rename `backend-ssl-verify` to `backend-ssl-no-verify` 31 | * Rename `rpc-allow-cors` to `rpc-no-allow-cors` 32 | * Change installation procedure 33 | * v1.0.1 (2015/03/18) 34 | * Update minimum `counterparty-lib` version from `v9.49.4` to `v9.50.0` 35 | * v1.0.0 (2015/02/05) 36 | * Initial Release -------------------------------------------------------------------------------- /release-notes/release-notes-v10.0.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.0.1 (2024-04-09) 2 | 3 | Hotfix release for #1619 4 | 5 | # ChangeLog 6 | 7 | ## Stability and Correctness 8 | 
* Add missing sanity check in address unpacking for dispensers that causes a complete network crash 9 | 10 | # Credits 11 | * Adam Krellenstein 12 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.1.0.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.1.0 (2024-04-16) 2 | 3 | This release includes fixes for a number of bugs as well as a few regressions in v10.0.x. 4 | 5 | 6 | # Upgrade Procedure 7 | 8 | This upgrade is optional but highly recommended. Upgrading from v10.0.x requires an automatic reparse from block 835,500 on `mainnet`, which should take a few minutes. If you are upgrading directly from v9.x.y, then there will be an automatic full database rebuild, which may take a long time (refer to the release notes for v10.0.0.) 9 | 10 | In order to perform a manual installation, you must first uninstall all existing Counterparty Core Python packages: 11 | 12 | ```bash 13 | pip3 uninstall counterparty-rs counterparty-lib counterparty-cli 14 | ``` 15 | 16 | 17 | # ChangeLog 18 | 19 | ## Bugfixes 20 | * Validate non-empty `block_indexes` in call to `api.get_blocks` (fix for #1621) 21 | * Reproduce order expiration bug in v9.61.x (fix for #1631) 22 | * Fix `get_blocks` call when several block indexes are provided (fix for #1629) 23 | * Fix `create_send` when one of the outputs is a dispenser (fix for #1119) 24 | * Fix `get_dispenser_info` RPC call 25 | 26 | ## Codebase 27 | * Split out `counterparty-cli` package into `counterparty-core` and `counterparty-wallet` packages 28 | * Implement heavy healthz probe (default to light) 29 | * Automatic code checking and correction with Ruff 30 | * Refactor transaction file singleton to class 31 | * Run `PRAGMA optimize` on shutting down 32 | * Run `PRAGMA quick_check` on database initialization 33 | * Temporary disable asset conservation checking after each new block 34 | * Add 
instrumentation for optional Sentry error and performance monitoring 35 | 36 | ## Command-Line Interface 37 | * Rename `counterpary-client` to `counterparty-wallet` 38 | * Add `--skip-db-check` flag to skip database quick check 39 | * Add `--no-mempool` flash to disable mempool parsing 40 | 41 | # Credits 42 | * Ouziel Slama 43 | * Adam Krellenstein 44 | * Warren Puffett 45 | * Matt Marcello 46 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.1.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.1.1 (2024-04-19) 2 | 3 | This is a relatively small release with a number of bugfixes, one of which is critical---in v10.0.x and v10.1.0 there is a bug which can cause nodes to crash upon a blockchain reorganization. 4 | 5 | 6 | # Upgrading 7 | 8 | To upgrade from v10.1.0 manually, you must first uninstall the following Counterparty Core Python packages: 9 | 10 | ```bash 11 | pip3 uninstall counterparty-rs counterparty-lib counterparty-cli 12 | ``` 13 | 14 | This release contains no protocol changes, and the API has not been modified. 15 | 16 | 17 | # ChangeLog 18 | 19 | ## Bugfixes 20 | * Fix missing events (`NEW_BLOCK` and `NEW_TRANSACTION`) when kickstarting and reparsing. To correct the values in the `messages` table, a full reparse is required. 21 | * Fix the current block index after a blockchain reorganisation. 22 | * Fix database shutdown, which caused a recovery of the WAL file on each startup. 
23 | * Eliminate some extraneous error messages 24 | 25 | ## Codebase 26 | * Merge `counterparty-lib` and `counterparty-core` package into `counterparty-core` 27 | * Integrate telemetry with optional Sentry service 28 | 29 | ## Command-Line Interface 30 | * Replace `--no-check-asset-conservation` with `--check-asset-conservation` 31 | * Disable automatic DB integrity check on startup 32 | 33 | # Credits 34 | * Ouziel Slama 35 | * Adam Krellenstein 36 | * Warren Puffett 37 | * Matt Marcello 38 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.1.2.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.1.2 (2024-05-08) 2 | 3 | This version of Counterparty Core marks the release of API v2, a new RESTful API—see the [official project documentation](https://docs.counterparty.io/docs/advanced/api-v2/node-api/). The new API is available at `/v2/`, while the old API is now available at `/v1/` in addition to `/`. 4 | 5 | 6 | # Upgrading 7 | 8 | There is a [guide for migrating from the v1 to the v2 API](https://docs.counterparty.io/docs/advanced/api-v2/v1-to-v2/) in the documentation, which specifies equivalences between old and new functionality. 9 | 10 | This release maintains full backwards-compatibility and includes no protocol changes. 
11 | 12 | 13 | # ChangeLog 14 | 15 | ## Bugfixes 16 | * Fix logging of some raw tracebacks (#1715) 17 | * Retry on `ChunkedEncodingError` with AddrIndexRs; break loop on all errors 18 | * Fix bad logging of Rust module panic (#1721) 19 | 20 | 21 | ## Codebase 22 | * Release API v2 23 | * Have both API v1 and v2 return `ready` if the last block is less than one minute old 24 | * Add an index on the `block_index` field in the `credits` and `debits` tables 25 | * Add `TRACE` level to Python logging 26 | * Add basic anonymous node telemetry 27 | 28 | ## Command-Line Interface 29 | * Set default and minimum values for Backend Poll Interval to 3.0 seconds 30 | * Update `docker-compose.yml` to use different profiles for `mainnet` and `testnet` 31 | * Check that another process is not connected to the database before starting the server 32 | * Launch database quick check on startup if the database has not been correctly shut down 33 | * Support an additional level of verbosity with the CLI flags `-vv` 34 | * Add the `--no-telemetry` flag to disable node telemetry 35 | 36 | 37 | # Credits 38 | * Ouziel Slama 39 | * Adam Krellenstein 40 | * Warren Puffett 41 | * Matt Marcello 42 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.10.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.10.1 (2025-03-03) 2 | 3 | This release includes two major improvements to the Counterparty Core codebase: (1) faster and more reliable node shutdown, (2) ~50% faster initial node catchup. It also includes a number of usability improvements to the API and CLI, as well as bugfixes. 4 | 5 | # Upgrading 6 | 7 | **Breaking Changes:** 8 | The commands `get_asset_names` and `get_asset_longnames` have been removed from API v1, as they are buggy and extremely non-performant. If you have been using these endpoints, you should migrate to `/v2/assets`. 
9 | 10 | **Upgrade Instructions:** 11 | To upgrade, download the latest version of `counterparty-core` and restart `counterparty-server`. 12 | 13 | With Docker Compose: 14 | 15 | ```bash 16 | cd counterparty-core 17 | git pull 18 | docker compose stop counterparty-core 19 | docker compose --profile mainnet up -d 20 | ``` 21 | 22 | or use `ctrl-c` to interrupt the server: 23 | 24 | ```bash 25 | cd counterparty-core 26 | git pull 27 | cd counterparty-rs 28 | pip install -e . 29 | cd ../counterparty-core 30 | pip install -e . 31 | counterparty-server start 32 | ``` 33 | 34 | # ChangeLog 35 | 36 | ## Bugfixes 37 | 38 | - Handle subprocess errors correctly when bootstrapping 39 | - Fix `getrawtransaction_batch()` for batches greater than `config.MAX_RPC_BATCH_SIZE` 40 | - Improve error handling for when a port in use 41 | - Fix ungraceful ZeroMQ failure 42 | - Fix Conservation Check failing ungracefully 43 | - Implement cleaner Gunicorn shutdown 44 | - Fix ungraceful Waitress shutdown 45 | - Handle RSFetcher version mismatch error correctly 46 | - Handle Counterparty Server version checking errors correctly 47 | - Fix the handling of `TypeError` in API calls 48 | 49 | ## Codebase 50 | 51 | - Tweak logging during Bitcoin Core catch up 52 | - Batch `getrawtransaction` for `get_vin_info()` 53 | - Create events indexes after catch up rather than before 54 | - Make RPC calls to get transaction input info with RSFetcher 55 | - Make RSFetcher compatible with HTTPS 56 | - Fix all code scanner alerts (Bandit, CodeQL, Pylint) 57 | - Only print debug messages about Counterparty being behind Bitcoin Core every 10 seconds 58 | - Add missing indexes to the `sends` table 59 | 60 | ## API 61 | 62 | - Check balances when composing `detach` transaction 63 | - Add a `show_unconfirmed` parameter for Get Transactions endpoints 64 | - Add a `count_confirmed` parameter for Get Transactions Count endpoints 65 | - Add a `X-LEDGER-STATE` header to all API responses 66 | - Add a `ledger_state` 
field in API v2 root endpoint 67 | - Remove `get_asset_names` and `get_asset_longnames` commands from API v1 68 | 69 | ## CLI 70 | 71 | - Accept `--catch-up` flag before the command 72 | - Add Locust runner to test local node performance (`python3 counterpartycore/test/integration/locustrunner.py` will start the Locust web UI on http://localhost:8089/) 73 | - Add `--profile` flag that enables cProfile during catchup and dumps results to the console when complete 74 | - Add `--rebuild` command to re-sync from scratch and then stop the server 75 | - Add memory database cache for `address_events` table 76 | 77 | # Credits 78 | 79 | - Ouziel Slama 80 | - Adam Krellenstein 81 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.3.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.3.1 (2024-08-07) 2 | 3 | This release is a relatively minor update with a large number of improvements to the node API, as well as a few important bugfixes and tweaks to the CLI and telemetry subsystems. 4 | 5 | # Upgrading 6 | 7 | This release is not a protocol change and does not require any reparsing. 
8 | 9 | # ChangeLog 10 | 11 | ## Bugfixes 12 | 13 | * Fix cache for `/v2/bitcoin/*` routes 14 | * Fix queries by `asset_longname` 15 | * Fix capture exception by Sentry 16 | * Be sure not to cache `orders` and dispenser addresses from mempool 17 | * Catch `UnicodeError` in `transactions.unpack()` function 18 | * Fix `script_to_address()` function on Regtest 19 | 20 | ## Codebase 21 | 22 | ## API 23 | 24 | * Add the following routes: 25 | - Get Balances By Addresses 26 | - Get Events By Addresses 27 | - Get Mempool Events By Addresses 28 | - Get Order Matches By Asset 29 | - Get Order Matches By Two Assets 30 | - Get Subassets By Asset 31 | - Get Unspent Txouts By Addresses 32 | * Capitalize `` in routes 33 | * Accept `asset_longname` for `GET /v2/assets//issuances` 34 | * Add default values for the `locked` and `reset` fields in `issuances` table 35 | * Add XCP to the `assets_info` table 36 | * Remove `timestamp` from events in API results 37 | * Standardize the format of mempool events and confirmed events 38 | * Use strings instead of integers to query `dispensers` by status 39 | * Accept several statuses for querying `dispensers`, `orders` and `order_matches` 40 | * Add `sort` argument for the following routes: 41 | - `/v2/assets//balances` 42 | - `/v2/addresses/
/balances` 43 | - `/v2/addresses/balances` 44 | * Sort `orders` by `tx_index DESC` 45 | * Insert `return_psbt` argument for compose endpoints 46 | * Insert `market_price` when getting orders or order matches by two assets 47 | * Make queries to get orders or order matches by two assets case-insensitive 48 | * Make unconfirmed objects (`transactions`, `issuances`, `orders`, etc.) accessible via the API with the parameter `?show_unconfirmed=true` 49 | * Inject `fiat_price` and oracle info in dispensers 50 | * Include decoded transaction in the result of `/v2/transactions/info` 51 | * Return `null` when `destination` field is empty 52 | * Support comma-separated addresses for the `
 ` value in API routes 53 | * Catch `CBitcoinAddressError` correctly 54 | * Return a 400 error instead of a 503 on `ComposeError` and `UnpackError` 55 | 56 | ## CLI 57 | 58 | * Add `--json-logs` flag for displaying logs in the JSON format 59 | * Send telemetry data after each block 60 | * Reduce Sentry Trace Sample Rate to 10% 61 | 62 | 63 | # Credits 64 | 65 | * Ouziel Slama 66 | * Adam Krellenstein 67 | * Warren Puffett 68 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.3.2.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.3.2 (2024-08-11) 2 | 3 | This release is a minor update with some bugfixes. 4 | 5 | # Upgrading 6 | 7 | This release is not a protocol change and does not require any reparsing. 8 | 9 | # ChangeLog 10 | 11 | ## Bugfixes 12 | 13 | * Fix `get_value_by_block_index()` on `regtest` 14 | * Fix events hash mismatch after a reparse 15 | * Fix `regtest` default ports 16 | * Fix `/v2/assets/XCP` route 17 | * Fix queries on `messages` table (remove mempool filtering) 18 | * Fix graceful closing of `counterparty.db` 19 | * Fix Get XCP Holders route 20 | * Fix division by zero in API market prices 21 | 22 | ## Codebase 23 | 24 | ## API 25 | 26 | ## CLI 27 | 28 | * Move Counterparty Node UUID from `~/counterparty-node-uuid` to `~/.local/state/counterparty/.counterparty-node-uuid` 29 | 30 | # Credits 31 | 32 | * Ouziel Slama 33 | * Adam Krellenstein 34 | * Warren Puffett 35 | * Matt Marcello -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.1 (2024-09-24) 2 | 3 | This release is a minor update with miscellaneous bugfixes. 
4 | 5 | # Upgrading 6 | 7 | This release fixes an old bug leading to potential undefined behavior for dispenser refills. No database reparse is required. 8 | 9 | # ChangeLog 10 | 11 | ## Protocol Changes 12 | 13 | ## Bugfixes 14 | 15 | * Fix order cache: update cache when an order is filled 16 | * Fix typo in `protocol_changes.json` 17 | * Fix division by zero in `api.util.divide()` 18 | * Catch invalid raw transaction in `/v2/transactions/info` endpoint 19 | * Fix duplicate command in `xcpcli.py` 20 | * Fix `source` field when refilling a dispenser created by a different source 21 | 22 | ## Codebase 23 | 24 | * Don't report expected errors to Sentry in API v1 25 | * Use `trace` instead of `warning` for "Prefetch queue is empty." message 26 | * Use debug for expected and handled `yoyo` migration error 27 | * Support Python 3.10 and 3.11 only 28 | * Refactor and clean up `transaction.py`. The contents of this file are now distributed across `lib/api/compose.py`, `lib/transaction_helper/transaction_outputs.py`, and `lib/transaction_helper/transaction_inputs.py`. 
29 | 30 | ## API 31 | 32 | * Add support for `inputs_set` parameter 33 | * Rename the `fee` argument to `exact_fee` (the `fee` argument is still available in API v1) 34 | * Have the composition API return, in addition to a `rawtransaction` or a `psbt`, the following fields: `data`, `btc_in`, `btc_out`, `btc_change`, and `btc_fee` 35 | * Add `sort` argument for `orders`, `order_matches` and `dispenser` 36 | * Add the following route: 37 | - `/v2/transactions//info` (This route works if the tx is in the mempool of the queried node) 38 | 39 | ## CLI 40 | 41 | # Credits 42 | 43 | * Ouziel Slama 44 | * Warren Puffett 45 | * Adam Krellenstein 46 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.2.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.2 (2024-10-02) 2 | 3 | This is a small but important release which includes fixes for a number of node stability issues and updates to the API. All node hosts should upgrade as soon as possible. 4 | 5 | 6 | # Upgrading 7 | 8 | This release is not a protocol change and does not require any reparsing. A regression in the v1 API has been resolved. 
9 | 10 | 11 | # ChangeLog 12 | 13 | ## Protocol Changes 14 | 15 | 16 | ## Bugfixes 17 | 18 | * Retry indefinitely when RSFetcher cannot connect to Bitcoin Core 19 | * Fix RSFetcher startup logic 20 | * Restart RSFetcher when it is found to have been stopped 21 | * Fix JSON serialization of `bytes` in API v1 22 | 23 | ## Codebase 24 | 25 | * Fix Docker Compose test 26 | * Fetch old mempool entries from Bitcoin Core after node startup 27 | 28 | ## API 29 | 30 | * Disable expiration of mempool entries after 24 hours 31 | * Expose timestamp field for mempool transactions (for client-side filtering) 32 | * Revert accidental change in API v1 (renamed `unsigned_tx_hex` to `tx_hex` in `create_*()` result) 33 | * Disable `p2sh` encoding, which no longer works with recent versions of Bitcoin Core 34 | 35 | ## CLI 36 | 37 | 38 | # Credits 39 | 40 | * Ouziel Slama 41 | * Warren Puffett 42 | * Adam Krellenstein 43 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.3.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.3 (2024-10-05) 2 | 3 | This is a minor release with a number of bugfixes and minor improvements to the API. 4 | 5 | 6 | # Upgrading 7 | 8 | This release is not a protocol change and does not require a database reparse. 
9 | 10 | 11 | # ChangeLog 12 | 13 | ## Bugfixes 14 | 15 | - Fix `asset_events` during an asset ownership transfer 16 | - Refresh XCP supply in API DB on startup 17 | - Clean mempool after each block when catching up 18 | - Tweak mempool cleaning in API Watcher 19 | - Fix `AttributeError` on `get_transactions` (API v1) 20 | - Catch `BadRequest` error (API v2) 21 | - Fix checking when a fairmint reaches the hard cap 22 | - Fix `--no-mempool` flag 23 | 24 | ## Codebase 25 | 26 | - Add `regtest` and `mainnet` test for the `healthz` endpoint 27 | - Re-enable `check.asset_conservation()` and run it in the background, in a separate thread, both at startup and every 12 hours 28 | - Add checkpoints for block 866000 and block 866300 29 | 30 | ## API 31 | 32 | - Use the GitHub repository for the Blueprint URL 33 | - Add the `/v2/routes` route in the `/v2/` result 34 | - Add the `addresses` argument to the `/v2/mempool/events` route 35 | - Support prefixed data for `/v2/transactions/unpack` 36 | - Return assets issued and owned by `
` in `/v2/addresses/
/assets` 37 | - Add the following routes: 38 | - `/v2/addresses/
/assets/issued` 39 | - `/v2/addresses/
/assets/owned` 40 | 41 | ## CLI 42 | 43 | 44 | # Credits 45 | 46 | * Ouziel Slama 47 | * Warren Puffett 48 | * Adam Krellenstein 49 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.4.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.4 (2024-10-09) 2 | 3 | This is a minor release with a number of bugfixes and minor improvements to the API. 4 | 5 | # Upgrading 6 | 7 | This release is not a protocol change and does not require a database reparse. 8 | 9 | # ChangeLog 10 | 11 | ## Bugfixes 12 | 13 | - Handle invalid scripts in outputs properly 14 | - Fix `last_block` in `get_running_info` command (API v1) 15 | - Fix blockchain reorganization support 16 | 17 | ## Codebase 18 | 19 | - Add `regtest` support in RSFetcher 20 | 21 | ## API 22 | 23 | - Add Gunicorn support 24 | 25 | ## CLI 26 | 27 | - Add `wsgi-server` (`werkzeug` or `gunicorn`) and `gunicorn-workers` flags 28 | - Enable Sentry Caches and Queries pages 29 | 30 | # Credits 31 | 32 | * Ouziel Slama 33 | * Warren Puffett 34 | * Adam Krellenstein 35 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.5.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.5 (2024-10-16) 2 | 3 | This is a minor release with a number of bugfixes and minor improvements to the API. 4 | 5 | # Upgrading 6 | 7 | This release is not a protocol change and does not require a database reparse. However a minor bug in the reorg-handling logic was detected and fixed. If your node hashes for block 865699 were not equal to L: 6d64717, TX: f71fa92, M: 8734a58, then you should either rollback to 865690 or restart with the latest bootstrap file. This bug is not believed to have affected node consensus. 
 8 | 9 | # ChangeLog 10 | 11 | ## Bugfixes 12 | 13 | - Fix `TypeError` in `is_server_ready()` function 14 | - Handle `AddressError` in API calls 15 | - Fix RSFetcher pre-fetcher queue 16 | - Fix RSFetcher blockchain reorganization management 17 | 18 | ## Codebase 19 | 20 | - Retry when Bitcoin Core returns a 503 error 21 | 22 | ## API 23 | 24 | - Use UTXOs locks when `unspents_set` is used (formerly `custom_inputs`) 25 | - Tweak and fix `asset_events` field (new events `transfer` and `change_description`; `reissuance` only if `quantity` greater than 0; `lock` also when locked with the `lock` argument) 26 | - Add Waitress WSGI server support and make it the default 27 | - Fix missing parentheses in SQL queries 28 | - Fix `dispenser.close_block_index` type in API database 29 | - Set CORS in pre-flight requests 30 | 31 | ## CLI 32 | 33 | 34 | 35 | # Credits 36 | 37 | * Ouziel Slama 38 | * Warren Puffett 39 | * Adam Krellenstein 40 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.6.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.6 (2024-10-17) 2 | 3 | This is a Hotfix release to address the slowness of block parsing. 4 | 5 | # Upgrading 6 | 7 | # ChangeLog 8 | 9 | ## Bugfixes 10 | 11 | 12 | ## Codebase 13 | 14 | - Use of RAM cache to verify balances attached to a UTXO 15 | 16 | ## API 17 | 18 | ## CLI 19 | 20 | 21 | 22 | # Credits 23 | 24 | * Ouziel Slama 25 | * Warren Puffett 26 | * Adam Krellenstein 27 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.7.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.7 (2024-10-17) 2 | 3 | This is a hotfix release to fix API v1 with Waitress. 
 4 | 5 | # Upgrading 6 | 7 | # ChangeLog 8 | 9 | ## Bugfixes 10 | 11 | - Fix API v1 with Waitress (remove hop-by-hop headers) 12 | 13 | ## Codebase 14 | 15 | - Check API v1 in regtest test suite 16 | 17 | ## API 18 | 19 | - Use `127.0.0.1` instead of `localhost` as default for `API_HOST` and `RPC_HOST` (to force IPv4) 20 | 21 | ## CLI 22 | 23 | 24 | 25 | # Credits 26 | 27 | * Ouziel Slama 28 | * Warren Puffett 29 | * Adam Krellenstein 30 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.4.8.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.4.8 (2024-10-17) 2 | 3 | This is a hotfix release to fix a number of additional issues that arose with the recent protocol changes. 4 | 5 | # Upgrading 6 | 7 | This is not a protocol change, and no database reparsing is necessary. 8 | 9 | 10 | # ChangeLog 11 | 12 | ## Bugfixes 13 | 14 | - Fix fair minting rollback 15 | - Fix API server crash due to missing sanity check 16 | - Retry maximum 10 times on Bitcoin Core RPC call error 17 | 18 | ## Codebase 19 | 20 | ## API 21 | 22 | ## CLI 23 | 24 | 25 | 26 | # Credits 27 | 28 | * Ouziel Slama 29 | * Warren Puffett 30 | * Adam Krellenstein 31 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.6.0.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.6.0 (2024-10-24) 2 | 3 | This release includes a protocol change to fix a regression for the case when there have been multiple dispensers opened at a single address. The bug prevents users from triggering dispensers at addresses where there have previously been closed dispensers (rather than simply re-opened dispensers). 4 | 5 | 6 | # Upgrading 7 | 8 | This release is a protocol change from mainnet block 868,300 (in about six days). 
It also includes a backwards-incompatible change in the API: 9 | 10 | - `/v2/addresses/
/balances/` and `/v2/assets//balances/
` now return a list that may include balances attached to UTXOs of `
 `. 11 | 12 | This release also includes a bugfix for chained UTXO movements within the same block. This bugfix requires an automatic reparse starting from block 867000. Given the current slowdowns in catching up with the API database, we recommend using `counterparty-server bootstrap` before restarting your server. 13 | 14 | *IMPORTANT* All wallets should use the `compose_dispense()` call to trigger dispenses rather than the legacy `create_send()`. Due to the above bug, using `create_send()` can make it possible for users to send BTC to an address where the dispense will fail. All node hosts should migrate to `compose_dispense()` as soon as possible. 15 | 16 | 17 | # ChangeLog 18 | 19 | ## Protocol Changes 20 | 21 | - Block 868300: Dispenses are now triggered if *at least* one dispenser on the address is valid rather than only if all of them are valid. 22 | 23 | ## Bugfixes 24 | 25 | - Catch invalid pubkeys in the compose API correctly 26 | - Run reparse only if necessary 27 | - Fix `message_data` when retrieving information about fairminter or fairmint transactions 28 | - Use `threading.Event()` to cleanly stop threads and subprocesses started by `counterparty-server` 29 | - Don't update UTXOs balances cache on mempool transactions 30 | - Update UTXOs balances cache before transaction parsing to catch chained UTXO moves in the same block 31 | 32 | ## Codebase 33 | 34 | - Use a lock file for RS Fetcher thread 35 | - Add checkpoint for block 867290 36 | 37 | ## API 38 | 39 | - Have `/v2/addresses/
/balances/` and `/v2/assets//balances/
` now return a list that may include balances attached to UTXOs of `
` 40 | - Add the following routes: 41 | * `/v2/blocks//fairminters` 42 | * `/v2/blocks//fairmints` 43 | * `/v2/compose/attach/estimatexcpfees` 44 | - Add `status` argument for Fairminters routes 45 | - Make `/blocks/last` faster by adding an index to the `ledger_hash` field 46 | - Have `/v2/addresses/
/sweeps` now also search by the `destination` field 47 | - Add `asset_events` argument for Issuances routes 48 | - Raise an error on `fairmint.compose()` when the fairminter is free and the quantity is not zero 49 | - Add `get_asset` and `give_asset` arguments for the `/v2/orders` route 50 | 51 | ## CLI 52 | 53 | - Add support for `--bootstrap-url` to `start` command 54 | 55 | # Credits 56 | 57 | * Ouziel Slama 58 | * Adam Krellenstein 59 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.6.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.6.1 (2024-10-28) 2 | 3 | This is a minor release to address a few small bugs in the v2 API, especially for MPMA, and to fill out API support for the management of assets attached to UTXOs. 4 | 5 | 6 | # Upgrading 7 | 8 | This upgrade is not a protocol change and no automatic reparse is necessary. 9 | 10 | 11 | # ChangeLog 12 | 13 | ## Protocol Changes 14 | 15 | ## Bugfixes 16 | 17 | - Fix heavy `healthz` check 18 | - Raise a `ComposeError` in `mpma.compose()` if `memo` is not a string or if `memo_is_hex` is not a boolean 19 | - Send API v2 log messages to the `config.API_LOG` logfile 20 | - Create a dust output when attaching an asset to a UTXO without a destination address 21 | - Fix dust value in compose move to UTXO 22 | 23 | ## Codebase 24 | 25 | 26 | ## API 27 | 28 | - Add `memos` and `memos_are_hex` parameters to the MPMA compose API. When using MPMA sends, one memo must be provided for each destination if these parameters are used. 
29 | - Add the `/v2/utxos//balances` route 30 | - Exclude UTXOs containing balances by default when composing transactions 31 | - Add `use_utxos_with_balances` and `exclude_utxos_with_balances` parameters to the compose API 32 | 33 | ## CLI 34 | 35 | 36 | # Credits 37 | 38 | * Ouziel Slama 39 | * Adam Krellenstein 40 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.7.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.7.1 (2024-11-19) 2 | 3 | This is a hotfix release that must be installed immediately. It fixes a bug in the parsing of Fairminters transactions that crashes the server. 4 | 5 | # Upgrading 6 | 7 | 8 | # ChangeLog 9 | 10 | ## Protocol Changes 11 | 12 | ## Bugfixes 13 | 14 | - Don't raise error on unpack exceptions 15 | 16 | ## Codebase 17 | 18 | ## API 19 | 20 | ## CLI 21 | 22 | 23 | # Credits 24 | 25 | * Ouziel Slama 26 | 27 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.7.2.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.7.2 (2024-11-24) 2 | 3 | This is a minor release with a large number of bugfixes and quality-of-life improvements. 4 | 5 | 6 | # Upgrading 7 | 8 | This upgrade is not a protocol change and does not require an automatic reparse. 
9 | 10 | 11 | # ChangeLog 12 | 13 | ## Protocol Changes 14 | 15 | ## Bugfixes 16 | 17 | - Fix CORS headers for `OPTIONS` requests 18 | - Fix rounding error on normalized quantity 19 | - Use `null` instead of `''` for `asset_longname` and `asset_parent` fields 20 | - Correctly catch `ValueError` in unpack endpoint 21 | - Correctly catch `InvalidBase58Error` in compose endpoints 22 | - Correctly catch `BitcoindRPCError` in get transaction info endpoint 23 | - Fix typo in dispenser error messages (`has` -> `have`) 24 | - Fix get balances endpoint when using `sort=asset` 25 | - Catch all errors when using unpack endpoint with invalid data 26 | - Restart RSFetcher when it returns `None` 27 | - Clean up blocks without ledger hash before starting catch-up 28 | - Don't inject details before publishing events with ZMQ 29 | - Populate `address_events` also with UTXO events (attach, detach and move) 30 | - Fix `compose_movetoutxo` documentation 31 | - Fix `block.close_block_index` field type 32 | - Set `issuances.reset` and `issuances.locked` default value to False instead None 33 | - Fix error message when trying to compose a dispense with the dispenser address as the source 34 | - Fix utxo balances checking 35 | 36 | ## Codebase 37 | 38 | - Replace `counterparty.api.db` with `state.db` 39 | - Add `issuances.asset_events`, `dispenses.btc_amount` and `mempool.addresses` field in Ledger DB 40 | - Remove duplicate table from `state.db` 41 | - Add `api/dbbuilder.py` module and refactor migrations to build `state.db` 42 | - Use migrations to rollback `state.db` 43 | - Remove rollback event by event in `state.db` 44 | - Add version checking for `state.db`: launch a rollback when a reparse or a rollback is necessary for the Ledger DB 45 | - Use `event_hash` to detect Blockchain reorganization and launch a rollback of `state.db` 46 | - Refactor functions to refresh `util.CURRENT_BLOCK_INDEX` in `wsgi.py` 47 | 48 | ## API 49 | 50 | - Add `sort` parameter for the get holders endpoint 
(sortable fields: `quantity`, `holding_type`, and `status`) 51 | - Exclude blocks that are not finished being parsed 52 | - Optimize events counts endpoints with `events_count` table 53 | - Add route `/v2/utxos/withbalances` to check if utxos have balances 54 | - Add `type` parameter for get balances endpoints (`all`, `utxo` or `address`) 55 | - Add `description_locked` in asset info 56 | - Return a list of invalid UTXOs when possible 57 | 58 | ## CLI 59 | 60 | - Support the `SENTRY_SAMPLE_RATE` environment variable to set the Sentry sample rate 61 | - Show help if no actions are provided 62 | - Fix and rename `--check-asset-conservation` flag to `--skip-asset-conservation-check` 63 | - Add `build-state-db` command 64 | - `rollback` and `reparse` commands trigger a re-build of the State DB 65 | 66 | # Credits 67 | 68 | * droplister 69 | * Ouziel Slama 70 | * Adam Krellenstein 71 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.7.3.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.7.3 (2024-11-26) 2 | 3 | This is a hotfix release that must be installed immediately. It fixes a bug in the parsing of Attach and Detach transactions that crashes the server. 
4 | 5 | # Upgrading 6 | 7 | 8 | # ChangeLog 9 | 10 | ## Protocol Changes 11 | 12 | ## Bugfixes 13 | 14 | - Don't raise error on unpack exceptions 15 | 16 | ## Codebase 17 | 18 | ## API 19 | 20 | ## CLI 21 | 22 | 23 | # Credits 24 | 25 | * Ouziel Slama 26 | 27 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.7.4.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.7.4 (2024-12-09) 2 | 3 | This is a hotfix release that addresses a deterministic node crash due to the fact that Enhanced Sends and MPMAs weren't setting the `msg_index` value correctly. This bug was triggered by a UTXO send and an Enhanced Send being combined in a single Bitcoin transaction. All node operators should upgrade immediately. 4 | 5 | # Upgrading 6 | 7 | This upgrade is required to address a critical protocol-level bug. No reparse is necessary. 8 | 9 | 10 | # ChangeLog 11 | 12 | ## Protocol Changes 13 | 14 | ## Bugfixes 15 | 16 | - Ensure `msg_index` value is set for Enhanced Send 17 | - Ensure `msg_index` is unique for MPMA 18 | 19 | ## Codebase 20 | 21 | ## API 22 | 23 | ## CLI 24 | 25 | 26 | # Credits 27 | 28 | * Ouziel Slama 29 | -------------------------------------------------------------------------------- /release-notes/release-notes-v10.9.1.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v10.9.1 (2025-01-22) 2 | 3 | This is a small release that includes numerous bug and stability fixes, as well as a major refactor of the codebase in the name of improving testability. In particular, there was a bug in the Bitcoin fee calculation algorithm released as part of v10.9.0 which causes `detach` and `move` transactions composed with the API to have a higher fee than they should. 
 4 | 5 | 6 | # Upgrading 7 | 8 | This release is not a protocol change and upgrading is not mandatory, but it is highly recommended. 9 | 10 | IMPORTANT: 11 | - If you are running a version lower than 10.9.0, you must first update to 10.9.0 and only then install v10.9.1. 12 | - If you are running a testnet4 node, you need to rollback to block 64492 manually before starting the server process 13 | 14 | 15 | # ChangeLog 16 | 17 | ## Protocol Changes 18 | 19 | ## Bugfixes 20 | 21 | - Fix ignored deprecated parameters in Compose API 22 | - Fix Get Mempool Events By Addresses endpoint for `attach`, `detach` and UTXO `move` 23 | - Retry ten times on telemetry request errors 24 | - Return "not implemented" error when trying to get info about RPS transactions 25 | - Fix typo in `protocol_changes.json` for testnet4 26 | - Fix incorrect fee calculation for `detach` and `move` 27 | - Don't retry to get vin info when parsing the mempool, which can cause nodes to lock up 28 | 29 | ## Codebase 30 | 31 | - Remove all counterparty-wallet functionality 32 | - Split up the test vectors file 33 | - Move Python tests from `counterparty-rs` to `counterparty-core` 34 | - Reorganize files and functions; split too-big files; delete all unused functions 35 | - Remove globals in `lib/util.py` and `ledger.py` 36 | - Use `yoyo` migrations to update the database 37 | - Add stacktrace when warning because of Bitcoin Core errors 38 | 39 | ## API 40 | 41 | - Add the `no_dispense` parameter to allow API clients to bypass the dispense transaction creation even when the destination is a dispenser 42 | - Add the `event_name` parameter to Get Mempool Events By Addresses endpoint 43 | - Have `sat_per_vbyte` parameter accept a float 44 | - Check addresses and hashes format in parameters 45 | 46 | ## CLI 47 | 48 | 49 | # Credits 50 | 51 | - Ouziel Slama 52 | - Adam Krellenstein 53 | -------------------------------------------------------------------------------- /release-notes/release-notes-v9.61.2.md: 
-------------------------------------------------------------------------------- 1 | # Release Notes - `counterparty-lib` v9.61.2 (2024-02-28) 2 | 3 | This is a hotfix release for three critical bugs, each of which causes the network suddenly to halt when triggered. 4 | 5 | # Bugfixes 6 | * Fix integer overflow in dispensers 7 | * Invalidate broadcast with malformed text 8 | * Fix logging for destructions with invalid asset 9 | 10 | # Credits 11 | * Juan Alemán 12 | * Javier Varona 13 | * Dan Anderson 14 | * Curtis Lacy 15 | * Ouziel Slama 16 | * Adam Krellenstein 17 | -------------------------------------------------------------------------------- /release-notes/release-notes-v9.61.3.md: -------------------------------------------------------------------------------- 1 | # Release Notes - Counterparty Core v9.61.3 (2024-04-09) 2 | 3 | Hotfix release for #1619 4 | 5 | # ChangeLog 6 | 7 | ## Stability and Correctness 8 | * Add missing sanity check in address unpacking for dispensers that causes a complete network crash 9 | 10 | # Credits 11 | * Adam Krellenstein 12 | -------------------------------------------------------------------------------- /ruff.toml: -------------------------------------------------------------------------------- 1 | line-length = 100 2 | 3 | [lint] 4 | select = [ 5 | "E4", # pycodestyle 6 | "E7", 7 | "E9", 8 | "F", # pyflakes 9 | "S", # flake8-bandit 10 | "I", # isort 11 | "PLC", # pylint conventions 12 | "PLE", # pylint error 13 | "PLR", # pylint refactor 14 | "PLW", # pylint warning 15 | "B" # bugbear 16 | ] 17 | ignore = [ 18 | "E501", # line too long 19 | "S101", # bandit: use of assert 20 | ] 21 | --------------------------------------------------------------------------------