├── tests
├── __init__.py
├── miners
│ ├── __init__.py
│ ├── test_challenge.py
│ └── test_blacklist.py
├── validators
│ ├── __init__.py
│ ├── utils
│ │ ├── __init__.py
│ │ └── test_uids.py
│ ├── balance_query_regex.json
│ ├── funds_flow_query_regex.json
│ ├── balance_query_script.json
│ ├── bitcoin_balance_tracking_query.py
│ ├── funds_flow_query_script.json
│ ├── bitcoin_funds_flow_query.py
│ ├── bitcoin_funds_flow_query_2.py
│ ├── test_uptime.py
│ └── test_benchmark.py
├── nodes
│ └── test_base_node.py
└── test_template_validator.py
├── .dependencies_installed
├── neurons
├── miners
│ ├── __init__.py
│ ├── blacklist.py
│ └── llm_client
│ │ └── __init__.py
├── nodes
│ ├── __init__.py
│ ├── bitcoin
│ │ ├── __init__.py
│ │ └── node_utils.py
│ ├── factory.py
│ └── abstract_node.py
├── validators
│ ├── __init__.py
│ ├── challenge_factory
│ │ ├── __init__.py
│ │ └── balance_challenge_factory.py
│ ├── utils
│ │ ├── read_json.py
│ │ ├── ping.py
│ │ ├── synapse.py
│ │ ├── uids.py
│ │ └── metadata.py
│ └── scoring.py
├── setup_logger.py
├── __init__.py
├── utils.py
└── storage.py
├── template
├── api
│ ├── __init__.py
│ ├── dummy.py
│ ├── get_query_axons.py
│ └── examples
│ │ └── subnet21.py
├── base
│ ├── __init__.py
│ └── utils
│ │ └── __init__.py
├── utils
│ ├── __init__.py
│ ├── uids.py
│ └── misc.py
├── validator
│ ├── __init__.py
│ ├── reward.py
│ └── forward.py
├── __init__.py
├── subnet_links.py
├── protocol.py
└── mock.py
├── docs
├── imgs
│ ├── logo.png
│ ├── subnet15.png
│ ├── hla_system_context.png
│ ├── hla_container_context.png
│ └── scoring
│ │ ├── blockchain_weight.png
│ │ ├── block_height_function.png
│ │ ├── process_time_function.png
│ │ └── recency_score_function.png
├── what_are_subnets.md
├── stream_tutorial
│ ├── client.py
│ ├── config.py
│ └── protocol.py
└── scoring_function_explanation.md
├── .dockerignore
├── scripts
├── setup.sh
├── run_validator.sh
├── run_miner.sh
├── check_requirements_changes.sh
├── check_compatibility.sh
└── install_staging.sh
├── .idea
├── vcs.xml
├── .gitignore
├── inspectionProfiles
│ ├── profiles_settings.xml
│ └── Project_Default.xml
├── encodings.xml
├── modules.xml
├── misc.xml
├── blockchain-data-subnet.iml
└── dataSources.xml
├── requirements.txt
├── .env.template
├── Dockerfile
├── .github
└── workflows
│ ├── branch-name-check.yml
│ ├── build_custom.yml
│ └── build.yml
├── LICENSE
├── insights
├── __init__.py
├── protocol.py
└── api
│ ├── query.py
│ ├── get_query_axons.py
│ ├── __init__.py
│ └── insight_api.py
├── setup.py
├── .gitignore
├── min_compute.yml
├── contrib
└── CODE_REVIEW_DOCS.md
└── .circleci
└── config.yml
/tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.dependencies_installed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/neurons/miners/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/neurons/nodes/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/template/api/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/template/base/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/miners/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/validators/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/neurons/nodes/bitcoin/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/neurons/validators/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/template/base/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/validators/utils/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/neurons/validators/challenge_factory/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/template/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from . import config
2 | from . import misc
3 | from . import uids
4 |
--------------------------------------------------------------------------------
/template/validator/__init__.py:
--------------------------------------------------------------------------------
1 | from .forward import forward
2 | from .reward import reward
3 |
--------------------------------------------------------------------------------
/docs/imgs/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/logo.png
--------------------------------------------------------------------------------
/docs/imgs/subnet15.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/subnet15.png
--------------------------------------------------------------------------------
/docs/imgs/hla_system_context.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/hla_system_context.png
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | #exclude folders
2 | contrib/
3 | ops/
4 | docs/
5 | .github/
6 | .git/
7 | .idea/
8 | .pytest_cache/
9 | .gitignore
10 |
--------------------------------------------------------------------------------
/docs/imgs/hla_container_context.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/hla_container_context.png
--------------------------------------------------------------------------------
/docs/imgs/scoring/blockchain_weight.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/scoring/blockchain_weight.png
--------------------------------------------------------------------------------
/docs/imgs/scoring/block_height_function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/scoring/block_height_function.png
--------------------------------------------------------------------------------
/docs/imgs/scoring/process_time_function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/scoring/process_time_function.png
--------------------------------------------------------------------------------
/docs/imgs/scoring/recency_score_function.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/blockchain-insights/blockchain-data-subnet/HEAD/docs/imgs/scoring/recency_score_function.png
--------------------------------------------------------------------------------
/scripts/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")/../../../"
3 | pwd
4 | export PYTHONPATH=$(pwd)
5 | source venv/Scripts/activate
6 | pip install -r requirements.txt
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 | # Datasource local storage ignored files
7 | /dataSources/
8 | /dataSources.local.xml
9 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/profiles_settings.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/encodings.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | bittensor==7.1.2
2 | python-bitcoinrpc
3 | bitcoin
4 | base58
5 | pycryptodome
6 | neo4j
7 | python-dotenv
8 | sqlalchemy
9 | psycopg2-binary
10 | web3
11 | PyYAML
12 | loguru
13 | pymgclient
14 | blockchain-data-subnet-shared-libs==0.0.20
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/scripts/run_validator.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")/../"
3 | export PYTHONPATH=$PWD
4 | python3 neurons/validators/validator.py --wallet.name "$WALLET_NAME" --wallet.hotkey "$WALLET_HOTKEY" --netuid "$NETUID" --subtensor.network "$SUBTENSOR_NETWORK" --subtensor.chain_endpoint "$SUBTENSOR_URL" --enable_api "$ENABLE_API" --logging.trace
5 |
--------------------------------------------------------------------------------
/tests/validators/balance_query_regex.json:
--------------------------------------------------------------------------------
1 | {"regex": "WITH\\s+block_heights\\s+AS\\s+\\(\\s*SELECT\\s+generate_series\\(\\d+,\\s*\\d+\\)\\s+AS\\s+block\\s+(?:UNION\\s+ALL\\s+SELECT\\s+generate_series\\(\\d+,\\s*\\d+\\)\\s+AS\\s+block\\s+)+\\)\\s+SELECT\\s+SUM\\(\\s*block\\s*\\)\\s+FROM\\s+balance_changes\\s+WHERE\\s+block\\s+IN\\s+\\(SELECT\\s+block\\s+FROM\\s+block_heights\\)"}
--------------------------------------------------------------------------------
/scripts/run_miner.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | cd "$(dirname "$0")/../"
3 | export PYTHONPATH=$PWD
4 | python3 neurons/miners/miner.py --network "$NETWORK" --wallet.name "$WALLET_NAME" --wallet.hotkey "$WALLET_HOTKEY" --netuid "$NETUID" --subtensor.network "$SUBTENSOR_NETWORK" --miner_set_weights "$MINER_SET_WEIGHTS" --subtensor.chain_endpoint "$SUBTENSOR_URL" --llm_engine_url "$LLM_ENGINE_URL" --logging.trace
--------------------------------------------------------------------------------
/scripts/check_requirements_changes.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Check if requirements files have changed in the last commit
4 | if git diff --name-only HEAD~1 | grep -E 'requirements\.txt'; then
5 |     echo "Requirements files have changed. Running compatibility checks..."
6 |     echo 'export REQUIREMENTS_CHANGED="true"' >> $BASH_ENV
7 | else
8 |     echo "Requirements files have not changed. Skipping compatibility checks..."
9 |     echo 'export REQUIREMENTS_CHANGED="false"' >> $BASH_ENV
10 | fi
11 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/Project_Default.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/neurons/nodes/factory.py:
--------------------------------------------------------------------------------
1 | from protocols.blockchain import NETWORK_BITCOIN, NETWORK_ETHEREUM
2 | from neurons.nodes.bitcoin.node import BitcoinNode
3 |
4 |
5 | class NodeFactory:
6 |     @classmethod
7 |     def create_node(cls, network: str):
8 |         node_class = {
9 |             NETWORK_BITCOIN: BitcoinNode,
10 |             # Add other networks and their corresponding classes as needed
11 |         }.get(network)
12 |
13 |         if node_class is None:
14 |             raise ValueError(f"Unsupported network: {network}")
15 |
16 |         return node_class()
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/tests/validators/funds_flow_query_regex.json:
--------------------------------------------------------------------------------
1 | {"regex": "WITH\\s+(?:range\\(\\d+,\\s*\\d+\\)\\s*\\+\\s*)+range\\(\\d+,\\s*\\d+\\)\\s+AS\\s+block_heights\\s+UNWIND\\s+block_heights\\s+AS\\s+block_height\\s+MATCH\\s+p=\\((sender:Address)\\)-\\[(sent1:SENT)\\]->\\((t:Transaction)\\)-\\[(sent2:SENT)\\]->\\((receiver:Address)\\)\\s+WHERE\\s+t\\.block_height\\s+=\\s+block_height\\s+WITH\\s+project\\(p\\)\\s+AS\\s+subgraph\\s+CALL\\s+pagerank\\.get\\(subgraph\\)\\s+YIELD\\s+node,\\s+rank\\s+RETURN\\s+round\\(rank\\s*\\*\\s*1000000\\)\\s*/\\s*1000000\\s+AS\\s+roundedRank\\s+ORDER\\s+BY\\s+roundedRank\\s+DESC\\s+LIMIT\\s+1"}
--------------------------------------------------------------------------------
/neurons/nodes/abstract_node.py:
--------------------------------------------------------------------------------
1 | from abc import ABC, abstractmethod
2 |
3 |
4 | class Node(ABC):
5 |     def __init__(self):
6 |         pass
7 |
8 |     @abstractmethod
9 |     def get_current_block_height(self):
10 |         ...
11 |
12 |     @abstractmethod
13 |     def get_block_by_height(self, block_height):
14 |         ...
15 |
16 |     @abstractmethod
17 |     def create_funds_flow_challenge(self, start_block_height, last_block_height):
18 |         ...
19 |
20 |     @abstractmethod
21 |     def create_balance_tracking_challenge(self, block_height):
22 |         ...
23 |
--------------------------------------------------------------------------------
/.env.template:
--------------------------------------------------------------------------------
1 | USE_TORCH=1
2 | WAIT_FOR_SYNC=False
3 |
4 | # DB
5 | GRAPH_DB_USER=
6 | GRAPH_DB_PASSWORD=
7 | GRAPH_DB_URL=
8 |
9 | # Bitcoin
10 | BITCOIN_NODE_RPC_URL=
11 |
12 | # Ethereum Indexer Config
13 | ETHEREUM_NODE_RPC_URL=
14 |
15 | ## Only Main Indexer Config
16 | ETHEREUM_MAIN_START_BLOCK_HEIGHT=
17 | ETHEREUM_MAIN_END_BLOCK_HEIGHT=
18 | ETHEREUM_MAIN_IN_REVERSE_ORDER=
19 |
20 | ## Main + Sub Indexer Config for entire indexing
21 | ETHEREUM_SUB_START_BLOCK_HEIGHT=
22 | ETHEREUM_SUB_END_BLOCK_HEIGHT=
23 | ETHEREUM_SUB_THREAD_CNT=
24 | PORT=
25 | NETUID=
26 | NETWORK=
27 |
28 | POSTGRES_DB=
29 | POSTGRES_USER=
30 | POSTGRES_PASSWORD=
--------------------------------------------------------------------------------
/.idea/blockchain-data-subnet.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/neurons/validators/utils/read_json.py:
--------------------------------------------------------------------------------
1 | def is_api_data_valid(data):
2 |     if not isinstance(data, dict):
3 |         return False, "Not a dictionary"
4 |
5 |     if "keys" not in data.keys():
6 |         return False, "Missing keys field"
7 |
8 |     if not isinstance(data["keys"], dict):
9 |         return False, "Keys field is not a dict"
10 |
11 |     for key, value in data["keys"].items():
12 |         if not isinstance(value, dict):
13 |             return False, "Key value is not a dictionary"
14 |         if "requests_per_min" not in value.keys():
15 |             return False, "Missing requests_per_min field"
16 |         if not isinstance(value["requests_per_min"], int):
17 |             return False, "requests_per_min is not an int"
18 |
19 |     return True, "Formatting is good"
--------------------------------------------------------------------------------
/tests/nodes/test_base_node.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from neurons.nodes.bitcoin.node import BitcoinNode
3 | from neurons.nodes.evm.ethereum.node import EthereumNode
4 | from neurons.nodes.factory import NodeFactory
5 | from insights.protocol import NETWORK_BITCOIN, NETWORK_ETHEREUM
6 |
7 | class TestNode(unittest.TestCase):
8 |
9 |     def setUp(self):
10 |         pass
11 |
12 |     def test_create_from_network_bitcoin(self):
13 |         node = NodeFactory.create_node(NETWORK_BITCOIN)
14 |         self.assertIsInstance(node, BitcoinNode)
15 |
16 |     def test_create_from_network_ethereum(self):
17 |         node = NodeFactory.create_node(NETWORK_ETHEREUM)
18 |         self.assertIsInstance(node, EthereumNode)
19 |
20 |     def test_create_from_network_invalid(self):
21 |         with self.assertRaises(ValueError):
22 |             NodeFactory.create_node("INVALID_NETWORK")
23 |
24 |
25 | if __name__ == '__main__':
26 |     unittest.main()
27 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use the official Python 3.10 image
2 | FROM python:3.10
3 |
4 | # Set the working directory
5 | WORKDIR /blockchain-data-subnet
6 |
7 | # Copy the requirements file into the working directory
8 | COPY requirements.txt requirements.txt
9 |
10 | # Update the package list and install necessary packages
11 | RUN apt-get update && apt-get install -y \
12 | python3-dev \
13 | cmake \
14 | make \
15 | gcc \
16 | g++ \
17 | libssl-dev
18 |
19 | # Install pymgclient directly via pip
20 | RUN pip install pymgclient
21 |
22 | # Install the Python dependencies
23 | RUN pip install --no-cache-dir -r requirements.txt
24 |
25 | # Copy all remaining project files to the working directory
26 | COPY . .
27 |
28 | # Install the project package itself
29 | RUN pip install --no-cache-dir .
30 |
31 | # Make the scripts executable
32 | RUN chmod +x scripts/*
33 |
34 | # Set the entry point or command if required
35 | # ENTRYPOINT ["your_entry_point"]
36 | # CMD ["your_command"]
37 |
--------------------------------------------------------------------------------
/neurons/validators/utils/ping.py:
--------------------------------------------------------------------------------
1 | import socket
2 | import time
3 |
4 |
5 | def ping(host, port, attempts=10):
6 |     times = []
7 |     for _ in range(attempts):
8 |         try:
9 |             start_time = time.perf_counter()
10 |             # Set up the socket
11 |             sock = socket.create_connection((host, port), timeout=0.5)
12 |             end_time = time.perf_counter()
13 |             times.append(end_time - start_time)
14 |             sock.close()
15 |         except socket.timeout:
16 |             times.append(float('inf'))  # Use 'inf' to indicate a timeout
17 |         except Exception:
18 |             times.append(float('inf'))  # Use 'inf' for other exceptions
19 |
20 |     # Calculate average time of successful pings
21 |     successful_times = [t for t in times if t != float('inf')]
22 |     if successful_times:
23 |         average_time = sum(successful_times) / len(successful_times)
24 |         return True, average_time
25 |     else:
26 |         return False, 0  # Return False and 0 if all attempts failed
--------------------------------------------------------------------------------
/.github/workflows/branch-name-check.yml:
--------------------------------------------------------------------------------
1 | name: Enforce Branch Naming Convention
2 | on:
3 |   push:
4 |
5 | jobs:
6 |   check-branch-name:
7 |     runs-on: ubuntu-latest
8 |     steps:
9 |       - name: Verify Branch Name Pattern
10 |         run: |
11 |           # Extract the branch name from GITHUB_REF
12 |           BRANCH_NAME=${GITHUB_REF#refs/heads/}
13 |
14 |           # Define regex for release and feature branches
15 |           RELEASE_BRANCH_REGEX="^release\/[0-9]+\.[0-9]+$"
16 |           FEATURE_BRANCH_REGEX="^feature\/.+$"
17 |
18 |           # Allow branches main, release/*, feature/* by skipping checks
19 |           if [[ "$BRANCH_NAME" == "main" ]] || [[ $BRANCH_NAME =~ $RELEASE_BRANCH_REGEX ]] || [[ $BRANCH_NAME =~ $FEATURE_BRANCH_REGEX ]]; then
20 |             echo "Branch name $BRANCH_NAME is allowed."
21 |             exit 0
22 |           else
23 |             echo "ERROR: Branch name $BRANCH_NAME does not follow the naming convention (main, release/x.x, or feature/anything)."
24 |             exit 1
25 |           fi
26 |
--------------------------------------------------------------------------------
/neurons/setup_logger.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import json
3 | import time
4 |
5 | class CustomFormatter(logging.Formatter):
6 |     def format(self, record):
7 |         ct = self.converter(record.created)
8 |         s = time.strftime(self.default_time_format, ct)
9 |         if self.default_msec_format:
10 |             s = self.default_msec_format % (s, record.msecs)
11 |         config = {
12 |             'timestamp': s,
13 |             'level': record.levelname,
14 |             'message': record.msg
15 |         }
16 |         if record.__dict__.get('extra_content'):
17 |             config.update(record.__dict__["extra_content"])
18 |         return json.dumps(config)
19 |
20 | def setup_logger(name):
21 |     formatter = CustomFormatter()
22 |     handler = logging.StreamHandler()
23 |     handler.setFormatter(formatter)
24 |     logger = logging.getLogger(name)
25 |     logger.setLevel(logging.DEBUG)
26 |     logger.addHandler(handler)
27 |     return logger
28 |
29 | def logger_extra_data(**kwargs):
30 |     return {"extra_content": dict(kwargs)}
--------------------------------------------------------------------------------
/neurons/validators/utils/synapse.py:
--------------------------------------------------------------------------------
1 | from insights.protocol import Discovery, DiscoveryMetadata, DiscoveryOutput
2 | from protocols.blockchain import get_networks
3 |
4 |
5 | def is_discovery_response_valid(discovery_output: Discovery) -> bool:
6 |     if discovery_output is None:
7 |         return False
8 |
9 |     output: DiscoveryOutput = discovery_output.output
10 |     if output is None:
11 |         return False
12 |     if output.block_height is None or output.start_block_height is None:
13 |         return False
14 |     if output.start_block_height < 0 or output.block_height < 0:
15 |         return False
16 |     if output.start_block_height >= output.block_height:
17 |         return False
18 |     if output.start_block_height == 0:
19 |         return False
20 |     if output.balance_model_last_block is None:
21 |         return False
22 |     if output.balance_model_last_block < 0:
23 |         return False
24 |
25 |     metadata: DiscoveryMetadata = output.metadata
26 |
27 |     if metadata.network is None or metadata.network not in get_networks():
28 |         return False
29 |     return True
30 |
31 |
--------------------------------------------------------------------------------
/tests/validators/balance_query_script.json:
--------------------------------------------------------------------------------
1 | {"code": "def build_balance_query(network, start_block, balance_end, diff=1):\n import random\n total_blocks = balance_end - start_block\n part_size = total_blocks // 8\n range_clauses = []\n\n for i in range(8):\n part_start = start_block + i * part_size\n if i == 7:\n part_end = balance_end\n else:\n part_end = start_block + (i + 1) * part_size - 1\n\n if (part_end - part_start) > diff:\n sub_range_start = random.randint(part_start, part_end - diff)\n else:\n sub_range_start = part_start\n\n sub_range_end = sub_range_start + diff\n range_clauses.append(f\"SELECT generate_series({sub_range_start}, {sub_range_end}) AS block\")\n\n combined_ranges = \" UNION ALL \".join(range_clauses)\n\n final_query = f\"\"\"\n WITH block_heights AS (\n {combined_ranges}\n )\n SELECT SUM(block) \n FROM balance_changes \n WHERE block IN (SELECT block FROM block_heights)\n \"\"\"\n query = final_query.strip()\n return query\n\nquery = build_balance_query(network, start_block, balance_end, 1)"}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Opentensor
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.github/workflows/build_custom.yml:
--------------------------------------------------------------------------------
1 | name: Release CUSTOM Docker Image
2 |
3 | on:
4 |   release:
5 |     types: [created]
6 |   workflow_dispatch:
7 |     inputs:
8 |       version:
9 |         description: 'Release Version'
10 |         required: true
11 |
12 | permissions:
13 |   contents: write
14 |   packages: write
15 |
16 | jobs:
17 |   build-and-push:
18 |     runs-on: ubuntu-latest
19 |     steps:
20 |       - name: Check out code
21 |         uses: actions/checkout@v3
22 |         with:
23 |           ref: ${{ github.ref }}
24 |
25 |       - name: Login to GitHub Container Registry
26 |         uses: docker/login-action@v1
27 |         with:
28 |           registry: ghcr.io
29 |           username: ${{ github.actor }}
30 |           password: ${{ secrets.GITHUB_TOKEN }}
31 |
32 |       - name: Build and push Docker image
33 |         run: |
34 |           IMAGE_NAME=ghcr.io/${{ github.repository_owner }}/blockchain_insights_base
35 |           VERSION=${{ github.event.inputs.version || github.event.release.tag_name }}
36 |           VERSION_TAG="$IMAGE_NAME:$VERSION"
37 |           docker build --tag $VERSION_TAG .
38 |           docker push $VERSION_TAG
39 |
40 |
--------------------------------------------------------------------------------
/neurons/__init__.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 | import sys
4 |
5 | from dotenv import load_dotenv
6 | from loguru import logger
7 | import bittensor as bt
8 | import logging
9 |
10 | mandatory_config = {}
11 |
12 | def serialize(record):
13 |     try:
14 |         tmstamp = record['time'].strftime("%Y-%m-%d %H:%M:%S.%f")[:-3]  # trim microseconds to milliseconds
15 |         subset = {
16 |             'timestamp': tmstamp,
17 |             'level': record['level'].name,
18 |             'message': record['message'],
19 |         }
20 |         subset.update(mandatory_config)
21 |         subset.update(record['extra'])
22 |         return json.dumps(subset)
23 |     except Exception:
24 |         return record['message']
25 |
26 |
27 | def patching(record):
28 |     record['message'] = serialize(record)
29 |
30 |
31 | def custom_log_formatter(record):
32 |     """Custom log formatter"""
33 |     return "{message} \n"
34 |
35 |
36 | load_dotenv()
37 |
38 |
39 | if not os.environ.get("DISABLE_JSON_LOGS"):
40 |     logger = logger.patch(patching)
41 |     logger.remove(0)
42 |     logger.add(sys.stdout, format=custom_log_formatter)
43 |
44 | bt.logging._logger.setLevel(logging.DEBUG)  # set the internal bittensor logger to DEBUG
--------------------------------------------------------------------------------
/tests/validators/bitcoin_balance_tracking_query.py:
--------------------------------------------------------------------------------
1 |
2 |
3 | def build_balance_tracking_query(network, start_block, balance_end, diff=1):
4 |     import random
5 |     total_blocks = balance_end - start_block
6 |     part_size = total_blocks // 8
7 |     range_clauses = []
8 |
9 |     for i in range(8):
10 |         part_start = start_block + i * part_size
11 |         if i == 7:
12 |             part_end = balance_end
13 |         else:
14 |             part_end = start_block + (i + 1) * part_size - 1
15 |
16 |         if (part_end - part_start) > diff:
17 |             sub_range_start = random.randint(part_start, part_end - diff)
18 |         else:
19 |             sub_range_start = part_start
20 |
21 |         sub_range_end = sub_range_start + diff
22 |         range_clauses.append(f"SELECT generate_series({sub_range_start}, {sub_range_end}) AS block")
23 |
24 |     combined_ranges = " UNION ALL ".join(range_clauses)
25 |
26 |     final_query = f"""
27 |     WITH block_heights AS (
28 |         {combined_ranges}
29 |     )
30 |     SELECT SUM(block)
31 |     FROM balance_changes
32 |     WHERE block IN (SELECT block FROM block_heights)
33 |     """
34 |     query = final_query.strip()
35 |     return query
36 |
--------------------------------------------------------------------------------
/tests/validators/utils/test_uids.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import MagicMock
3 | from neurons.validators.utils.uids import check_uid_availability, get_random_uids
4 |
5 | class TestUids(unittest.TestCase):
6 |
7 |     def test_check_uid_availability(self):
8 |         metagraph = MagicMock()
9 |         axon_mock = MagicMock()
10 |         neuron_mock = MagicMock()
11 |
12 |         uid = 123  # Replace with a valid uid for testing
13 |         vpermit_tao_limit = 10  # Replace with a valid vpermit_tao_limit for testing
14 |
15 |         # Configure mocks
16 |         metagraph.axons.__getitem__.return_value = axon_mock
17 |         metagraph.validator_permit.__getitem__.return_value = True  # Adjust as needed
18 |         metagraph.S.__getitem__.return_value = 5  # Replace with a valid value for testing
19 |         metagraph.neurons.__getitem__.return_value = neuron_mock
20 |         neuron_mock.axon_info.ip = '192.168.1.1'  # Replace with a valid IP for testing
21 |
22 |         # check_uid_availability is a standalone function (not part of a class)
23 |         result = check_uid_availability(metagraph, uid, vpermit_tao_limit)
24 |         self.assertTrue(result)
25 |
26 | if __name__ == '__main__':
27 |     unittest.main()
28 |
--------------------------------------------------------------------------------
/tests/miners/test_challenge.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | from neurons.nodes.factory import NodeFactory
4 | from insights.protocol import NETWORK_BITCOIN, Challenge, MODEL_TYPE_FUNDS_FLOW
5 |
6 |
7 | class TestChallenge(unittest.TestCase):
8 |     def test_solve_challenge(self):
9 |
10 |         node = NodeFactory.create_node(NETWORK_BITCOIN)
11 |
12 |         challenge = Challenge(model_type=MODEL_TYPE_FUNDS_FLOW, in_total_amount=203216, out_total_amount=200000, tx_id_last_6_chars="66d946")
13 |         expected_output = "93fa1edce68762615740fd35581fc337d508ca33e682057bed7b395b5d66d946"
14 |         test_output1 = "93fa1edce68762615740fd35581fc337d508ca33e682057bed7b395b5d66d945"
15 |         test_output2 = "83fa1edce68762615740fd35581fc337d508ca33e682057bed7b395b5d66d946"
16 |
17 |         is_valid = node.validate_challenge_response_output(challenge, expected_output)
18 |         self.assertEqual(is_valid, True)
19 |
20 |         is_valid = node.validate_challenge_response_output(challenge, test_output1)
21 |         self.assertEqual(is_valid, False)
22 |
23 |         is_valid = node.validate_challenge_response_output(challenge, test_output2)
24 |         self.assertEqual(is_valid, False)
25 |
26 |
27 | if __name__ == '__main__':
28 |     from dotenv import load_dotenv
29 |     load_dotenv()
30 |     unittest.main()
--------------------------------------------------------------------------------
/tests/validators/funds_flow_query_script.json:
--------------------------------------------------------------------------------
1 | {"code": "def build_funds_flow_query(network, start_block, end_block, diff=1):\n import random\n total_blocks = end_block - start_block\n part_size = total_blocks // 8\n range_clauses = []\n for i in range(8):\n part_start = start_block + i * part_size\n if i == 7:\n part_end = end_block\n else:\n part_end = start_block + (i + 1) * part_size - 1\n if (part_end - part_start) > diff:\n sub_range_start = random.randint(part_start, part_end - diff)\n else:\n sub_range_start = part_start\n sub_range_end = sub_range_start + diff\n range_clauses.append(f\"range({sub_range_start}, {sub_range_end})\")\n combined_ranges = \" + \".join(range_clauses)\n final_query = f\"\"\"\n WITH {combined_ranges} AS block_heights\n UNWIND block_heights AS block_height\n MATCH p=(sender:Address)-[sent1:SENT]->(t:Transaction)-[sent2:SENT]->(receiver:Address)\n WHERE t.block_height = block_height\n WITH project(p) AS subgraph\n CALL pagerank.get(subgraph) YIELD node, rank\n RETURN round(rank * 1000000) / 1000000 AS roundedRank \n ORDER BY roundedRank DESC\n LIMIT 1\n \"\"\"\n query = final_query.strip()\n return query\n\nquery = build_funds_flow_query(network, start_block, end_block, 1)"}
--------------------------------------------------------------------------------
/tests/validators/bitcoin_funds_flow_query.py:
--------------------------------------------------------------------------------
1 |
2 | def build_funds_flow_query(network, start_block, end_block, diff=1):
3 |     import random
4 |     total_blocks = end_block - start_block
5 |     part_size = total_blocks // 8
6 |     range_clauses = []
7 |     for i in range(8):
8 |         part_start = start_block + i * part_size
9 |         if i == 7:
10 |             part_end = end_block
11 |         else:
12 |             part_end = start_block + (i + 1) * part_size - 1
13 |         if (part_end - part_start) > diff:
14 |             sub_range_start = random.randint(part_start, part_end - diff)
15 |         else:
16 |             sub_range_start = part_start
17 |         sub_range_end = sub_range_start + diff
18 |         range_clauses.append(f"range({sub_range_start}, {sub_range_end})")
19 |     combined_ranges = " + ".join(range_clauses)
20 |     final_query = f"""
21 |     WITH {combined_ranges} AS block_heights
22 |     UNWIND block_heights AS block_height
23 |     MATCH p=(sender:Address)-[sent1:SENT]->(t:Transaction)-[sent2:SENT]->(receiver:Address)
24 |     WHERE t.block_height = block_height
25 |     WITH project(p) AS subgraph
26 |     CALL pagerank.get(subgraph) YIELD node, rank
27 |     RETURN round(rank * 1000000) / 1000000 AS roundedRank
28 |     ORDER BY roundedRank DESC
29 |     LIMIT 1
30 |     """
31 |     query = final_query.strip()
32 |     return query
--------------------------------------------------------------------------------
/insights/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | The MIT License (MIT)
3 | Copyright © 2023 Chris Wilson
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
6 | documentation files (the “Software”), to deal in the Software without restriction, including without limitation
7 | the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
8 | and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of
11 | the Software.
12 |
13 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
14 | THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
15 | THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
16 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
17 | DEALINGS IN THE SOFTWARE.
18 | """
19 |
20 | __version__ = "2.2.15"
21 | version_split = __version__.split(".")
22 | __spec_version__ = (
23 |     (1000 * int(version_split[0]))
24 |     + (10 * int(version_split[1]))
25 |     + (1 * int(version_split[2]))
26 | )
27 |
--------------------------------------------------------------------------------
/tests/validators/bitcoin_funds_flow_query_2.py:
--------------------------------------------------------------------------------
1 |
2 | def build_funds_flow_query(network, start_block, end_block, diff=1):
3 |     import random
4 |     total_blocks = end_block - start_block
5 |     part_size = total_blocks // 8
6 |     range_clauses = []
7 |     for i in range(8):
8 |         part_start = start_block + i * part_size
9 |         if i == 7:
10 |             part_end = end_block
11 |         else:
12 |             part_end = start_block + (i + 1) * part_size - 1
13 |         if (part_end - part_start) > diff:
14 |             sub_range_start = random.randint(part_start, part_end - diff)
15 |         else:
16 |             sub_range_start = part_start
17 |         sub_range_end = sub_range_start + diff
18 |         range_clauses.append(f"range({sub_range_start}, {sub_range_end})")
19 |     combined_ranges = " + ".join(range_clauses)
20 |     final_query = f"""
21 |     WITH {combined_ranges} AS block_heights
22 |     UNWIND block_heights AS block_height
23 |     MATCH (t:Transaction)
24 |     WHERE t.block_height = block_height
25 |     WITH t
26 |     MATCH (sender:Address)-[sent1:SENT]->(t)-[sent2:SENT]->(receiver:Address)
27 |     WITH SUM(sent1.value_satoshi + sent2.value_satoshi) AS total_value, COUNT(sender) AS sender_count, COUNT(receiver) AS receiver_count, COUNT(t) AS transaction_count
28 |     RETURN total_value + sender_count + receiver_count + transaction_count AS output
29 |     """
30 |     query = final_query.strip()
31 |     return query
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Release Official Docker Image
2 |
3 | on:
4 |   release:
5 |     types: [created]
6 |   workflow_dispatch:
7 |     inputs:
8 |       version:
9 |         description: 'Release Version'
10 |         required: true
11 |
12 | permissions:
13 |   contents: write
14 |   packages: write
15 |
16 | jobs:
17 |   build-and-push:
18 |     runs-on: ubuntu-latest
19 |     steps:
20 |       - name: Check out code
21 |         uses: actions/checkout@v3
22 |         with:
23 |           ref: ${{ github.ref }}
24 |
25 |       - name: Login to GitHub Container Registry
26 |         uses: docker/login-action@v1
27 |         with:
28 |           registry: ghcr.io
29 |           username: ${{ github.actor }}
30 |           password: ${{ secrets.GITHUB_TOKEN }}
31 |
32 |       - name: Build and push Docker image
33 |         run: |
34 |           IMAGE_NAME=ghcr.io/${{ github.repository_owner }}/blockchain_insights_base
35 |           VERSION=${{ github.event.inputs.version || github.event.release.tag_name }}
36 |           VERSION_TAG="$IMAGE_NAME:$VERSION"
37 |           LATEST_TAG="$IMAGE_NAME:latest"
38 |           # Build the Docker image with the version tag
39 |           docker build --tag $VERSION_TAG .
40 |           # Tag the same image with the 'latest' tag
41 |           docker tag $VERSION_TAG $LATEST_TAG
42 |           # Push both the version tag and the latest tag
43 |           docker push $VERSION_TAG
44 |           docker push $LATEST_TAG
45 |
--------------------------------------------------------------------------------
/template/__init__.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # TODO(developer): Set your name
4 | # Copyright © 2023
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | # TODO(developer): Change this value when updating your code base.
21 | # Define the version of the template module.
22 | __version__ = "0.0.0"
23 | version_split = __version__.split(".")
24 | __spec_version__ = (
25 |     (1000 * int(version_split[0]))
26 |     + (10 * int(version_split[1]))
27 |     + (1 * int(version_split[2]))
28 | )
29 |
30 | # Import all submodules.
31 | from . import protocol
32 | from . import base
33 | from . import validator
34 | from . import api
--------------------------------------------------------------------------------
/neurons/utils.py:
--------------------------------------------------------------------------------
1 | import re
2 |
3 |
4 | # This doesn't check all possible cases, since Memgraph also runs its own checks (comment checks etc.)
5 | def generate_patterns_for_terms(terms):
6 |     patterns = []
7 |     for term in terms:
8 |         lower_term = term.lower()
9 |         # Escape term for regex pattern
10 |         escaped_term = re.escape(lower_term)
11 |         # Basic term
12 |         patterns.append(escaped_term)
13 |
14 |         # With spaces between each character
15 |         patterns.append(r'\s*'.join(escaped_term))
16 |
17 |         # Unicode escape sequences (basic example)
18 |         unicode_pattern = ''.join([f'\\u{ord(char):04x}' for char in lower_term])
19 |         patterns.append(unicode_pattern)
20 |
21 |         # Mixed case variations to catch case obfuscation
22 |         mixed_case_variations = [f'[{char.lower()}{char.upper()}]' for char in lower_term]
23 |         patterns.append(''.join(mixed_case_variations))
24 |
25 |         # Detecting comments that might hide portions of malicious queries
26 |         # This is a simplistic approach and might need refinement
27 |         patterns.append(f'/{escaped_term}|{escaped_term}/')
28 |
29 |     return patterns
30 |
31 |
32 | def is_malicious(query, terms):
33 |     # Normalize the query by lowercasing (maintaining original for comment checks)
34 |     normalized_query = query.lower()
35 |
36 |     # Generate patterns for the given terms, including obfuscation-resistant versions
37 |     write_patterns = generate_patterns_for_terms(terms)
38 |
39 |     # Compile regex patterns to detect any of the write operations
40 |     pattern = re.compile('|'.join(write_patterns), re.IGNORECASE)
41 |
42 |     # Check if the normalized query matches any of the patterns
43 |     if pattern.search(normalized_query):
44 |         return True  # Query is potentially malicious or not read-only
45 |
46 |     return False  # Query passed the check
--------------------------------------------------------------------------------
/template/api/dummy.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import bittensor as bt
21 | from typing import List, Optional, Union, Any, Dict
22 | from template.protocol import Dummy
23 | from bittensor.subnets import SubnetsAPI
24 |
25 |
26 | class DummyAPI(SubnetsAPI):
27 |     def __init__(self, wallet: "bt.wallet"):
28 |         super().__init__(wallet)
29 |         self.netuid = 33
30 |         self.name = "dummy"
31 |
32 |     def prepare_synapse(self, dummy_input: int) -> Dummy:
33 |         synapse = Dummy(dummy_input=dummy_input)
34 |         return synapse
35 |
36 |     def process_responses(
37 |         self, responses: List[Union["bt.Synapse", Any]]
38 |     ) -> List[int]:
39 |         outputs = []
40 |         for response in responses:
41 |             if response.dendrite.status_code != 200:
42 |                 continue
43 |             outputs.append(response.dummy_output)
44 |         return outputs
45 |
--------------------------------------------------------------------------------
/tests/validators/test_uptime.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from sqlalchemy import create_engine
3 | from sqlalchemy.orm import sessionmaker, scoped_session
4 |
5 | from neurons.validators.uptime import Downtimes, Base, MinerUptimeManager, Miners
6 |
7 | class TestMinerUptimeManager(unittest.TestCase):
8 |     def setUp(self):
9 |         self.engine = create_engine('sqlite:///:memory:')
10 |         Base.metadata.create_all(self.engine)
11 |         self.Session = scoped_session(sessionmaker(bind=self.engine))
12 |         self.uptime_manager = MinerUptimeManager('sqlite:///:memory:')
13 |         self.uptime_manager.Session = self.Session  # Ensure we use the same session factory
14 |         self.session = self.Session()
15 |
16 |     def tearDown(self):
17 |         self.session.rollback()
18 |         self.session.close()
19 |         self.Session.remove()
20 |         self.engine.dispose()
21 |
22 |     def test_add_miner_for_first_time_its_up(self):
23 |         self.uptime_manager.up(123, 'key123')
24 |         miner = self.uptime_manager.get_miner('key123')
25 |
26 |         self.assertIsNotNone(miner)
27 |         self.assertEqual(miner.uid, 123)
28 |         self.assertEqual(miner.hotkey, 'key123')
29 |
30 |     def test_update_miner__when_up_with_different_uid(self):
31 |         self.uptime_manager.up(123, 'key123')
32 |         self.uptime_manager.up(100, 'key123')
33 |
34 |         miner = self.uptime_manager.get_miner('key123')
35 |         self.assertEqual(miner.uid, 100)
36 |
37 |     def test_miner_up_and_down(self):
38 |         self.uptime_manager.up(100, 'key123')
39 |         self.uptime_manager.down(100, 'key123')
40 |         self.uptime_manager.up(100, 'key123')
41 |
42 |         miner = self.uptime_manager.get_miner('key123')
43 |         self.assertTrue(len(miner.downtimes) == 1)
44 |
45 |         self.uptime_manager.up(200, 'key123')
46 |         miner = self.uptime_manager.get_miner('key123')
47 |         self.assertTrue(len(miner.downtimes) == 1)
48 |
49 |         uptimes = self.uptime_manager.get_uptime_scores('key123')
50 |         print(f"{uptimes=}")
51 |
52 |
53 | if __name__ == '__main__':
54 |     unittest.main()
55 |
--------------------------------------------------------------------------------
/docs/what_are_subnets.md:
--------------------------------------------------------------------------------
1 | # What is Bittensor?
2 | Bittensor is a network where computers validate the work that other computers contribute to the network - the work that is most valuable to the collective is rewarded.
3 |
4 | Bittensor is a catalyst for open-source developers and smaller AI research labs, who now have a financial incentive to fine-tune open foundational models.
5 |
6 | Bittensor is a library of machine intelligence that continuously grows and shares knowledge amongst peers.
7 |
8 | # What is a subnet?
9 |
10 | Bittensor is releasing its own language for creating incentive mechanisms. This allows developers to build incentive systems on Bittensor, tapping into our web of intelligence to develop markets of the developer's choosing.
11 |
12 | Subnet 1, an incentive system for machine intelligence production, showcases the enormous potential of markets to procure huge amounts of resources. Releasing user-created subnets is set to create a Cambrian explosion of additional resources in the Bittensor ecosystem.
13 |
14 | # Why should you care?
15 |
16 | As an open-source developer, you now have the ability to write your own incentive mechanisms without creating an entirely new chain. By tapping into Bittensor's network of intelligence, you can incentivize AI models from all over the world to perform tasks of your choosing (e.g., image generation, storage, compute access) - the possibilities are truly endless.
17 |
18 | The release of subnets also offers the potential to pull these tools into a shared network, making all the ingredients necessary to create intelligence available within one network, governed by one token.
19 |
20 | You get to play a vital role in helping bootstrap what could one day become one of the most powerful networks in the world - and you make money by doing so!
21 |
22 | By incentivizing developers to create their own markets, Bittensor is set to become a one-stop shop for those seeking all the compute requirements for building unstoppable applications on top of an incentivized infrastructure.
23 |
24 | # Deeper dive
25 | Check out the Bittensor about page [here](https://bittensor.com/about) for more details about what the Bittensor paradigm is and why subnets are revolutionary technology.
26 |
27 | Also see our [linktree](https://linktr.ee/opentensor) for more information.
--------------------------------------------------------------------------------
/template/validator/reward.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # TODO(developer): Set your name
4 | # Copyright © 2023
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import numpy as np
21 | from typing import List
22 |
23 |
24 | def reward(query: int, response: int) -> float:
25 |     """
26 |     Reward the miner response to the dummy request. This method returns a reward
27 |     value for the miner, which is used to update the miner's score.
28 |
29 |     Returns:
30 |     - float: The reward value for the miner.
31 |     """
32 |
33 |     return 1.0 if response == query * 2 else 0.0
34 |
35 |
36 | def get_rewards(
37 |     self,
38 |     query: int,
39 |     responses: List[float],
40 | ) -> np.ndarray:
41 |     """
42 |     Returns an array of rewards for the given query and responses.
43 |
44 |     Args:
45 |     - query (int): The query sent to the miner.
46 |     - responses (List[float]): A list of responses from the miner.
47 |
48 |     Returns:
49 |     - np.ndarray: An array of float32 rewards for the given query and responses.
50 |     """
51 |     # Get all the reward results by iteratively calling your reward() function.
52 |     return np.array(
53 |         [reward(query, response) for response in responses], dtype=np.float32
54 |     )
55 |
--------------------------------------------------------------------------------
/insights/protocol.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, List, Dict, Any
2 | import bittensor as bt
3 | from protocols.llm_engine import LlmMessage, QueryOutput
4 | from pydantic import BaseModel, ConfigDict
5 |
6 | # protocol version
7 | VERSION = 6
8 | ERROR_TYPE = int
9 | MAX_MINER_INSTANCE = 9
10 |
11 |
12 | class DiscoveryMetadata(BaseModel):
13 |     network: Optional[str] = None
14 |
15 |
16 | class DiscoveryOutput(BaseModel):
17 |     model_config = ConfigDict(protected_namespaces=())
18 |
19 |     metadata: Optional[DiscoveryMetadata] = None
20 |     block_height: Optional[int] = None
21 |     start_block_height: Optional[int] = None
22 |     balance_model_last_block: Optional[int] = None
23 |     version: Optional[int] = VERSION
24 |
25 |
26 | class BaseSynapse(bt.Synapse):
27 |     version: int = VERSION
28 |
29 |
30 | class HealthCheck(BaseSynapse):
31 |     output: Optional[List[Dict]] = None
32 |
33 |     def deserialize(self):
34 |         return self.output
35 |
36 |
37 | class Discovery(BaseSynapse):
38 |     output: Optional[DiscoveryOutput] = None
39 |
40 |     def deserialize(self):
41 |         return self
42 |
43 |
44 | class Benchmark(BaseSynapse):
45 |     network: Optional[str] = None
46 |     query: Optional[str] = None
47 |     query_type: Optional[str] = None
48 |
49 |     # output
50 |     output: Optional[float] = None
51 |
52 |     def deserialize(self) -> Optional[float]:
53 |         return self.output
54 |
55 |
56 | class Challenge(BaseSynapse):
57 |     model_config = ConfigDict(protected_namespaces=())
58 |
59 |     model_type: str  # model type
60 |     # For BTC funds flow model
61 |     in_total_amount: Optional[int] = None
62 |     out_total_amount: Optional[int] = None
63 |     tx_id_last_6_chars: Optional[str] = None
64 |
65 |     # For BTC balance tracking model
66 |     block_height: Optional[int] = None
67 |
68 |     # Altcoins
69 |     checksum: Optional[str] = None
70 |
71 |     output: Optional[Any] = None
72 |
73 |     def deserialize(self) -> Optional[Any]:
74 |         return self.output
75 |
76 |
77 | class LlmQuery(BaseSynapse):
78 |     network: Optional[str] = None
79 |     # decide whether to invoke a generic llm endpoint or not
80 |     # is_generic_llm: bool = False
81 |     # messages: conversation history for llm agent to use as context
82 |     messages: Optional[List[LlmMessage]] = None
83 |
84 |     # output
85 |     output: Optional[List[QueryOutput]] = None
86 |
87 |     def deserialize(self) -> Optional[List[QueryOutput]]:
88 |         return self.output
89 |
--------------------------------------------------------------------------------
/insights/api/query.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import bittensor as bt
21 | from typing import Any, List, Tuple, Union
22 |
23 | from protocols.llm_engine import LLM_MESSAGE_TYPE_USER
24 |
25 | from insights import protocol
26 | from insights.protocol import LlmQuery, LlmMessage
27 | from insights.api import SubnetsAPI
28 |
29 |
30 | class TextQueryAPI(SubnetsAPI):
31 | def __init__(self, wallet: "bt.wallet"):
32 | super().__init__(wallet)
33 | self.netuid = 15
34 | self.name = "LlmQuery"
35 |
36 |     def prepare_synapse(self, network: str, text: str) -> LlmQuery:
37 | synapse = LlmQuery(
38 | network=network,
39 | messages=[
40 | LlmMessage(
41 | type=LLM_MESSAGE_TYPE_USER,
42 | content=text
43 | ),
44 | ],
45 | )
46 | return synapse
47 |
48 |     def process_responses(
49 |         self, responses: List[Union["bt.Synapse", Any]]
50 |     ) -> Tuple[List[Any], List[int]]:
51 |         outputs = []
52 |         blacklist_axon_list = []
53 |         for idx, response in enumerate(responses):
54 |             if response.dendrite.status_code != 200:
55 |                 blacklist_axon_list.append(idx)
56 |                 continue
57 |             outputs.append(response.output)
58 |         return outputs, blacklist_axon_list
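A hedged usage sketch for TextQueryAPI; the wallet names, uid selection, network string, and prompt are placeholders, not values mandated by this file.

import asyncio
import bittensor as bt
from insights.api.query import TextQueryAPI

async def main():
    wallet = bt.wallet(name="default", hotkey="default")  # assumed local wallet
    api = TextQueryAPI(wallet)
    metagraph = bt.metagraph(netuid=15)
    axons = [metagraph.axons[uid] for uid in (0, 1, 2)]   # arbitrary example uids
    # __call__ forwards to query_api, which builds the LlmQuery synapse.
    outputs, blacklisted = await api(axons=axons, network="bitcoin", text="How many ...")
    print(outputs, blacklisted)

asyncio.run(main())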
--------------------------------------------------------------------------------
/neurons/validators/challenge_factory/balance_challenge_factory.py:
--------------------------------------------------------------------------------
1 | import time
2 | import threading
3 | from typing import List, Tuple
4 | import random
5 |
6 | from insights.protocol import Challenge
7 |
8 | class BalanceChallengeFactory:
9 | """
10 | BalanceChallengeFactory generates challenges for balance tracking model periodically
11 | It has several tiers and generate one challenge for each tier
12 | It caches generated challenges and provides them to validators
13 | """
14 | def __init__(self, node, interval=300, tier_gap=100000):
15 | self.node = node # blockchain node
16 | self.interval = interval # seconds between updates
17 | self.tier_gap = tier_gap # a block height gap between tiers
18 | self.last_generated_tier = -1
19 | self.challenges: List[Tuple[Challenge, int]] = [] # list of generated challenges and expected results, each element corresponding to each tier
20 | self.lock = threading.Lock() # Lock for synchronizing access to 'challenges'
21 | self.running = True # Control the thread activity
22 | self.thread = threading.Thread(target=self.update)
23 | self.thread.start()
24 |
25 | def update(self):
26 | while self.running:
27 |             # Generate the next tier's challenge; only the update of the shared
28 |             # 'challenges' list below needs to hold the lock.
29 |             block_height = 0
30 |             latest_block_height = self.node.get_current_block_height() - 6  # stay behind the chain tip as a confirmation buffer
31 | new_tier = self.last_generated_tier + 1
32 | if new_tier * self.tier_gap < latest_block_height:
33 | block_height = random.randint(new_tier * self.tier_gap, min(latest_block_height, (new_tier + 1) * self.tier_gap - 1))
34 | else:
35 | new_tier = 0
36 | challenge, expected_output = self.node.create_balance_challenge(block_height)
37 |
38 | with self.lock:
39 | if new_tier > len(self.challenges) - 1:
40 | self.challenges.append((challenge, expected_output))
41 | else:
42 | self.challenges[new_tier] = (challenge, expected_output)
43 |
44 | self.last_generated_tier = new_tier
45 | time.sleep(self.interval) # Wait for the specified interval
46 |
47 |     def get_challenge(self, block_height: int) -> Tuple[Challenge, int]:
48 |         with self.lock:
49 |             # Assumes at least one tier has been generated; callers should
50 |             # wait for the first update cycle before requesting a challenge.
51 |             max_tier = min(block_height // self.tier_gap, len(self.challenges) - 1)
52 |             random_tier = random.randint(0, max_tier)
53 |             return self.challenges[random_tier]
54 |
55 |     def stop(self):
56 |         self.running = False
57 |         self.thread.join()  # Wait for the thread to finish
58 |
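A sketch of how the factory can be exercised without a real blockchain node. StubNode and all heights are fabricated stand-ins for the node interface this class assumes (get_current_block_height, create_balance_challenge).

import time
from insights.protocol import Challenge
from neurons.validators.challenge_factory.balance_challenge_factory import BalanceChallengeFactory

class StubNode:
    def get_current_block_height(self):
        return 800_000

    def create_balance_challenge(self, block_height):
        # Return a trivial (challenge, expected_output) pair.
        return Challenge(model_type="balance_tracking", block_height=block_height), block_height

factory = BalanceChallengeFactory(StubNode(), interval=1)
time.sleep(2)          # let the background thread populate the first tier
challenge, expected = factory.get_challenge(block_height=350_000)
factory.stop()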
--------------------------------------------------------------------------------
/template/utils/uids.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import random
3 | import bittensor as bt
4 | from typing import List
5 |
6 |
7 | def check_uid_availability(
8 | metagraph: "bt.metagraph.Metagraph", uid: int, vpermit_tao_limit: int
9 | ) -> bool:
10 | """Check if uid is available. The UID should be available if it is serving and has less than vpermit_tao_limit stake
11 | Args:
12 | metagraph (:obj: bt.metagraph.Metagraph): Metagraph object
13 | uid (int): uid to be checked
14 | vpermit_tao_limit (int): Validator permit tao limit
15 | Returns:
16 | bool: True if uid is available, False otherwise
17 | """
18 | # Filter non serving axons.
19 | if not metagraph.axons[uid].is_serving:
20 | return False
21 | if metagraph.validator_permit[uid]:
22 |
23 |         # Filter out uids whose stake exceeds vpermit_tao_limit.
24 | if metagraph.S[uid] > vpermit_tao_limit:
25 | return False
26 |
27 | # Filter out uid without IP.
28 | if metagraph.neurons[uid].axon_info.ip == '0.0.0.0':
29 | return False
30 | # Available otherwise.
31 | return True
32 |
33 |
34 | def get_random_uids(
35 | self, k: int, exclude: List[int] = None
36 | ) -> np.ndarray:
37 | """Returns k available random uids from the metagraph.
38 | Args:
39 | k (int): Number of uids to return.
40 | exclude (List[int]): List of uids to exclude from the random sampling.
41 | Returns:
42 |         uids (np.ndarray): Randomly sampled available uids.
43 | Notes:
44 | If `k` is larger than the number of available `uids`, set `k` to the number of available `uids`.
45 | """
46 | candidate_uids = []
47 | avail_uids = []
48 |
49 | for uid in range(self.metagraph.n.item()):
50 | uid_is_available = check_uid_availability(
51 | self.metagraph, uid, self.config.neuron.vpermit_tao_limit
52 | )
53 | uid_is_not_excluded = exclude is None or uid not in exclude
54 |
55 | if uid_is_available:
56 | avail_uids.append(uid)
57 | if uid_is_not_excluded:
58 | candidate_uids.append(uid)
59 | # If k is larger than the number of available uids, set k to the number of available uids.
60 | k = min(k, len(avail_uids))
61 |     # Check if candidate_uids contains enough uids for querying; if not, grab all available uids.
62 | available_uids = candidate_uids
63 | if len(candidate_uids) < k:
64 | available_uids += random.sample(
65 | [uid for uid in avail_uids if uid not in candidate_uids],
66 | k - len(candidate_uids),
67 | )
68 |     uids = np.array(random.sample(available_uids, k))
69 |     return uids
71 |
--------------------------------------------------------------------------------
/scripts/check_compatibility.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | if [ -z "$1" ]; then
4 | echo "Please provide a Python version as an argument."
5 | exit 1
6 | fi
7 |
8 | python_version="$1"
9 | all_passed=true
10 |
11 | GREEN='\033[0;32m'
12 | YELLOW='\033[0;33m'
13 | RED='\033[0;31m'
14 | NC='\033[0m' # No Color
15 |
16 | check_compatibility() {
17 | all_supported=0
18 |
19 | while read -r requirement; do
20 | # Skip lines starting with git+
21 | if [[ "$requirement" == git+* ]]; then
22 | continue
23 | fi
24 |
25 | package_name=$(echo "$requirement" | awk -F'[!=<>]' '{print $1}' | awk -F'[' '{print $1}') # Strip off brackets
26 | echo -n "Checking $package_name... "
27 |
28 | url="https://pypi.org/pypi/$package_name/json"
29 |         response=$(curl -s "$url")
30 |         status_code=$(curl -s -o /dev/null -w "%{http_code}" "$url")
31 |
32 | if [ "$status_code" != "200" ]; then
33 | echo -e "${RED}Information not available for $package_name. Failure.${NC}"
34 | all_supported=1
35 | continue
36 | fi
37 |
38 | classifiers=$(echo "$response" | jq -r '.info.classifiers[]')
39 | requires_python=$(echo "$response" | jq -r '.info.requires_python')
40 |
41 | base_version="Programming Language :: Python :: ${python_version%%.*}"
42 | specific_version="Programming Language :: Python :: $python_version"
43 |
44 | if echo "$classifiers" | grep -q "$specific_version" || echo "$classifiers" | grep -q "$base_version"; then
45 | echo -e "${GREEN}Supported${NC}"
46 | elif [ "$requires_python" != "null" ]; then
47 | if echo "$requires_python" | grep -Eq "==$python_version|>=$python_version|<=$python_version"; then
48 | echo -e "${GREEN}Supported${NC}"
49 | else
50 | echo -e "${RED}Not compatible with Python $python_version due to constraint $requires_python.${NC}"
51 | all_supported=1
52 | fi
53 | else
54 | echo -e "${YELLOW}Warning: Specific version not listed, assuming compatibility${NC}"
55 | fi
56 | done < requirements.txt
57 |
58 | return $all_supported
59 | }
60 |
61 | echo "Checking compatibility for Python $python_version..."
62 | check_compatibility
63 | if [ $? -eq 0 ]; then
64 | echo -e "${GREEN}All requirements are compatible with Python $python_version.${NC}"
65 | else
66 |     echo -e "${RED}Some requirements are NOT compatible with Python $python_version.${NC}"
67 | all_passed=false
68 | fi
69 |
70 | echo ""
71 | if $all_passed; then
72 | echo -e "${GREEN}All tests passed.${NC}"
73 | else
74 |     echo -e "${RED}Not all tests passed.${NC}"
75 | exit 1
76 | fi
77 |
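The script is run from the repository root with the target interpreter version as its only argument, e.g. `bash scripts/check_compatibility.sh 3.10`; it needs `curl` and `jq` on the PATH and reads `requirements.txt` from the current directory.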
--------------------------------------------------------------------------------
/.idea/dataSources.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <project version="4">
3 |   <component name="DataSourceManagerImpl" format="xml" multifile-model="true">
4 |     <data-source source="LOCAL" name="test_miner_data_samples">
5 |       <driver-ref>sqlite.xerial</driver-ref>
6 |       <synchronize>true</synchronize>
7 |       <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
8 |       <jdbc-url>jdbc:sqlite:C:\work4\blockchain-data-subnet\neurons\validators\test_miner_data_samples.db</jdbc-url>
9 |       <working-dir>$ProjectFileDir$</working-dir>
10 |       <libraries>
11 |         <library>
12 |           <url>file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.43.0/org/xerial/sqlite-jdbc/3.43.0.0/sqlite-jdbc-3.43.0.0.jar</url>
13 |         </library>
14 |       </libraries>
15 |     </data-source>
16 |     <data-source source="LOCAL" name="test_miner_registry">
17 |       <driver-ref>sqlite.xerial</driver-ref>
18 |       <synchronize>true</synchronize>
19 |       <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
20 |       <jdbc-url>jdbc:sqlite:C:\work4\blockchain-data-subnet\neurons\validators\test_miner_registry.db</jdbc-url>
21 |       <working-dir>$ProjectFileDir$</working-dir>
22 |       <libraries>
23 |         <library>
24 |           <url>file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.43.0/org/xerial/sqlite-jdbc/3.43.0.0/sqlite-jdbc-3.43.0.0.jar</url>
25 |         </library>
26 |       </libraries>
27 |     </data-source>
28 |     <data-source source="LOCAL" name="blacklist_registry">
29 |       <driver-ref>sqlite.xerial</driver-ref>
30 |       <synchronize>true</synchronize>
31 |       <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
32 |       <jdbc-url>jdbc:sqlite://wsl$/Ubuntu-22.04/data/blacklist_registry.db</jdbc-url>
33 |       <working-dir>$ProjectFileDir$</working-dir>
34 |     </data-source>
35 |     <data-source source="LOCAL" name="identifier">
36 |       <driver-ref>sqlite.xerial</driver-ref>
37 |       <synchronize>true</synchronize>
38 |       <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
39 |       <jdbc-url>jdbc:sqlite:identifier.sqlite</jdbc-url>
40 |       <working-dir>$ProjectFileDir$</working-dir>
41 |     </data-source>
42 |     <data-source source="LOCAL" name="miner_registry">
43 |       <driver-ref>sqlite.xerial</driver-ref>
44 |       <synchronize>true</synchronize>
45 |       <jdbc-driver>org.sqlite.JDBC</jdbc-driver>
46 |       <jdbc-url>jdbc:sqlite://wsl$/Ubuntu-22.04/data/miner_registry.db</jdbc-url>
47 |       <working-dir>$ProjectFileDir$</working-dir>
48 |     </data-source>
49 |   </component>
50 | </project>
--------------------------------------------------------------------------------
/template/subnet_links.py:
--------------------------------------------------------------------------------
1 | SUBNET_LINKS = [
2 | {"name": "sn0", "url": ""},
3 | {"name": "sn1", "url": "https://github.com/opentensor/prompting/"},
4 | {"name": "sn2", "url": "https://github.com/bittranslateio/bittranslate/"},
5 | {
6 | "name": "sn3",
7 | "url": "https://github.com/gitphantomman/scraping_subnet/",
8 | },
9 | {"name": "sn4", "url": "https://github.com/manifold-inc/targon/"},
10 | {"name": "sn5", "url": "https://github.com/unconst/ImageSubnet/"},
11 | {"name": "sn6", "url": ""},
12 | {"name": "sn7", "url": "https://github.com/tensorage/tensorage/"},
13 | {
14 | "name": "sn8",
15 | "url": "https://github.com/taoshidev/time-series-prediction-subnet/",
16 | },
17 | {"name": "sn9", "url": "https://github.com/unconst/pretrain-subnet/"},
18 | {
19 | "name": "sn10",
20 | "url": "https://github.com/dream-well/map-reduce-subnet/",
21 | },
22 | {"name": "sn11", "url": "https://github.com/opentensor/text-prompting/"},
23 | {"name": "sn12", "url": ""},
24 | {"name": "sn13", "url": "https://github.com/RusticLuftig/data-universe/"},
25 | {
26 | "name": "sn14",
27 | "url": "https://github.com/ceterum1/llm-defender-subnet/",
28 | },
29 | {
30 | "name": "sn15",
31 | "url": "https://github.com/blockchain-insights/blockchain-data-subnet/",
32 | },
33 | {"name": "sn16", "url": "https://github.com/UncleTensor/AudioSubnet/"},
34 | {"name": "sn17", "url": "https://github.com/CortexLM/flavia/"},
35 | {"name": "sn18", "url": "https://github.com/corcel-api/cortex.t/"},
36 | {"name": "sn19", "url": "https://github.com/namoray/vision/"},
37 | {"name": "sn20", "url": "https://github.com/oracle-subnet/oracle-subnet/"},
38 | {"name": "sn21", "url": "https://github.com/ifrit98/storage-subnet/"},
39 | {"name": "sn22", "url": "https://github.com/surcyf123/smart-scrape/"},
40 | {"name": "sn23", "url": "https://github.com/NicheTensor/NicheImage/"},
41 | {"name": "sn24", "url": "https://github.com/eseckft/BitAds.ai/tree/main"},
42 | {"name": "sn25", "url": "https://github.com/KMFODA/DistributedTraining/"},
43 | {
44 | "name": "sn26",
45 | "url": "https://github.com/Supreme-Emperor-Wang/ImageAlchemy/",
46 | },
47 | {
48 | "name": "sn27",
49 | "url": "https://github.com/neuralinternet/compute-subnet/",
50 | },
51 | {"name": "sn28", "url": "https://github.com/zktensor/zktensor_subnet/"},
52 | {"name": "sn29", "url": "https://github.com/404-Repo/Subnet-29/"},
53 | {"name": "sn30", "url": ""},
54 | {
55 | "name": "sn31",
56 | "url": "https://github.com/bthealthcare/healthcare-subnet",
57 | },
58 | {"name": "sn32", "url": "https://github.com/RoyalTensor/roleplay/"},
59 | ]
60 |
--------------------------------------------------------------------------------
/insights/api/get_query_axons.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 |
21 | from insights.protocol import HealthCheck
22 | from neurons import logger
23 |
24 |
25 | async def ping_uids(dendrite, metagraph, uids, timeout=3):
26 | """
27 | Pings a list of UIDs to check their availability on the Bittensor network.
28 |
29 | Args:
30 | dendrite (bittensor.dendrite): The dendrite instance to use for pinging nodes.
31 | metagraph (bittensor.metagraph): The metagraph instance containing network information.
32 | uids (list): A list of UIDs (unique identifiers) to ping.
33 | timeout (int, optional): The timeout in seconds for each ping. Defaults to 3.
34 |
35 | Returns:
36 | tuple: A tuple containing two lists:
37 | - The first list contains UIDs that were successfully pinged.
38 | - The second list contains UIDs that failed to respond.
39 | """
40 | axons = [metagraph.axons[uid] for uid in uids]
41 | try:
42 | responses = await dendrite(
43 | axons,
44 | HealthCheck(),
45 | deserialize=False,
46 |             timeout=timeout,
47 | )
48 | successful_uids = [
49 | uid
50 | for uid, response in zip(uids, responses)
51 | if response.dendrite.status_code == 200
52 | ]
53 | failed_uids = [
54 | uid
55 | for uid, response in zip(uids, responses)
56 | if response.dendrite.status_code != 200
57 | ]
58 | except Exception as e:
59 | logger.error(f"Dendrite ping failed: {e}")
60 | successful_uids = []
61 | failed_uids = uids
62 |     logger.debug("ping() successful uids", successful_uids=successful_uids)
63 |     logger.debug("ping() failed uids", failed_uids=failed_uids)
64 | return successful_uids, failed_uids
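A minimal sketch of calling ping_uids, assuming a local wallet and netuid 15; the uids are arbitrary examples.

import asyncio
import bittensor as bt
from insights.api.get_query_axons import ping_uids

async def main():
    wallet = bt.wallet(name="default", hotkey="default")
    dendrite = bt.dendrite(wallet=wallet)
    metagraph = bt.metagraph(netuid=15)
    successful, failed = await ping_uids(dendrite, metagraph, uids=[0, 1, 2], timeout=3)
    print(successful, failed)

asyncio.run(main())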
--------------------------------------------------------------------------------
/neurons/validators/utils/uids.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import random
3 | import bittensor as bt
4 | from typing import List
5 |
6 | from loguru import logger
7 |
8 | import insights.protocol
9 |
10 |
11 | def check_uid_availability(
12 | metagraph: "bt.metagraph.Metagraph", uid: int, vpermit_tao_limit: int
13 | ) -> bool:
14 | """Check if uid is available. The UID should be available if it is serving and has less than vpermit_tao_limit stake
15 | Args:
16 | metagraph (:obj: bt.metagraph.Metagraph): Metagraph object
17 | uid (int): uid to be checked
18 | vpermit_tao_limit (int): Validator permit tao limit
19 | Returns:
20 | bool: True if uid is available, False otherwise
21 | """
22 | # Filter non serving axons.
23 | if not metagraph.axons[uid].is_serving:
24 | return False
25 |
26 |     # If the uid holds a validator permit, apply the stake filter below.
27 | if metagraph.validator_permit[uid]:
28 |
29 | # Filter out miners who are validators
30 | if metagraph.S[uid] >= vpermit_tao_limit:
31 | return False
32 |
33 | # Filter out uid without IP.
34 | if metagraph.neurons[uid].axon_info.ip == '0.0.0.0':
35 | return False
36 | # Available otherwise.
37 | return True
38 |
39 | def get_random_uids(
40 | self, k: int, exclude: List[int] = None
41 | ) -> np.ndarray:
42 | """Returns k available random uids from the metagraph.
43 | Args:
44 | k (int): Number of uids to return.
45 | exclude (List[int]): List of uids to exclude from the random sampling.
46 | Returns:
47 | uids (np.array): Randomly sampled available uids.
48 | Notes:
49 | If `k` is larger than the number of available `uids`, set `k` to the number of available `uids`.
50 | """
51 | candidate_uids = []
52 | for uid in range(self.metagraph.n.item()):
53 | uid_is_available = check_uid_availability(
54 | self.metagraph, uid, self.config.neuron.vpermit_tao_limit
55 | )
56 | uid_is_not_excluded = exclude is None or uid not in exclude
57 |
58 | if uid_is_available:
59 | if uid_is_not_excluded:
60 | candidate_uids.append(uid)
61 |
62 | k = max(1, min(len(candidate_uids), k))
63 | uids = np.array(random.sample(candidate_uids, k))
64 | return uids
65 |
66 | def get_uids_batch(self, batch_size: int, exclude: List[int] = None):
67 | candidate_uids = []
68 | for uid in range(self.metagraph.n.item()):
69 | uid_is_available = check_uid_availability(
70 | self.metagraph, uid, self.config.neuron.vpermit_tao_limit
71 | )
72 |         uid_is_not_excluded = (exclude is None or uid not in exclude) and uid != self.uid
73 |
74 | if uid_is_available:
75 | if uid_is_not_excluded:
76 | candidate_uids.append(uid)
77 |
78 | # Shuffle the list of available uids
79 | random.shuffle(candidate_uids)
80 | batch_size = max(1, min(len(candidate_uids), batch_size))
81 |
82 | # Yield batches of uids
83 | for i in range(0, len(candidate_uids), batch_size):
84 | yield np.array(candidate_uids[i:i+batch_size])
85 |
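A self-contained sketch of get_uids_batch using stand-in objects; real validators pass the neuron instance itself as `self`, and every value below is fabricated.

from types import SimpleNamespace
import numpy as np
from neurons.validators.utils.uids import get_uids_batch

n = 4
metagraph = SimpleNamespace(
    n=np.int64(n),
    axons=[SimpleNamespace(is_serving=True) for _ in range(n)],
    S=np.array([10.0, 2000.0, 5.0, 1.0]),           # uid 1 looks like a staked validator
    validator_permit=[False, True, False, False],
    neurons=[SimpleNamespace(axon_info=SimpleNamespace(ip=f"10.0.0.{i + 1}")) for i in range(n)],
)
mock_self = SimpleNamespace(
    uid=0,                                           # the caller's own uid is skipped
    metagraph=metagraph,
    config=SimpleNamespace(neuron=SimpleNamespace(vpermit_tao_limit=1024)),
)

for batch in get_uids_batch(mock_self, batch_size=2):
    print(batch)  # e.g. [2 3]: uid 0 is self, uid 1 is filtered by stake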
--------------------------------------------------------------------------------
/template/validator/forward.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # TODO(developer): Set your name
4 | # Copyright © 2023
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import bittensor as bt
21 |
22 | from template.protocol import Dummy
23 | from template.validator.reward import get_rewards
24 | from template.utils.uids import get_random_uids
25 | from neurons import logger
26 |
27 |
28 | async def forward(self):
29 | """
30 | The forward function is called by the validator every time step.
31 |
32 | It is responsible for querying the network and scoring the responses.
33 |
34 | Args:
35 | self (:obj:`bittensor.neuron.Neuron`): The neuron object which contains all the necessary state for the validator.
36 |
37 | """
38 | # TODO(developer): Define how the validator selects a miner to query, how often, etc.
39 | # get_random_uids is an example method, but you can replace it with your own.
40 | miner_uids = get_random_uids(self, k=self.config.neuron.sample_size)
41 |
42 | # The dendrite client queries the network.
43 | responses = await self.dendrite(
44 | # Send the query to selected miner axons in the network.
45 | axons=[self.metagraph.axons[uid] for uid in miner_uids],
46 | # Construct a dummy query. This simply contains a single integer.
47 | synapse=Dummy(dummy_input=self.step),
48 | # All responses have the deserialize function called on them before returning.
49 | # You are encouraged to define your own deserialization function.
50 | deserialize=True,
51 | )
52 |
53 | # Log the results for monitoring purposes.
54 |     logger.info("Received responses", responses=responses)
55 |
56 | # TODO(developer): Define how the validator scores responses.
57 | # Adjust the scores based on responses from miners.
58 | rewards = get_rewards(self, query=self.step, responses=responses)
59 |
60 |     logger.info("Scored responses", rewards=rewards)
61 | # Update the scores based on the rewards. You may want to define your own update_scores function for custom behavior.
62 | self.update_scores(rewards, miner_uids)
63 |
--------------------------------------------------------------------------------
/template/protocol.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # TODO(developer): Set your name
4 | # Copyright © 2023
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import typing
21 | import bittensor as bt
22 |
23 | # TODO(developer): Rewrite with your protocol definition.
24 |
25 | # This is the protocol for the dummy miner and validator.
26 | # It is a simple request-response protocol where the validator sends a request
27 | # to the miner, and the miner responds with a dummy response.
28 |
29 | # ---- miner ----
30 | # Example usage:
31 | # def dummy( synapse: Dummy ) -> Dummy:
32 | # synapse.dummy_output = synapse.dummy_input + 1
33 | # return synapse
34 | # axon = bt.axon().attach( dummy ).serve(netuid=...).start()
35 |
36 | # ---- validator ---
37 | # Example usage:
38 | # dendrite = bt.dendrite()
39 | # dummy_output = dendrite.query( Dummy( dummy_input = 1 ) )
40 | # assert dummy_output == 2
41 |
42 |
43 | class Dummy(bt.Synapse):
44 | """
45 | A simple dummy protocol representation which uses bt.Synapse as its base.
46 | This protocol helps in handling dummy request and response communication between
47 | the miner and the validator.
48 |
49 | Attributes:
50 | - dummy_input: An integer value representing the input request sent by the validator.
51 | - dummy_output: An optional integer value which, when filled, represents the response from the miner.
52 | """
53 |
54 | # Required request input, filled by sending dendrite caller.
55 | dummy_input: int
56 |
57 |     # Optional request output, filled by receiving axon.
58 | dummy_output: typing.Optional[int] = None
59 |
60 | def deserialize(self) -> int:
61 | """
62 | Deserialize the dummy output. This method retrieves the response from
63 | the miner in the form of dummy_output, deserializes it and returns it
64 | as the output of the dendrite.query() call.
65 |
66 | Returns:
67 | - int: The deserialized response, which in this case is the value of dummy_output.
68 |
69 | Example:
70 | Assuming a Dummy instance has a dummy_output value of 5:
71 | >>> dummy_instance = Dummy(dummy_input=4)
72 | >>> dummy_instance.dummy_output = 5
73 | >>> dummy_instance.deserialize()
74 | 5
75 | """
76 | return self.dummy_output
--------------------------------------------------------------------------------
/docs/stream_tutorial/client.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import asyncio
3 | import bittensor as bt
4 |
5 | from protocol import StreamPrompting
6 |
7 | """
8 | This assumes you have:
9 | 1. Registered your miner on the chain (finney/test)
10 | 2. Are serving your miner on an open port (e.g. 12345)
11 |
12 | Steps:
13 | - Instantiate your synapse subclass with the relevant information. E.g. messages, roles, etc.
14 | - Instantiate your wallet and a dendrite client
15 | - Query the dendrite client with your synapse object
16 | - Iterate over the async generator to extract the yielded tokens on the server side
17 | """
18 |
19 |
20 | async def query_synapse(my_uid, wallet_name, hotkey, network, netuid):
21 | syn = StreamPrompting(
22 | roles=["user"],
23 | messages=[
24 | "hello this is a test of a streaming response. Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
25 | ],
26 | )
27 |
28 | # create a wallet instance with provided wallet name and hotkey
29 | wallet = bt.wallet(name=wallet_name, hotkey=hotkey)
30 |
31 | # instantiate the metagraph with provided network and netuid
32 | metagraph = bt.metagraph(
33 | netuid=netuid, network=network, sync=True, lite=False
34 | )
35 |
36 | # Grab the axon you're serving
37 | axon = metagraph.axons[my_uid]
38 |
39 | # Create a Dendrite instance to handle client-side communication.
40 | dendrite = bt.dendrite(wallet=wallet)
41 |
42 | async def main():
43 | responses = await dendrite(
44 | [axon], syn, deserialize=False, streaming=True
45 | )
46 |
47 | for resp in responses:
48 | i = 0
49 | async for chunk in resp:
50 | i += 1
51 | if i % 5 == 0:
52 | print()
53 | if isinstance(chunk, list):
54 | print(chunk[0], end="", flush=True)
55 | else:
56 | # last object yielded is the synapse itself with completion filled
57 | synapse = chunk
58 | break
59 |
60 | # Run the main function with asyncio
61 | await main()
62 |
63 |
64 | if __name__ == "__main__":
65 | parser = argparse.ArgumentParser(
66 | description="Query a Bittensor synapse with given parameters."
67 | )
68 |
69 | # Adding arguments
70 | parser.add_argument(
71 | "--my_uid",
72 | type=int,
73 | required=True,
74 | help="Your unique miner ID on the chain",
75 | )
76 | parser.add_argument(
77 | "--netuid", type=int, required=True, help="Network Unique ID"
78 | )
79 | parser.add_argument(
80 | "--wallet_name", type=str, default="default", help="Name of the wallet"
81 | )
82 | parser.add_argument(
83 | "--hotkey", type=str, default="default", help="Hotkey for the wallet"
84 | )
85 | parser.add_argument(
86 | "--network",
87 | type=str,
88 | default="test",
90 |         help='Network type, e.g., "test" or "finney"',
90 | )
91 |
92 | # Parse arguments
93 | args = parser.parse_args()
94 |
95 | # Running the async function with provided arguments
96 | asyncio.run(
97 | query_synapse(
98 | args.my_uid,
99 | args.wallet_name,
100 | args.hotkey,
101 | args.network,
102 | args.netuid,
103 | )
104 | )
105 |
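A typical invocation against the test network looks like `python docs/stream_tutorial/client.py --my_uid 1 --netuid 1 --wallet_name default --hotkey default --network test`, where the uid and netuid are placeholders for your own registered miner.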
--------------------------------------------------------------------------------
/insights/api/__init__.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 | # Copyright © 2024 Philanthrope
6 |
7 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
8 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
9 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
10 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
11 |
12 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
13 | # the Software.
14 |
15 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
16 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
18 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
19 | # DEALINGS IN THE SOFTWARE.
20 |
21 | import bittensor as bt
22 | from abc import ABC, abstractmethod
23 | from typing import Any, List, Union, Optional
24 | from neurons import logger
25 |
26 |
27 | class SubnetsAPI(ABC):
28 | def __init__(self, wallet: "bt.wallet"):
29 | self.wallet = wallet
30 | self.dendrite = bt.dendrite(wallet=wallet)
31 |
32 | async def __call__(self, *args, **kwargs):
33 | return await self.query_api(*args, **kwargs)
34 |
35 | @abstractmethod
36 | def prepare_synapse(self, *args, **kwargs) -> Any:
37 | """
38 | Prepare the synapse-specific payload.
39 | """
40 | ...
41 |
42 | @abstractmethod
43 | def process_responses(self, responses: List[Union["bt.Synapse", Any]]) -> Any:
44 | """
45 | Process the responses from the network.
46 | """
47 | ...
48 |
49 | async def query_api(
50 | self,
51 | axons: Union[bt.axon, List[bt.axon]],
52 | deserialize: Optional[bool] = False,
53 | timeout: Optional[int] = 12,
54 | n: Optional[float] = 0.1,
55 | uid: Optional[int] = None,
56 | **kwargs: Optional[Any],
57 | ) -> Any:
58 | """
59 | Queries the API nodes of a subnet using the given synapse and bespoke query function.
60 |
61 | Args:
62 | axons (Union[bt.axon, List[bt.axon]]): The list of axon(s) to query.
63 | deserialize (bool, optional): Whether to deserialize the responses. Defaults to False.
64 | timeout (int, optional): The timeout in seconds for the query. Defaults to 12.
65 | n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1.
66 | uid (int, optional): The specific UID of the API node to query. Defaults to None.
67 | **kwargs: Keyword arguments for the prepare_synapse_fn.
68 |
69 | Returns:
70 | Any: The result of the process_responses_fn.
71 | """
72 | synapse = self.prepare_synapse(**kwargs)
73 |         logger.debug(f"Querying validator axons with synapse {synapse.name}...")
74 | responses = await self.dendrite(
75 | axons=axons,
76 | synapse=synapse,
77 | deserialize=deserialize,
78 | timeout=timeout,
79 | )
80 | return self.process_responses(responses)
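The two abstract methods are the whole contract; a minimal concrete subclass might look like the sketch below. EchoAPI is hypothetical and not part of the codebase.

import bittensor as bt
from typing import Any, List, Union
from insights.api import SubnetsAPI
from insights.protocol import HealthCheck

class EchoAPI(SubnetsAPI):
    """Hypothetical example: sends HealthCheck and echoes successful outputs."""

    def prepare_synapse(self) -> HealthCheck:
        return HealthCheck()

    def process_responses(self, responses: List[Union["bt.Synapse", Any]]) -> list:
        return [r.output for r in responses if r.dendrite.status_code == 200]

# Usage mirrors TextQueryAPI above: results = await EchoAPI(wallet)(axons=axons)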
--------------------------------------------------------------------------------
/neurons/validators/utils/metadata.py:
--------------------------------------------------------------------------------
1 | import bittensor as bt
2 |
3 | from typing import List, Dict, Optional, Union
4 | from collections import Counter
5 |
6 | from protocols.blockchain import get_network_by_id
7 | from neurons.storage import get_miners_metadata
8 |
9 | class Metadata:
10 | hotkeys: List[Dict[str, Union[str, int]]]
11 | DISTRIBUTION_KEYS = ['network', 'hotkey', 'ip']
12 |
13 | def __init__(self, hotkeys: List[Dict[str, Union[str, int]]]) -> None:
14 | self.hotkeys = hotkeys
15 | self.distributions = {key: self._distribution_by_key(key) for key in self.DISTRIBUTION_KEYS}
16 |         coldkey_counts = Counter(x['coldkey'] for x in self.hotkeys)
17 |         result = {}
18 |         for d in self.hotkeys:
19 |             hotkey, coldkey = d['hotkey'], d['coldkey']
20 |             result[hotkey] = coldkey_counts[coldkey]
21 |
22 |         self.distributions['coldkey'] = result
23 |
24 | @classmethod
25 | def build(cls, metagraph: bt.metagraph, config):
26 | return cls(cls.retrieve_data(metagraph, config))
27 |
28 | @classmethod
29 | def retrieve_data(cls, metagraph, config) -> List[Dict]:
30 | miners_metadata = get_miners_metadata(config, metagraph)
31 | hotkeys_metadata = []
32 | for neuron in metagraph.neurons:
33 | miner_metadata = miners_metadata.get(neuron.hotkey)
34 |
35 | network_id, version, funds_flow_end_block_height, balance_tracking_end_block_height = None, None, None, None
36 | if miner_metadata:
37 | network_id = miner_metadata.n
38 | version = miner_metadata.cv
39 | funds_flow_end_block_height = miner_metadata.lb
40 | balance_tracking_end_block_height = miner_metadata.bl
41 |
42 | data = dict(
43 | hotkey=neuron.hotkey,
44 | coldkey=neuron.coldkey,
45 | ip=neuron.axon_info.ip,
46 | network=get_network_by_id(network_id),
47 | version=version,
48 | funds_flow_end_block_height=funds_flow_end_block_height,
49 | balance_tracking_end_block_height=balance_tracking_end_block_height
50 | )
51 | hotkeys_metadata.append(data)
52 | return hotkeys_metadata
53 |
54 | def _distribution_by_key(self, key: str) -> Dict[str, int]:
55 | data = [hotkey[key] for hotkey in self.hotkeys if hotkey[key] is not None]
56 | return dict(Counter(data))
57 |
58 |     def get_metadata_for_hotkey(self, hotkey: str) -> Optional[Dict[str, Union[str, int]]]:
59 | for m in self.hotkeys:
60 | if m['hotkey'] == hotkey:
61 | return m
62 | return None
63 |
64 | @property
65 | def network_distribution(self):
66 | return self.distributions['network']
67 |
68 | @property
69 | def hotkey_distribution(self):
70 | return self.distributions['hotkey']
71 |
72 | @property
73 | def ip_distribution(self):
74 | return self.distributions['ip']
75 |
76 | @property
77 | def coldkey_distribution(self):
78 | return self.distributions['coldkey']
79 |
80 | @property
81 | def worst_funds_flow_end_block_height(self):
82 | hotkey = min(filter(lambda x: x.get('funds_flow_end_block_height') is not None, self.hotkeys), key=lambda x: x['funds_flow_end_block_height'])
83 | return hotkey['funds_flow_end_block_height']
84 |
85 | @property
86 | def worst_balance_tracking_end_block_height(self):
87 | hotkey = min(filter(lambda x: x.get('balance_tracking_end_block_height') is not None, self.hotkeys), key=lambda x: x['balance_tracking_end_block_height'])
88 | return hotkey['balance_tracking_end_block_height']
89 |
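A sketch of the distribution bookkeeping with hand-built rows (all values fabricated); Metadata.build() normally assembles these rows from the metagraph and miner metadata.

from neurons.validators.utils.metadata import Metadata

rows = [
    {"hotkey": "hk1", "coldkey": "ck1", "ip": "10.0.0.1", "network": "bitcoin",
     "version": 6, "funds_flow_end_block_height": 800_000, "balance_tracking_end_block_height": 799_000},
    {"hotkey": "hk2", "coldkey": "ck1", "ip": "10.0.0.2", "network": "bitcoin",
     "version": 6, "funds_flow_end_block_height": 790_000, "balance_tracking_end_block_height": 788_000},
]
meta = Metadata(rows)
print(meta.network_distribution)               # {'bitcoin': 2}
print(meta.coldkey_distribution)               # {'hk1': 2, 'hk2': 2}: hotkeys sharing a coldkey
print(meta.worst_funds_flow_end_block_height)  # 790000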
--------------------------------------------------------------------------------
/neurons/miners/blacklist.py:
--------------------------------------------------------------------------------
1 | import bittensor as bt
2 | from insights import protocol
3 | import typing
4 | import time
5 |
6 | from collections import deque
7 | from neurons import logger
8 |
9 |
10 | def discovery_blacklist(self, synapse: protocol.Discovery) -> typing.Tuple[bool, str]:
11 | """
12 | Perform discovery-specific blacklist checks for a hotkey.
13 |
14 | Parameters:
15 | - synapse (protocol.Discovery)
16 |
17 | Returns:
18 | Tuple[bool, str]:
19 | - First element (bool): True if the hotkey is blacklisted, False if whitelisted.
20 | - Second element (str): Message providing information about the hotkey status.
21 |
22 | Blacklisting Conditions:
23 | - Base Blacklist Check
24 | - Unregistered Hotkey
25 | - Low TAO Stake
26 | - Request Rate Limiting
27 | """
28 | hotkey = synapse.dendrite.hotkey
29 | is_blacklist, message = base_blacklist(self, synapse=synapse)
30 | if is_blacklist:
31 | return is_blacklist, message
32 |
33 | axon_uid = None
34 | for uid, _axon in enumerate(self.metagraph.axons): # noqa: B007
35 | if _axon.hotkey == hotkey:
36 |             axon_uid = uid
37 | break
38 |
39 | if axon_uid is None:
40 | return True, f"Blacklisted a non registered hotkey's request from {hotkey}"
41 |
42 |     stake = self.metagraph.neurons[axon_uid].stake.tao
43 | logger.debug("Stake of hotkey", validator_hotkey = hotkey, stake = stake)
44 |
45 | if stake < self.miner_config.stake_threshold and self.config.mode == 'prod':
46 | return True, f"Denied due to low stake: {stake}<{self.miner_config.stake_threshold}"
47 |
48 | # Rate Limiting Check
49 | time_window = self.miner_config.min_request_period
50 | current_time = time.time()
51 |
52 | if hotkey not in self.request_timestamps:
53 | self.request_timestamps[hotkey] = deque()
54 |
55 | # Remove timestamps outside the current time window
56 | while self.request_timestamps[hotkey] and current_time - self.request_timestamps[hotkey][0] > time_window:
57 | self.request_timestamps[hotkey].popleft()
58 |
59 | # Check if the number of requests exceeds the limit
60 | if len(self.request_timestamps[hotkey]) >= self.miner_config.max_requests:
61 | return True, f"Request rate exceeded for {hotkey}"
62 |
63 | self.request_timestamps[hotkey].append(current_time)
64 | return False, "Hotkey recognized!"
65 |
66 | def base_blacklist(self, synapse: bt.Synapse) -> typing.Tuple[bool, str]:
67 | """
68 | Perform base blacklist checks for a hotkey.
69 |
70 | Parameters:
71 | - synapse (bt.Synapse)
72 |
73 | Returns:
74 | Tuple[bool, str]:
75 | - First element (bool): True if the hotkey is blacklisted, False if recognized.
76 | - Second element (str): Message providing information about the hotkey status.
77 |
78 | Blacklisting Conditions:
79 | - Unrecognized Hotkey
80 | - Blacklisted Hotkey
81 | - Not Whitelisted
82 |
83 | """
84 |
85 | hotkey = synapse.dendrite.hotkey
86 | if hotkey not in self.metagraph.hotkeys:
87 |         logger.trace("Blacklisting unrecognized hotkey", validator_hotkey=hotkey)
88 | return True, "Unrecognized hotkey"
89 | if not self.miner_config.is_grace_period and synapse.version != protocol.VERSION:
90 | return True, f"Blacklisted: Protocol Version differs miner_version={protocol.VERSION} validator_version={synapse.version} for hotkey: {hotkey}"
91 | if hotkey in self.miner_config.blacklisted_hotkeys:
92 | return True, f"Blacklisted hotkey: {hotkey}"
93 | if hotkey not in self.miner_config.whitelisted_hotkeys and self.config.mode == 'prod':
94 | return True, f"Not Whitelisted hotkey: {hotkey}"
95 | return False, "Hotkey recognized"
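The rate-limit logic above reduces to a sliding-window counter; here it is in isolation, with an arbitrary window and cap rather than the miner's real configuration.

import time
from collections import deque

WINDOW_SECONDS, MAX_REQUESTS = 60, 3
timestamps = deque()

def allow_request(now=None) -> bool:
    now = time.time() if now is None else now
    # Drop timestamps that fell out of the window, then apply the cap.
    while timestamps and now - timestamps[0] > WINDOW_SECONDS:
        timestamps.popleft()
    if len(timestamps) >= MAX_REQUESTS:
        return False
    timestamps.append(now)
    return True

print([allow_request() for _ in range(5)])  # [True, True, True, False, False]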
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # Copyright © 2023 Chris Wilson
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
6 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
7 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
8 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9 |
10 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
11 | # the Software.
12 |
13 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
14 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
15 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
16 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
17 | # DEALINGS IN THE SOFTWARE.
18 |
19 | import re
20 | import os
21 | import codecs
22 | from os import path
23 | from io import open
24 | from setuptools import setup, find_packages
25 |
26 |
27 | def read_requirements(path):
28 | with open(path, "r") as f:
29 | requirements = f.read().splitlines()
30 | processed_requirements = []
31 |
32 | for req in requirements:
33 | # For git or other VCS links
34 | if req.startswith("git+") or "@" in req:
35 | pkg_name = re.search(r"(#egg=)([\w\-_]+)", req)
36 | if pkg_name:
37 | processed_requirements.append(pkg_name.group(2))
38 | else:
39 | # You may decide to raise an exception here,
40 | # if you want to ensure every VCS link has an #egg= at the end
41 | continue
42 | else:
43 | processed_requirements.append(req)
44 | return processed_requirements
45 |
46 |
47 | requirements = read_requirements("requirements.txt")
48 | here = path.abspath(path.dirname(__file__))
49 |
50 | with open(path.join(here, "README.md"), encoding="utf-8") as f:
51 | long_description = f.read()
52 |
53 | # loading version from setup.py
54 | with codecs.open(
55 | os.path.join(here, "insights/__init__.py"), encoding="utf-8"
56 | ) as init_file:
57 | version_match = re.search(
58 | r"^__version__ = ['\"]([^'\"]*)['\"]", init_file.read(), re.M
59 | )
60 | version_string = version_match.group(1)
61 |
62 | setup(
63 | name="blockchain_insights_subnet",
64 | version=version_string,
65 | description="blockchain_insights_subnet",
66 | long_description=long_description,
67 | long_description_content_type="text/markdown",
68 | url="https://github.com/blockchain-insights/blockchain-data-subnet",
69 | author="aphex5",
70 | packages=find_packages(),
71 | include_package_data=True,
72 | author_email="netkmal@gmail.com",
73 | license="MIT",
74 | python_requires=">=3.9",
75 | install_requires=requirements,
76 | classifiers=[
77 | "Development Status :: 3 - Alpha",
78 | "Intended Audience :: Developers",
79 | "Topic :: Software Development :: Build Tools",
80 | # Pick your license as you wish
81 | "License :: OSI Approved :: MIT License",
82 | "Programming Language :: Python :: 3 :: Only",
83 |         "Programming Language :: Python :: 3.9",
84 |         "Programming Language :: Python :: 3.10",
85 |         "Topic :: Scientific/Engineering",
86 |         "Topic :: Scientific/Engineering :: Mathematics",
87 |         "Topic :: Scientific/Engineering :: Artificial Intelligence",
88 |         "Topic :: Software Development",
89 |         "Topic :: Software Development :: Libraries",
90 |         "Topic :: Software Development :: Libraries :: Python Modules",
91 |     ],
92 | )
93 |
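What read_requirements() does with a VCS line, shown with a hypothetical URL; links without an #egg= suffix are silently skipped.

import re

req = "git+https://github.com/example/protocols.git#egg=protocols"
match = re.search(r"(#egg=)([\w\-_]+)", req)
print(match.group(2))  # protocols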
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | validator.yml
2 | miner.yml
3 |
4 | neurons/validator*.json
5 | neurons/miner*.json
6 |
7 | # Byte-compiled / optimized / DLL files
8 | __pycache__/
9 | *.py[cod]
10 | *$py.class
11 |
12 | # C extensions
13 | *.so
14 |
15 | # Distribution / packaging
16 | .Python
17 | build/
18 | develop-eggs/
19 | dist/
20 | downloads/
21 | eggs/
22 | .eggs/
23 | lib/
24 | lib64/
25 | parts/
26 | sdist/
27 | var/
28 | wheels/
29 | share/python-wheels/
30 | *.egg-info/
31 | .installed.cfg
32 | *.egg
33 | MANIFEST
34 |
35 | # PyInstaller
36 | # Usually these files are written by a python script from a template
37 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
38 | *.manifest
39 | *.spec
40 |
41 | # Installer logs
42 | pip-log.txt
43 | pip-delete-this-directory.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .nox/
49 | .coverage
50 | .coverage.*
51 | .cache
52 | nosetests.xml
53 | coverage.xml
54 | *.cover
55 | *.py,cover
56 | .hypothesis/
57 | .pytest_cache/
58 | cover/
59 |
60 | # Translations
61 | *.mo
62 | *.pot
63 |
64 | # Django stuff:
65 | *.log
66 | local_settings.py
67 | db.sqlite3
68 | db.sqlite3-journal
69 |
70 | # Flask stuff:
71 | instance/
72 | .webassets-cache
73 |
74 | # Scrapy stuff:
75 | .scrapy
76 |
77 | # Sphinx documentation
78 | docs/_build/
79 |
80 | # PyBuilder
81 | .pybuilder/
82 | target/
83 |
84 | # Jupyter Notebook
85 | .ipynb_checkpoints
86 |
87 | # IPython
88 | profile_default/
89 | ipython_config.py
90 |
91 | # pyenv
92 | # For a library or package, you might want to ignore these files since the code is
93 | # intended to run in multiple environments; otherwise, check them in:
94 | # .python-version
95 |
96 | # pipenv
97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
100 | # install all needed dependencies.
101 | #Pipfile.lock
102 |
103 | # poetry
104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more
106 | # commonly ignored for libraries.
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/#use-with-ide
116 | .pdm.toml
117 |
118 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
119 | __pypackages__/
120 |
121 | # Celery stuff
122 | celerybeat-schedule
123 | celerybeat.pid
124 |
125 | # SageMath parsed files
126 | *.sage.py
127 |
128 | # Environments
129 | .env
130 | .venv
131 | env/
132 | venv/
133 | insight-env/
134 | ENV/
135 | env.bak/
136 | venv.bak/
137 |
138 | # Spyder project settings
139 | .spyderproject
140 | .spyproject
141 |
142 | # Rope project settings
143 | .ropeproject
144 |
145 | # mkdocs documentation
146 | /site
147 |
148 | # mypy
149 | .mypy_cache/
150 | .dmypy.json
151 | dmypy.json
152 |
153 | # Pyre type checker
154 | .pyre/
155 |
156 | # pytype static type analyzer
157 | .pytype/
158 |
159 | # Cython debug symbols
160 | cython_debug/
161 |
162 | # PyCharm
163 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
164 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
165 | # and can be added to the global gitignore or merged into this file. For a more nuclear
166 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
167 | #.idea/
168 | /neurons/validators/test_miner_data_samples.db
169 | /neurons/validators/miner_registry.db
170 | /*.db
171 |
--------------------------------------------------------------------------------
/docs/stream_tutorial/config.py:
--------------------------------------------------------------------------------
1 | import bittensor as bt
2 | import argparse
3 | import os
4 |
5 |
6 | def check_config(cls, config: "bt.Config"):
7 | bt.axon.check_config(config)
8 | bt.logging.check_config(config)
9 | full_path = os.path.expanduser(
10 | "{}/{}/{}/{}".format(
11 | config.logging.logging_dir,
12 | config.wallet.get("name", bt.defaults.wallet.name),
13 | config.wallet.get("hotkey", bt.defaults.wallet.hotkey),
14 | config.miner.name,
15 | )
16 | )
17 | config.miner.full_path = os.path.expanduser(full_path)
18 | if not os.path.exists(config.miner.full_path):
19 | os.makedirs(config.miner.full_path)
20 |
21 |
22 | def get_config() -> "bt.Config":
23 | parser = argparse.ArgumentParser()
24 | parser.add_argument(
25 | "--axon.port", type=int, default=8098, help="Port to run the axon on."
26 | )
27 | # Subtensor network to connect to
28 | parser.add_argument(
29 | "--subtensor.network",
30 | default="finney",
31 | help="Bittensor network to connect to.",
32 | )
33 | # Chain endpoint to connect to
34 | parser.add_argument(
35 | "--subtensor.chain_endpoint",
36 | default="wss://entrypoint-finney.opentensor.ai:443",
37 | help="Chain endpoint to connect to.",
38 | )
39 | # Adds override arguments for network and netuid.
40 | parser.add_argument(
41 | "--netuid", type=int, default=1, help="The chain subnet uid."
42 | )
43 |
44 | parser.add_argument(
45 | "--miner.root",
46 | type=str,
47 | help="Trials for this miner go in miner.root / (wallet_cold - wallet_hot) / miner.name ",
48 | default="~/.bittensor/miners/",
49 | )
50 | parser.add_argument(
51 | "--miner.name",
52 | type=str,
53 | help="Trials for this miner go in miner.root / (wallet_cold - wallet_hot) / miner.name ",
54 | default="Bittensor Miner",
55 | )
56 |
57 | # Run config.
58 | parser.add_argument(
59 | "--miner.blocks_per_epoch",
60 |         type=int,
61 | help="Blocks until the miner repulls the metagraph from the chain",
62 | default=100,
63 | )
64 |
65 | # Switches.
66 | parser.add_argument(
67 | "--miner.no_serve",
68 | action="store_true",
69 |         help="If True, the miner doesn't serve the axon.",
70 | default=False,
71 | )
72 | parser.add_argument(
73 | "--miner.no_start_axon",
74 | action="store_true",
75 |         help="If True, the miner doesn't start the axon.",
76 | default=False,
77 | )
78 |
79 | # Mocks.
80 | parser.add_argument(
81 | "--miner.mock_subtensor",
82 | action="store_true",
83 | help="If True, the miner will allow non-registered hotkeys to mine.",
84 | default=False,
85 | )
86 |
87 | # Adds subtensor specific arguments i.e. --subtensor.chain_endpoint ... --subtensor.network ...
88 | bt.subtensor.add_args(parser)
89 |
90 | # Adds logging specific arguments i.e. --logging.debug ..., --logging.trace .. or --logging.logging_dir ...
91 | bt.logging.add_args(parser)
92 |
93 | # Adds wallet specific arguments i.e. --wallet.name ..., --wallet.hotkey ./. or --wallet.path ...
94 | bt.wallet.add_args(parser)
95 |
96 | # Adds axon specific arguments i.e. --axon.port ...
97 | bt.axon.add_args(parser)
98 |
99 | # Activating the parser to read any command-line inputs.
100 | # To print help message, run python3 template/miner.py --help
101 | config = bt.config(parser)
102 |
103 | # Logging captures events for diagnosis or understanding miner's behavior.
104 | config.full_path = os.path.expanduser(
105 | "{}/{}/{}/netuid{}/{}".format(
106 | config.logging.logging_dir,
107 | config.wallet.name,
108 | config.wallet.hotkey,
109 | config.netuid,
110 | "miner",
111 | )
112 | )
113 | # Ensure the directory for logging exists, else create one.
114 | if not os.path.exists(config.full_path):
115 | os.makedirs(config.full_path, exist_ok=True)
116 | return config
117 |
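Any script built on this helper accepts `--help` to print the assembled argument set; defaults such as `--axon.port 8098` or `--netuid 1` can be overridden on the command line, and the resulting log directory is created under `<logging_dir>/<wallet>/<hotkey>/netuid<N>/miner`.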
--------------------------------------------------------------------------------
/min_compute.yml:
--------------------------------------------------------------------------------
1 | # Use this document to specify the minimum compute requirements.
2 | # This document will be used to generate a list of recommended hardware for your subnet.
3 |
4 | # This is intended to give a rough estimate of the minimum requirements
5 | # so that the user can make an informed decision about whether or not
6 | # they want to run a miner or validator on their machine.
7 |
8 | # NOTE: Specification for miners may be different from validators
9 |
10 | version: '1.0' # update this version key as needed, ideally should match your release version
11 |
12 | compute_spec:
13 |
14 | miner:
15 |
16 | cpu:
17 | min_cores: 4 # Minimum number of CPU cores
18 | min_speed: 2.5 # Minimum speed per core (GHz)
19 | recommended_cores: 8 # Recommended number of CPU cores
20 | recommended_speed: 3.5 # Recommended speed per core (GHz)
21 | architecture: "x86_64" # Architecture type (e.g., x86_64, arm64)
22 |
23 | gpu:
24 | required: False # Does the application require a GPU?
25 | min_vram: 8 # Minimum GPU VRAM (GB)
26 | recommended_vram: 24 # Recommended GPU VRAM (GB)
27 | cuda_cores: 1024 # Minimum number of CUDA cores (if applicable)
28 | min_compute_capability: 6.0 # Minimum CUDA compute capability
29 | recommended_compute_capability: 7.0 # Recommended CUDA compute capability
30 | recommended_gpu: "NVIDIA A100" # provide a recommended GPU to purchase/rent
31 |
32 | memory:
33 | min_ram: 16 # Minimum RAM (GB)
34 | min_swap: 4 # Minimum swap space (GB)
35 | recommended_swap: 8 # Recommended swap space (GB)
36 | ram_type: "DDR4" # RAM type (e.g., DDR4, DDR3, etc.)
37 |
38 | storage:
39 | min_space: 10 # Minimum free storage space (GB)
40 | recommended_space: 100 # Recommended free storage space (GB)
41 | type: "SSD" # Preferred storage type (e.g., SSD, HDD)
42 | min_iops: 1000 # Minimum I/O operations per second (if applicable)
43 | recommended_iops: 5000 # Recommended I/O operations per second
44 |
45 | os:
46 | name: "Ubuntu" # Name of the preferred operating system(s)
47 | version: 20.04 # Version of the preferred operating system(s)
48 |
49 | validator:
50 |
51 | cpu:
52 | min_cores: 4 # Minimum number of CPU cores
53 | min_speed: 2.5 # Minimum speed per core (GHz)
54 | recommended_cores: 8 # Recommended number of CPU cores
55 | recommended_speed: 3.5 # Recommended speed per core (GHz)
56 | architecture: "x86_64" # Architecture type (e.g., x86_64, arm64)
57 |
58 | gpu:
59 | required: False # Does the application require a GPU?
60 | min_vram: 8 # Minimum GPU VRAM (GB)
61 | recommended_vram: 24 # Recommended GPU VRAM (GB)
62 | cuda_cores: 1024 # Minimum number of CUDA cores (if applicable)
63 | min_compute_capability: 6.0 # Minimum CUDA compute capability
64 | recommended_compute_capability: 7.0 # Recommended CUDA compute capability
65 | recommended_gpu: "NVIDIA A100" # provide a recommended GPU to purchase/rent
66 |
67 | memory:
68 | min_ram: 16 # Minimum RAM (GB)
69 | min_swap: 4 # Minimum swap space (GB)
70 | recommended_swap: 8 # Recommended swap space (GB)
71 | ram_type: "DDR4" # RAM type (e.g., DDR4, DDR3, etc.)
72 |
73 | storage:
74 | min_space: 10 # Minimum free storage space (GB)
75 | recommended_space: 100 # Recommended free storage space (GB)
76 | type: "SSD" # Preferred storage type (e.g., SSD, HDD)
77 | min_iops: 1000 # Minimum I/O operations per second (if applicable)
78 | recommended_iops: 5000 # Recommended I/O operations per second
79 |
80 | os:
81 | name: "Ubuntu" # Name of the preferred operating system(s)
82 | version: 20.04 # Version of the preferred operating system(s)
83 |
84 | network_spec:
85 | bandwidth:
86 | download: 100 # Minimum download bandwidth (Mbps)
87 | upload: 20 # Minimum upload bandwidth (Mbps)
88 |
--------------------------------------------------------------------------------
/tests/miners/test_blacklist.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from unittest.mock import MagicMock, patch
3 | from neurons.miners.blacklist import discovery_blacklist
4 | from insights import protocol
5 | from collections import deque
6 | import time
7 |
8 |
9 | class TestBlackList(unittest.TestCase):
10 |
11 | def setUp(self):
12 | # Mock objects and setup necessary context
13 | self.mock_metagraph = MagicMock()
14 | self.mock_miner_config = MagicMock()
15 | self.mock_config = MagicMock()
16 |
17 | self.validator = MagicMock()
18 | self.validator.metagraph = self.mock_metagraph
19 | self.validator.config = self.mock_config
20 | self.validator.miner_config = self.mock_miner_config
21 |
22 | def test_discovery_blacklist_base_blacklist_true(self):
23 | synapse = protocol.Discovery()
24 | synapse.dendrite.hotkey = 'some_hotkey'
25 | base_blacklist_result = (True, "Base blacklist message")
26 | with patch('neurons.miners.blacklist.base_blacklist', return_value=base_blacklist_result):
27 | result, message = discovery_blacklist(self.validator, synapse)
28 | self.assertEqual(result, base_blacklist_result[0])
29 | self.assertEqual(message, base_blacklist_result[1])
30 |
31 | def test_discovery_blacklist_unregistered_hotkey(self):
32 |
33 | synapse = protocol.Discovery()
34 | synapse.dendrite.hotkey = 'unregistered_hotkey'
35 | self.mock_metagraph.axons = []
36 | with patch('neurons.miners.blacklist.base_blacklist', return_value=(False, '')):
37 | result, message = discovery_blacklist(self.validator, synapse)
38 | self.assertTrue(result)
39 | self.assertEqual(message, "Blacklisted a non registered hotkey's request from unregistered_hotkey")
40 |
41 | def test_discovery_blacklist_low_tao_stake(self):
42 | synapse = protocol.Discovery()
43 | synapse.dendrite.hotkey = 'low_stake_hotkey'
44 | self.mock_metagraph.axons = [MagicMock(hotkey='low_stake_hotkey')]
45 | self.mock_metagraph.neurons = [MagicMock(stake=MagicMock(tao=5))]
46 | self.mock_miner_config.stake_threshold = 10
47 | self.mock_config.mode = 'prod'
48 |
49 | with patch('neurons.miners.blacklist.base_blacklist', return_value=(False, '')):
50 | result, message = discovery_blacklist(self.validator, synapse)
51 | self.assertTrue(result)
52 | self.assertEqual(message, "Denied due to low stake: 5<10")
53 |
54 | def test_discovery_blacklist_request_rate_limiting(self):
55 | synapse = protocol.Discovery()
56 | synapse.dendrite.hotkey = 'rate_limit_hotkey'
57 | self.mock_metagraph.axons = [MagicMock(hotkey='rate_limit_hotkey')]
58 | self.mock_metagraph.neurons = [MagicMock(stake=MagicMock(tao=15))]
59 | self.mock_miner_config.stake_threshold = 10
60 | self.mock_config.mode = 'prod'
61 | self.mock_miner_config.min_request_period = 60
62 | self.mock_miner_config.max_requests = 0
63 |         self.validator.request_timestamps = {'rate_limit_hotkey': deque([time.time() - i for i in range(80, 0, -1)])}
64 |
65 | with patch('neurons.miners.blacklist.base_blacklist', return_value=(False, '')):
66 | result, message = discovery_blacklist(self.validator, synapse)
67 | self.assertEqual(message, "Request rate exceeded for rate_limit_hotkey")
68 | self.assertTrue(result)
69 |
70 | def test_discovery_blacklist_hotkey_recognized(self):
71 | synapse = protocol.Discovery()
72 | synapse.dendrite.hotkey = 'recognized_hotkey'
73 | self.mock_metagraph.axons = [MagicMock(hotkey='recognized_hotkey')]
74 | self.mock_metagraph.neurons = [MagicMock(stake=MagicMock(tao=15))]
75 | self.mock_miner_config.stake_threshold = 10
76 | self.mock_config.mode = 'prod'
77 | self.mock_miner_config.min_request_period = 60
78 | self.mock_miner_config.max_requests = 2
79 | self.validator.request_timestamps = {'recognized_hotkey': deque([time.time()])}
80 |
81 | with patch('neurons.miners.blacklist.base_blacklist', return_value=(False, '')):
82 | result, message = discovery_blacklist(self.validator, synapse)
83 |
84 | self.assertEqual(message, "Hotkey recognized!")
85 | self.assertFalse(result)
86 |
87 |
88 | if __name__ == '__main__':
89 | unittest.main()
--------------------------------------------------------------------------------
/contrib/CODE_REVIEW_DOCS.md:
--------------------------------------------------------------------------------
1 | # Code Review
2 | ### Conceptual Review
3 |
4 | A review can be a conceptual review, where the reviewer leaves a comment
5 | * `Concept (N)ACK`, meaning "I do (not) agree with the general goal of this pull
6 | request",
7 | * `Approach (N)ACK`, meaning `Concept ACK`, but "I do (not) agree with the
8 | approach of this change".
9 |
10 | A `NACK` needs to include a rationale why the change is not worthwhile.
11 | NACKs without accompanying reasoning may be disregarded.
12 | After conceptual agreement on the change, code review can be provided. A review
13 | begins with `ACK BRANCH_COMMIT`, where `BRANCH_COMMIT` is the top of the PR
14 | branch, followed by a description of how the reviewer did the review. The
15 | following language is used within pull request comments:
16 |
17 | - "I have tested the code", involving change-specific manual testing in
18 | addition to running the unit, functional, or fuzz tests, and in case it is
19 | not obvious how the manual testing was done, it should be described;
20 | - "I have not tested the code, but I have reviewed it and it looks
21 | OK, I agree it can be merged";
22 | - A "nit" refers to a trivial, often non-blocking issue.
23 |
24 | ### Code Review
25 | Project maintainers reserve the right to weigh the opinions of peer reviewers
26 | using common sense judgement and may also weigh based on merit. Reviewers that
27 | have demonstrated a deeper commitment and understanding of the project over time
28 | or who have clear domain expertise may naturally have more weight, as one would
29 | expect in all walks of life.
30 |
31 | Where a patch set affects consensus-critical code, the bar will be much
32 | higher in terms of discussion and peer review requirements, keeping in mind that
33 | mistakes could be very costly to the wider community. This includes refactoring
34 | of consensus-critical code.
35 |
36 | Where a patch set proposes to change the Bittensor consensus, it must have been
37 | discussed extensively on the discord server and other channels, be accompanied by a widely
38 | discussed BIP and have a generally widely perceived technical consensus of being
39 | a worthwhile change based on the judgement of the maintainers.
40 |
41 | ### Finding Reviewers
42 |
43 | As most reviewers are themselves developers with their own projects, the review
44 | process can be quite lengthy, and some amount of patience is required. If you find
45 | that you've been waiting for a pull request to be given attention for several
46 | months, there may be a number of reasons for this, some of which you can do something
47 | about:
48 |
49 | - It may be because of a feature freeze due to an upcoming release. During this time,
50 | only bug fixes are taken into consideration. If your pull request is a new feature,
51 | it will not be prioritized until after the release. Wait for the release.
52 | - It may be because the changes you are suggesting do not appeal to people. Rather than
53 |   nits and critique, which require effort and mean they care enough to spend time on your
54 | contribution, thundering silence is a good sign of widespread (mild) dislike of a given change
55 | (because people don't assume *others* won't actually like the proposal). Don't take
56 | that personally, though! Instead, take another critical look at what you are suggesting
57 | and see if it: changes too much, is too broad, doesn't adhere to the
58 | [developer notes](DEVELOPMENT_WORKFLOW.md), is dangerous or insecure, is messily written, etc.
59 |   Identify and address any of the issues you find. Then ask e.g. on the Discord server if someone could give
60 | their opinion on the concept itself.
61 | - It may be because your code is too complex for all but a few people, and those people
62 | may not have realized your pull request even exists. A great way to find people who
63 | are qualified and care about the code you are touching is the
64 | [Git Blame feature](https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/tracking-changes-in-a-file). Simply
65 | look up who last modified the code you are changing and see if you can find
66 | them and give them a nudge. Don't be incessant about the nudging, though.
67 | - Finally, if all else fails, ask on the Discord server or elsewhere for someone to give your pull request
68 | a look. If you think you've been waiting for an unreasonably long time (say,
69 | more than a month) for no particular reason (a few lines changed, etc.),
70 | this is totally fine. Try to return the favor when someone else is asking
71 | for feedback on their code, and the universe balances out.
72 | - Remember that the best thing you can do while waiting is give review to others!
--------------------------------------------------------------------------------
/template/mock.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | import asyncio
4 | import random
5 | import bittensor as bt
6 |
7 | from typing import List
8 | from neurons import logger
9 |
10 |
11 | class MockSubtensor(bt.MockSubtensor):
12 | def __init__(self, netuid, n=16, wallet=None, network="mock"):
13 | super().__init__(network=network)
14 |
15 | if not self.subnet_exists(netuid):
16 | self.create_subnet(netuid)
17 |
18 | # Register ourself (the validator) as a neuron at uid=0
19 | if wallet is not None:
20 | self.force_register_neuron(
21 | netuid=netuid,
22 | hotkey=wallet.hotkey.ss58_address,
23 | coldkey=wallet.coldkey.ss58_address,
24 | balance=100000,
25 | stake=100000,
26 | )
27 |
28 | # Register n mock neurons who will be miners
29 | for i in range(1, n + 1):
30 | self.force_register_neuron(
31 | netuid=netuid,
32 | hotkey=f"miner-hotkey-{i}",
33 | coldkey="mock-coldkey",
34 | balance=100000,
35 | stake=100000,
36 | )
37 |
38 |
39 | class MockMetagraph(bt.metagraph):
40 | def __init__(self, netuid=1, network="mock", subtensor=None):
41 | super().__init__(netuid=netuid, network=network, sync=False)
42 |
43 | if subtensor is not None:
44 | self.subtensor = subtensor
45 | self.sync(subtensor=subtensor)
46 |
47 | for axon in self.axons:
48 | axon.ip = "127.0.0.0"
49 | axon.port = 8091
50 |
51 | logger.info('metagraph', metagraph = f"{self}")
52 | logger.info('axons', axons = f"{self.axons}")
53 |
54 |
55 | class MockDendrite(bt.dendrite):
56 | """
57 | Replaces a real bittensor network request with a mock request that just returns some static response for all axons that are passed and adds some random delay.
58 | """
59 |
60 | def __init__(self, wallet):
61 | super().__init__(wallet)
62 |
63 | async def forward(
64 | self,
65 | axons: List[bt.axon],
66 | synapse: bt.Synapse = bt.Synapse(),
67 | timeout: float = 12,
68 | deserialize: bool = True,
69 | run_async: bool = True,
70 | streaming: bool = False,
71 | ):
72 | if streaming:
73 | raise NotImplementedError("Streaming not implemented yet.")
74 |
75 | async def query_all_axons(streaming: bool):
76 | """Queries all axons for responses."""
77 |
78 | async def single_axon_response(i, axon):
79 | """Queries a single axon for a response."""
80 |
81 | start_time = time.time()
82 | s = synapse.copy()
83 | # Attach some more required data so it looks real
84 | s = self.preprocess_synapse_for_request(axon, s, timeout)
85 | # We just want to mock the response, so we'll just fill in some data
86 | process_time = random.random()
87 | if process_time < timeout:
88 | s.dendrite.process_time = str(time.time() - start_time)
89 | # Update the status code and status message of the dendrite to match the axon
90 | # TODO (developer): replace with your own expected synapse data
91 | s.dummy_output = s.dummy_input * 2
92 | s.dendrite.status_code = 200
93 | s.dendrite.status_message = "OK"
94 | synapse.dendrite.process_time = str(process_time)
95 | else:
96 | s.dummy_output = 0
97 | s.dendrite.status_code = 408
98 | s.dendrite.status_message = "Timeout"
99 | synapse.dendrite.process_time = str(timeout)
100 |
101 | # Return the updated synapse object after deserializing if requested
102 | if deserialize:
103 | return s.deserialize()
104 | else:
105 | return s
106 |
107 | return await asyncio.gather(
108 | *(
109 | single_axon_response(i, target_axon)
110 | for i, target_axon in enumerate(axons)
111 | )
112 | )
113 |
114 | return await query_all_axons(streaming)
115 |
116 | def __str__(self) -> str:
117 | """
118 | Returns a string representation of the Dendrite object.
119 |
120 | Returns:
121 | str: The string representation of the Dendrite object in the format "dendrite()".
122 | """
123 | return "MockDendrite({})".format(self.keypair.ss58_address)
124 |
--------------------------------------------------------------------------------
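Together these classes let a test exercise a full dendrite-to-axon round trip with no network at all. A minimal sketch of wiring them up (bt.MockWallet is an assumption about the installed bittensor version; any test wallet with generated keys works, and netuid 1 is illustrative):

    import asyncio
    import bittensor as bt

    from template.mock import MockSubtensor, MockMetagraph, MockDendrite
    from template.protocol import Dummy

    wallet = bt.MockWallet()  # assumption: available in this bittensor version
    subtensor = MockSubtensor(netuid=1, n=4, wallet=wallet)
    metagraph = MockMetagraph(netuid=1, subtensor=subtensor)
    dendrite = MockDendrite(wallet=wallet)

    # Each mocked axon answers with dummy_output = dummy_input * 2
    # (or 0 with status 408 on a simulated timeout).
    responses = asyncio.run(
        dendrite.forward(axons=metagraph.axons, synapse=Dummy(dummy_input=21))
    )
    print(responses)  # [42, 42, ...], one entry per axon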
/.circleci/config.yml:
--------------------------------------------------------------------------------
1 | version: 2.1
2 |
3 | orbs:
4 | python: circleci/python@2.1.1
5 | python-lib: dialogue/python-lib@0.1.55
6 | # coveralls: coveralls/coveralls@1.0.6
7 |
8 | jobs:
9 | black:
10 | resource_class: small
11 | parameters:
12 | python-version:
13 | type: string
14 | docker:
15 | - image: cimg/python:<< parameters.python-version >>
16 |
17 | steps:
18 | - checkout
19 |
20 | - restore_cache:
21 | name: Restore cached black venv
22 | keys:
23 | - v1-pypi-py-black-<< parameters.python-version >>
24 |
25 | - run:
26 | name: Update & Activate black venv
27 | command: |
28 | python -m venv env/
29 | . env/bin/activate
30 | python -m pip install --upgrade pip
31 | pip install black==23.7.0
32 |
33 | - save_cache:
34 | name: Save cached black venv
35 | paths:
36 | - "env/"
37 | key: v1-pypi-py-black-<< parameters.python-version >>
38 |
39 | - run:
40 | name: Black format check
41 | command: |
42 | . env/bin/activate
43 | black --line-length 79 --exclude '(env|venv|.eggs)' --check .
44 |
45 | pylint:
46 | resource_class: small
47 | parameters:
48 | python-version:
49 | type: string
50 | docker:
51 | - image: cimg/python:<< parameters.python-version >>
52 |
53 | steps:
54 | - checkout
55 |
56 | - run:
57 | name: Install Pylint
58 | command: |
59 | python -m venv env/
60 | . env/bin/activate
61 | pip install pylint
62 |
63 | - run:
64 | name: Pylint check
65 | command: |
66 | . env/bin/activate
67 | pylint --fail-on=W,E,F --exit-zero ./
68 |
69 | check_compatibility:
70 | parameters:
71 | python_version:
72 | type: string
73 | docker:
74 | - image: cimg/python:3.10
75 | steps:
76 | - checkout
77 | - run:
78 | name: Check if requirements files have changed
79 | command: ./scripts/check_requirements_changes.sh
80 | - run:
81 | name: Install dependencies and Check compatibility
82 | command: |
83 | if [ "$REQUIREMENTS_CHANGED" == "true" ]; then
84 | sudo apt-get update
85 | sudo apt-get install -y jq curl
86 | ./scripts/check_compatibility.sh << parameters.python_version >>
87 | else
88 | echo "Skipping compatibility checks..."
89 | fi
90 |
91 | build:
92 | resource_class: medium
93 | parallelism: 2
94 | parameters:
95 | python-version:
96 | type: string
97 | docker:
98 | - image: cimg/python:<< parameters.python-version >>
99 |
100 | steps:
101 | - checkout
102 |
103 | - restore_cache:
104 | name: Restore cached venv
105 | keys:
106 | - v1-pypi-py<< parameters.python-version >>-{{ checksum "requirements.txt" }}
107 | - v1-pypi-py<< parameters.python-version >>
108 |
109 | - run:
110 | name: Update & Activate venv
111 | command: |
112 | python -m venv env/
113 | . env/bin/activate
114 | python -m pip install --upgrade pip
115 |
116 | - save_cache:
117 | name: Save cached venv
118 | paths:
119 | - "env/"
120 | key: v1-pypi-py<< parameters.python-version >>-{{ checksum "requirements.txt" }}
121 |
122 | - run:
123 | name: Install Bittensor Subnet Template
124 | command: |
125 | . env/bin/activate
126 | pip install -e .
127 |
128 | - store_test_results:
129 | path: test-results
130 | - store_artifacts:
131 | path: test-results
132 |
133 | coveralls:
134 | docker:
135 | - image: cimg/python:3.10
136 | steps:
137 | - run:
138 | name: Combine Coverage
139 | command: |
140 | pip3 install --upgrade coveralls
141 | coveralls --finish --rcfile .coveragerc || echo "Failed to upload coverage"
142 |
143 | workflows:
144 | compatibility_checks:
145 | jobs:
146 | - check_compatibility:
147 | python_version: "3.8"
148 | name: check-compatibility-3.8
149 | - check_compatibility:
150 | python_version: "3.9"
151 | name: check-compatibility-3.9
152 | - check_compatibility:
153 | python_version: "3.10"
154 | name: check-compatibility-3.10
155 | - check_compatibility:
156 | python_version: "3.11"
157 | name: check-compatibility-3.11
158 |
159 | pr-requirements:
160 | jobs:
161 | - black:
162 | python-version: "3.8.12"
163 | - pylint:
164 | python-version: "3.8.12"
165 | - build:
166 | matrix:
167 | parameters:
168 | python-version: ["3.9.13", "3.10.6", "3.11.4"]
169 |
--------------------------------------------------------------------------------
/template/utils/misc.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
6 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
7 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
8 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9 |
10 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
11 | # the Software.
12 |
13 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
14 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
15 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
16 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
17 | # DEALINGS IN THE SOFTWARE.
18 |
19 | import time
20 | import math
21 | import hashlib as rpccheckhealth
22 | from math import floor
23 | from typing import Callable, Any
24 | from functools import lru_cache, update_wrapper
25 |
26 |
27 | # LRU Cache with TTL
28 | def ttl_cache(maxsize: int = 128, typed: bool = False, ttl: int = -1):
29 | """
30 | Decorator that creates a cache of the most recently used function calls with a time-to-live (TTL) feature.
31 | The cache evicts the least recently used entries if the cache exceeds the `maxsize` or if an entry has
32 | been in the cache longer than the `ttl` period.
33 |
34 | Args:
35 | maxsize (int): Maximum size of the cache. Once the cache grows to this size, subsequent entries
36 | replace the least recently used ones. Defaults to 128.
37 | typed (bool): If set to True, arguments of different types will be cached separately. For example,
38 | f(3) and f(3.0) will be treated as distinct calls with distinct results. Defaults to False.
39 | ttl (int): The time-to-live for each cache entry, measured in seconds. If set to a non-positive value,
40 |             the TTL falls back to 65536 seconds (about 18 hours), making entries effectively permanent for typical runs. Defaults to -1.
41 |
42 | Returns:
43 | Callable: A decorator that can be applied to functions to cache their return values.
44 |
45 | The decorator is useful for caching results of functions that are expensive to compute and are called
46 | with the same arguments frequently within short periods of time. The TTL feature helps in ensuring
47 | that the cached values are not stale.
48 |
49 | Example:
50 | @ttl_cache(ttl=10)
51 | def get_data(param):
52 | # Expensive data retrieval operation
53 | return data
54 | """
55 | if ttl <= 0:
56 | ttl = 65536
57 | hash_gen = _ttl_hash_gen(ttl)
58 |
59 | def wrapper(func: Callable) -> Callable:
60 | @lru_cache(maxsize, typed)
61 | def ttl_func(ttl_hash, *args, **kwargs):
62 | return func(*args, **kwargs)
63 |
64 | def wrapped(*args, **kwargs) -> Any:
65 | th = next(hash_gen)
66 | return ttl_func(th, *args, **kwargs)
67 |
68 | return update_wrapper(wrapped, func)
69 |
70 | return wrapper
71 |
72 |
73 | def _ttl_hash_gen(seconds: int):
74 | """
75 | Internal generator function used by the `ttl_cache` decorator to generate a new hash value at regular
76 | time intervals specified by `seconds`.
77 |
78 | Args:
79 | seconds (int): The number of seconds after which a new hash value will be generated.
80 |
81 | Yields:
82 | int: A hash value that represents the current time interval.
83 |
84 | This generator is used to create time-based hash values that enable the `ttl_cache` to determine
85 | whether cached entries are still valid or if they have expired and should be recalculated.
86 | """
87 | start_time = time.time()
88 | while True:
89 | yield floor((time.time() - start_time) / seconds)
90 |
91 |
92 | # 12 seconds updating block.
93 | @ttl_cache(maxsize=1, ttl=12)
94 | def ttl_get_block(self) -> int:
95 | """
96 | Retrieves the current block number from the blockchain. This method is cached with a time-to-live (TTL)
97 | of 12 seconds, meaning that it will only refresh the block number from the blockchain at most every 12 seconds,
98 | reducing the number of calls to the underlying blockchain interface.
99 |
100 | Returns:
101 | int: The current block number on the blockchain.
102 |
103 | This method is useful for applications that need to access the current block number frequently and can
104 | tolerate a delay of up to 12 seconds for the latest information. By using a cache with TTL, the method
105 | efficiently reduces the workload on the blockchain interface.
106 |
107 | Example:
108 | current_block = ttl_get_block(self)
109 |
110 | Note: self here is the miner or validator instance
111 | """
112 | return self.subtensor.get_current_block()
113 |
--------------------------------------------------------------------------------
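Because _ttl_hash_gen only advances its hash once per ttl window, two calls inside the same window hit the lru_cache, while a call in the next window misses and recomputes. A small demo grounded in the code above (timing-dependent at window boundaries):

    import time
    from template.utils.misc import ttl_cache

    calls = 0

    @ttl_cache(maxsize=1, ttl=2)
    def now() -> float:
        global calls
        calls += 1
        return time.time()

    a, b = now(), now()   # same 2-second window: one underlying call
    assert a == b and calls == 1
    time.sleep(2.1)       # roll into the next TTL window
    c = now()             # new ttl_hash -> cache miss -> recomputed
    assert c != a and calls == 2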
/tests/test_template_validator.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2023 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 |
5 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
6 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
7 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
8 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
9 |
10 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
11 | # the Software.
12 |
13 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
14 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
15 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
16 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
17 | # DEALINGS IN THE SOFTWARE.
18 |
19 | import sys
20 | import numpy as np
21 | import unittest
22 | import bittensor as bt
23 |
24 | from neurons.validator import Neuron as Validator
25 | from neurons.miner import Neuron as Miner
26 |
27 | from template.protocol import Dummy
28 | from template.validator.forward import forward
29 | from template.utils.uids import get_random_uids
30 | from template.validator.reward import get_rewards
31 | from template.base.validator import BaseValidatorNeuron
32 |
33 |
34 | class TemplateValidatorNeuronTestCase(unittest.TestCase):
35 | """
36 |     Unit tests for the template validator neuron.
37 |
38 |     The tests cover scenarios where miner completions may or may not succeed,
39 |     and check that the resulting rewards contain no missing (NaN) values.
40 | """
41 |
42 | def setUp(self):
43 |         sys.argv = [sys.argv[0], "--config", "tests/configs/validator.json"]
44 |
45 | config = BaseValidatorNeuron.config()
46 | config.wallet._mock = True
47 | config.metagraph._mock = True
48 | config.subtensor._mock = True
49 | self.neuron = Validator(config)
50 |         self.miner_uids = get_random_uids(self.neuron, k=10)
51 |
52 | def test_run_single_step(self):
53 | # TODO: Test a single step
54 | pass
55 |
56 | def test_sync_error_if_not_registered(self):
57 | # TODO: Test that the validator throws an error if it is not registered on metagraph
58 | pass
59 |
60 | def test_forward(self):
61 | # TODO: Test that the forward function returns the correct value
62 | pass
63 |
64 | def test_dummy_responses(self):
65 | # TODO: Test that the dummy responses are correctly constructed
66 |
67 | responses = self.neuron.dendrite.query(
68 | # Send the query to miners in the network.
69 | axons=[
70 | self.neuron.metagraph.axons[uid] for uid in self.miner_uids
71 | ],
72 | # Construct a dummy query.
73 | synapse=Dummy(dummy_input=self.neuron.step),
74 | # All responses have the deserialize function called on them before returning.
75 | deserialize=True,
76 | )
77 |
78 | for i, response in enumerate(responses):
79 | self.assertEqual(response, self.neuron.step * 2)
80 |
81 | def test_reward(self):
82 | # TODO: Test that the reward function returns the correct value
83 |         responses = self.neuron.dendrite.query(
84 | # Send the query to miners in the network.
85 |             axons=[self.neuron.metagraph.axons[uid] for uid in self.miner_uids],
86 | # Construct a dummy query.
87 | synapse=Dummy(dummy_input=self.neuron.step),
88 | # All responses have the deserialize function called on them before returning.
89 | deserialize=True,
90 | )
91 |
92 | rewards = get_rewards(self.neuron, responses)
93 | expected_rewards = np.float32([1.0] * len(responses))
94 |         np.testing.assert_array_equal(rewards, expected_rewards)
95 |
96 | def test_reward_with_nan(self):
97 | # TODO: Test that NaN rewards are correctly sanitized
98 | # TODO: Test that a bt.logging.warning is thrown when a NaN reward is sanitized
99 |         responses = self.neuron.dendrite.query(
100 | # Send the query to miners in the network.
101 |             axons=[self.neuron.metagraph.axons[uid] for uid in self.miner_uids],
102 | # Construct a dummy query.
103 | synapse=Dummy(dummy_input=self.neuron.step),
104 | # All responses have the deserialize function called on them before returning.
105 | deserialize=True,
106 | )
107 |
108 | rewards = get_rewards(self.neuron, responses)
109 |         expected_rewards = rewards.copy()
110 | # Add NaN values to rewards
111 | rewards[0] = float("nan")
112 |
113 | with self.assertLogs(bt.logging, level="WARNING") as cm:
114 | self.neuron.update_scores(rewards, self.miner_uids)
115 |
--------------------------------------------------------------------------------
/template/api/get_query_axons.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import numpy as np
21 | import random
22 | import bittensor as bt
23 | from neurons import logger
24 |
25 |
26 | async def ping_uids(dendrite, metagraph, uids, timeout=3):
27 | """
28 | Pings a list of UIDs to check their availability on the Bittensor network.
29 |
30 | Args:
31 | dendrite (bittensor.dendrite): The dendrite instance to use for pinging nodes.
32 | metagraph (bittensor.metagraph): The metagraph instance containing network information.
33 | uids (list): A list of UIDs (unique identifiers) to ping.
34 | timeout (int, optional): The timeout in seconds for each ping. Defaults to 3.
35 |
36 | Returns:
37 | tuple: A tuple containing two lists:
38 | - The first list contains UIDs that were successfully pinged.
39 | - The second list contains UIDs that failed to respond.
40 | """
41 | axons = [metagraph.axons[uid] for uid in uids]
42 | try:
43 | responses = await dendrite(
44 | axons,
45 | bt.Synapse(), # TODO: potentially get the synapses available back?
46 | deserialize=False,
47 | timeout=timeout,
48 | )
49 | successful_uids = [
50 | uid
51 | for uid, response in zip(uids, responses)
52 | if response.dendrite.status_code == 200
53 | ]
54 | failed_uids = [
55 | uid
56 | for uid, response in zip(uids, responses)
57 | if response.dendrite.status_code != 200
58 | ]
59 | except Exception as e:
60 |         logger.error("Dendrite ping failed", error = {'exception_type': e.__class__.__name__,'exception_message': str(e),'exception_args': e.args})
61 | successful_uids = []
62 | failed_uids = uids
63 | logger.debug("ping() results", successful_uids = successful_uids, failed_uids = failed_uids)
64 | return successful_uids, failed_uids
65 |
66 |
67 | async def get_query_api_nodes(dendrite, metagraph, n=0.1, timeout=3):
68 | """
69 | Fetches the available API nodes to query for the particular subnet.
70 |
71 | Args:
72 |         dendrite (bittensor.dendrite): The dendrite instance used to ping nodes.
73 | metagraph (bittensor.metagraph): The metagraph instance containing network information.
74 | n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1.
75 | timeout (int, optional): The timeout in seconds for pinging nodes. Defaults to 3.
76 |
77 | Returns:
78 | list: A list of UIDs representing the available API nodes.
79 | """
80 | logger.debug("Fetching available API nodes for subnet", subnet_uid = metagraph.netuid)
81 | vtrust_uids = [
82 | uid.item()
83 | for uid in metagraph.uids
84 | if metagraph.validator_trust[uid] > 0
85 | ]
86 | top_uids = np.where(metagraph.S > np.quantile(metagraph.S, 1 - n))
87 | top_uids = top_uids[0].tolist()
88 | init_query_uids = set(top_uids).intersection(set(vtrust_uids))
89 | query_uids, _ = await ping_uids(
90 | dendrite, metagraph, init_query_uids, timeout=timeout
91 | )
92 | logger.debug("Available API node UIDs for subnet", subnet_uid = metagraph.netuid, query_uids = query_uids)
93 | if len(query_uids) > 3:
94 | query_uids = random.sample(query_uids, 3)
95 | return query_uids
96 |
97 |
98 | async def get_query_api_axons(
99 | wallet, metagraph=None, n=0.1, timeout=3, uids=None
100 | ):
101 | """
102 | Retrieves the axons of query API nodes based on their availability and stake.
103 |
104 | Args:
105 | wallet (bittensor.wallet): The wallet instance to use for querying nodes.
106 | metagraph (bittensor.metagraph, optional): The metagraph instance containing network information.
107 | n (float, optional): The fraction of top nodes to consider based on stake. Defaults to 0.1.
108 | timeout (int, optional): The timeout in seconds for pinging nodes. Defaults to 3.
109 | uids (Union[List[int], int], optional): The specific UID(s) of the API node(s) to query. Defaults to None.
110 |
111 | Returns:
112 | list: A list of axon objects for the available API nodes.
113 | """
114 | dendrite = bt.dendrite(wallet=wallet)
115 |
116 | if metagraph is None:
117 | metagraph = bt.metagraph(netuid=21)
118 |
119 | if uids is not None:
120 | query_uids = [uids] if isinstance(uids, int) else uids
121 | else:
122 | query_uids = await get_query_api_nodes(
123 | dendrite, metagraph, n=n, timeout=timeout
124 | )
125 | return [metagraph.axons[uid] for uid in query_uids]
126 |
--------------------------------------------------------------------------------
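End to end, the flow is: take the top-stake UIDs with nonzero validator trust, ping them, and sample up to three reachable axons. A usage sketch (the wallet name is an assumption; netuid 21 is the function's own default when no metagraph is supplied):

    import asyncio
    import bittensor as bt

    from template.api.get_query_axons import get_query_api_axons

    async def main():
        wallet = bt.wallet(name="default")  # assumed local wallet
        axons = await get_query_api_axons(wallet=wallet, n=0.1, timeout=3)
        print(f"{len(axons)} reachable API axons")

    asyncio.run(main())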
/neurons/miners/llm_client/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import List, Optional, Dict
2 | from pydantic import BaseModel
3 | from neurons import logger
4 | import requests
5 |
6 | class LlmMessage(BaseModel):
7 | type: int
8 | content: str
9 |
10 |
11 | class QueryOutput(BaseModel):
12 | result: Optional[List[Dict]] = None
13 | interpreted_result: Optional[str] = None
14 | error: Optional[int] = None
15 |
16 |
17 | class GenericOutput(BaseModel):
18 | result: Optional[Dict] = None
19 |
20 |
21 | class LLMClient:
22 | def __init__(self, base_url: str):
23 | self.base_url = base_url
24 |
25 | def discovery_v1(self, network: str) -> GenericOutput | None:
26 | try:
27 | url = f"{self.base_url}/v1/discovery/{network}"
28 | response = requests.get(url, timeout=30)
29 | response.raise_for_status()
30 | return response.json()
31 | except requests.ConnectionError as e:
32 | logger.error(f"Connection error: {e}")
33 | except requests.Timeout as e:
34 | logger.error(f"Request timeout: {e}")
35 | except requests.RequestException as e:
36 | logger.error(f"Failed to query: {e}")
37 | except Exception as e:
38 | logger.error(f"Unexpected error: {e}")
39 | return None
40 |
41 | def challenge_utxo_funds_flow_v1(self, network: str, in_total_amount: int, out_total_amount: int, tx_id_last_6_chars: str) -> GenericOutput | None:
42 | try:
43 | url = f"{self.base_url}/v1/challenge/funds_flow/{network}"
44 | params = {'in_total_amount': in_total_amount, 'out_total_amount': out_total_amount, 'tx_id_last_6_chars': tx_id_last_6_chars}
45 | response = requests.get(url, params, timeout=30)
46 | response.raise_for_status()
47 | return response.json()
48 | except requests.ConnectionError as e:
49 | logger.error(f"Connection error: {e}")
50 | except requests.Timeout as e:
51 | logger.error(f"Request timeout: {e}")
52 | except requests.RequestException as e:
53 | logger.error(f"Failed to query: {e}")
54 | except Exception as e:
55 | logger.error(f"Unexpected error: {e}")
56 | return None
57 |
58 | def challenge_utxo_balance_tracking_v1(self, network: str, block_height: int) -> GenericOutput | None:
59 | try:
60 | url = f"{self.base_url}/v1/challenge/balance_tracking/{network}"
61 | params = {'block_height': block_height}
62 | response = requests.get(url, params, timeout=30)
63 | response.raise_for_status()
64 | return response.json()
65 | except requests.ConnectionError as e:
66 | logger.error(f"Connection error: {e}")
67 | except requests.Timeout as e:
68 | logger.error(f"Request timeout: {e}")
69 | except requests.RequestException as e:
70 | logger.error(f"Failed to query: {e}")
71 | except Exception as e:
72 | logger.error(f"Unexpected error: {e}")
73 | return None
74 |
75 | def challenge_evm_v1(self, network: str, checksum: str) -> GenericOutput | None:
76 | try:
77 | url = f"{self.base_url}/v1/challenge/{network}"
78 | params = {'checksum': checksum}
79 | response = requests.get(url, params, timeout=30)
80 | response.raise_for_status()
81 | return response.json()
82 | except requests.ConnectionError as e:
83 | logger.error(f"Connection error: {e}")
84 | except requests.Timeout as e:
85 | logger.error(f"Request timeout: {e}")
86 | except requests.RequestException as e:
87 | logger.error(f"Failed to query: {e}")
88 | except Exception as e:
89 | logger.error(f"Unexpected error: {e}")
90 | return None
91 |
92 | def benchmark_funds_flow_v1(self, network: str, query) -> GenericOutput | None:
93 | try:
94 | url = f"{self.base_url}/v1/benchmark/funds_flow/{network}"
95 | params = {'query': query }
96 | response = requests.get(url, params, timeout=30)
97 | response.raise_for_status()
98 | return response.json()
99 | except requests.ConnectionError as e:
100 | logger.error(f"Connection error: {e}")
101 | except requests.Timeout as e:
102 | logger.error(f"Request timeout: {e}")
103 | except requests.RequestException as e:
104 | logger.error(f"Failed to query: {e}")
105 | except Exception as e:
106 | logger.error(f"Unexpected error: {e}")
107 | return None
108 |
109 | def benchmark_balance_tracking_v1(self, network: str, query: str) -> GenericOutput | None:
110 | try:
111 | url = f"{self.base_url}/v1/benchmark/balance_tracking/{network}"
112 | params = {'query': query }
113 | response = requests.get(url, params, timeout=30)
114 | response.raise_for_status()
115 | return response.json()
116 | except requests.ConnectionError as e:
117 | logger.error(f"Connection error: {e}")
118 | except requests.Timeout as e:
119 | logger.error(f"Request timeout: {e}")
120 | except requests.RequestException as e:
121 | logger.error(f"Failed to query: {e}")
122 | except Exception as e:
123 | logger.error(f"Unexpected error: {e}")
124 | return None
125 |
126 | def llm_query_v1(self, messages: List[LlmMessage], llm_type: str = "openai", network: str = "bitcoin") -> QueryOutput | None:
127 | try:
128 | url = f"{self.base_url}/v1/process_prompt"
129 | payload = {
130 | "llm_type": llm_type,
131 | "network": network,
132 | "messages": [message.dict() for message in messages]
133 | }
134 | logger.debug(f"Querying LLM with payload: {payload}")
135 | response = requests.post(url, json=payload, timeout=5*60)
136 | response.raise_for_status()
137 | return response.json()
138 | except requests.ConnectionError as e:
139 | logger.error(f"Connection error: {e}")
140 | except requests.Timeout as e:
141 | logger.error(f"Request timeout: {e}")
142 | except requests.RequestException as e:
143 | logger.error(f"Failed to query: {e}")
144 | except Exception as e:
145 | logger.error(f"Unexpected error: {e}")
146 | return None
147 |
--------------------------------------------------------------------------------
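Every method above follows the same pattern: build the URL, issue the request with a timeout, log and swallow any failure, and return the parsed JSON payload (a plain dict, despite the model annotations) or None. A usage sketch against a hypothetical local LLM service (the URL, message type value, and prompt are illustrative assumptions):

    from neurons.miners.llm_client import LLMClient, LlmMessage

    client = LLMClient(base_url="http://127.0.0.1:8000")  # hypothetical endpoint

    output = client.llm_query_v1(
        messages=[LlmMessage(type=0, content="Show 3 transactions from block 800000")],
        llm_type="openai",
        network="bitcoin",
    )
    if output is None:
        print("Query failed; see the logged error for details.")
    else:
        print(output)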
/neurons/nodes/bitcoin/node_utils.py:
--------------------------------------------------------------------------------
1 | from Crypto.Hash import SHA256, RIPEMD160
2 | import base58
3 | from dataclasses import dataclass, field
4 | from typing import List, Optional
5 | from decimal import Decimal, getcontext
6 |
7 |
8 | def pubkey_to_address(pubkey: str) -> str:
9 | # Step 1: SHA-256 hashing on the public key
10 | sha256_result = SHA256.new(bytes.fromhex(pubkey)).digest()
11 |
12 | # Step 2: RIPEMD-160 hashing on the result of SHA-256 using PyCryptodome
13 | ripemd160 = RIPEMD160.new()
14 | ripemd160.update(sha256_result)
15 | ripemd160_result = ripemd160.digest()
16 |
17 | # Step 3: Add version byte (0x00 for Mainnet)
18 | versioned_payload = b"\x00" + ripemd160_result
19 |
20 | # Step 4 and 5: Calculate checksum and append to the payload
21 | checksum = SHA256.new(SHA256.new(versioned_payload).digest()).digest()[:4]
22 | binary_address = versioned_payload + checksum
23 |
24 | # Step 6: Encode the binary address in Base58
25 | bitcoin_address = base58.b58encode(binary_address).decode("utf-8")
26 | return bitcoin_address
27 |
28 |
29 | def construct_redeem_script(pubkeys, m):
30 | n = len(pubkeys)
31 | script = f"{m} " + " ".join(pubkeys) + f" {n} OP_CHECKMULTISIG"
32 | return script.encode("utf-8")
33 |
34 |
35 | def hash_redeem_script(redeem_script):
36 | sha256 = SHA256.new(redeem_script).digest()
37 | ripemd160 = RIPEMD160.new(sha256).digest()
38 | return ripemd160
39 |
40 |
41 | def create_p2sh_address(hashed_script, mainnet=True):
42 | version_byte = b"\x05" if mainnet else b"\xc4"
43 | payload = version_byte + hashed_script
44 | checksum = SHA256.new(SHA256.new(payload).digest()).digest()[:4]
45 | return base58.b58encode(payload + checksum).decode()
46 |
47 |
48 | def get_tx_out_hash_table_sub_keys():
49 | hex_chars = "0123456789abcdef"
50 | return [h1 + h2 + h3 for h1 in hex_chars for h2 in hex_chars for h3 in hex_chars]
51 |
52 |
53 | def initialize_tx_out_hash_table():
54 | hash_table = {}
55 | for sub_key in get_tx_out_hash_table_sub_keys():
56 | hash_table[sub_key] = {}
57 | return hash_table
58 |
59 |
60 | def check_if_block_is_valid_for_challenge(block_height: int) -> bool:
61 | blocks_to_avoid = [91722, 91880]
62 |     return block_height not in blocks_to_avoid
63 |
64 |
65 | @dataclass
66 | class Block:
67 | block_height: int
68 | block_hash: str
69 | timestamp: int # Using int to represent Unix epoch time
70 | previous_block_hash: str
71 | nonce: int
72 | difficulty: int
73 | transactions: List["Transaction"] = field(default_factory=list)
74 |
75 |
76 | @dataclass
77 | class Transaction:
78 | tx_id: str
79 | block_height: int
80 | timestamp: int # Using int to represent Unix epoch time
81 | fee_satoshi: int
82 | vins: List["VIN"] = field(default_factory=list)
83 | vouts: List["VOUT"] = field(default_factory=list)
84 | is_coinbase: bool = False
85 |
86 |
87 | @dataclass
88 | class VOUT:
89 | vout_id: int
90 | value_satoshi: int
91 | script_pub_key: Optional[str]
92 | is_spent: bool
93 | address: str
94 |
95 |
96 | @dataclass
97 | class VIN:
98 | tx_id: str
99 | vin_id: int
100 | vout_id: int
101 | script_sig: Optional[str]
102 | sequence: Optional[int]
103 |
104 |
105 | getcontext().prec = 28
106 | SATOSHI = Decimal("100000000")
107 |
108 |
109 | def parse_block_data(block_data):
110 | block_height = block_data["height"]
111 | block_hash = block_data["hash"]
112 | block_previous_hash = block_data.get("previousblockhash", "")
113 | timestamp = int(block_data["time"])
114 |
115 | block = Block(
116 | block_height=block_height,
117 | block_hash=block_hash,
118 | timestamp=timestamp,
119 | previous_block_hash=block_previous_hash,
120 | nonce=block_data.get("nonce", 0),
121 | difficulty=block_data.get("difficulty", 0),
122 | )
123 |
124 | for tx_data in block_data["tx"]:
125 | tx_id = tx_data["txid"]
126 | fee = Decimal(tx_data.get("fee", 0))
127 | fee_satoshi = int(fee * SATOSHI)
128 | tx_timestamp = int(tx_data.get("time", timestamp))
129 |
130 | tx = Transaction(
131 | tx_id=tx_id,
132 | block_height=block_height,
133 | timestamp=tx_timestamp,
134 | fee_satoshi=fee_satoshi,
135 | )
136 |
137 | for vin_data in tx_data["vin"]:
138 | vin = VIN(
139 |                 tx_id=vin_data.get("txid", ""),
140 | vin_id=vin_data.get("sequence", 0),
141 | vout_id=vin_data.get("vout", 0),
142 | script_sig=vin_data.get("scriptSig", {}).get("asm", ""),
143 | sequence=vin_data.get("sequence", 0),
144 | )
145 | tx.vins.append(vin)
146 | tx.is_coinbase = "coinbase" in vin_data
147 |
148 | for vout_data in tx_data["vout"]:
149 | script_type = vout_data["scriptPubKey"].get("type", "")
150 | if "nonstandard" in script_type or script_type == "nulldata":
151 | continue
152 |
153 | value_satoshi = int(Decimal(vout_data["value"]) * SATOSHI)
154 | n = vout_data["n"]
155 | script_pub_key_asm = vout_data["scriptPubKey"].get("asm", "")
156 |
157 | address = vout_data["scriptPubKey"].get("address", "")
158 | if not address:
159 | addresses = vout_data["scriptPubKey"].get("addresses", [])
160 | if addresses:
161 | address = addresses[0]
162 | elif "OP_CHECKSIG" in script_pub_key_asm:
163 | pubkey = script_pub_key_asm.split()[0]
164 | address = pubkey_to_address(pubkey)
165 | elif "OP_CHECKMULTISIG" in script_pub_key_asm:
166 | pubkeys = script_pub_key_asm.split()[1:-2]
167 | m = int(script_pub_key_asm.split()[0])
168 | redeem_script = construct_redeem_script(pubkeys, m)
169 | hashed_script = hash_redeem_script(redeem_script)
170 | address = create_p2sh_address(hashed_script)
171 | else:
172 | raise Exception(
173 | f"Unknown address type: {vout_data['scriptPubKey']}"
174 | )
175 |
176 | vout = VOUT(
177 | vout_id=n,
178 | value_satoshi=value_satoshi,
179 | script_pub_key=script_pub_key_asm,
180 | is_spent=False,
181 | address=address,
182 | )
183 | tx.vouts.append(vout)
184 |
185 | block.transactions.append(tx)
186 |
187 | return block
--------------------------------------------------------------------------------
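As a sanity check, pubkey_to_address reproduces the well-known Bitcoin-wiki P2PKH derivation example for an uncompressed public key:

    from neurons.nodes.bitcoin.node_utils import pubkey_to_address

    # Uncompressed SEC public key from the Bitcoin wiki's address-derivation example.
    pubkey = (
        "0450863ad64a87ae8a2fe83c1af1a8403cb53f53e486d8511dad8a04887e5b2352"
        "2cd470243453a299fa9e77237716103abc11a1df38855ed6f2ee187e9c582ba6"
    )
    assert pubkey_to_address(pubkey) == "16UwLL9Risc3QfPqBUvKofHmBQ7wMtjvM"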
/neurons/validators/scoring.py:
--------------------------------------------------------------------------------
1 | from neurons.remote_config import ValidatorConfig
2 | from neurons import logger
3 |
4 |
5 | class Scorer:
6 | def __init__(self, config: ValidatorConfig):
7 | self.config = config
8 | self.processing_times = { 'min_time': 1, 'max_time': 10 }
9 |
10 | def calculate_score(self, metagraph, miner_uid, network, process_time, indexed_start_block_height, indexed_end_block_height, blockchain_last_block_height, miner_distribution, uptime_avg, worst_end_block_height):
11 | process_time_score = self.calculate_process_time_score(process_time, self.config.benchmark_timeout)
12 | block_height_score = self.calculate_block_height_score(network, indexed_start_block_height, indexed_end_block_height, blockchain_last_block_height)
13 |
14 | block_height_recency_score = self.calculate_block_height_recency_score(indexed_end_block_height, blockchain_last_block_height, worst_end_block_height)
15 | blockchain_score = self.calculate_blockchain_weight(network, miner_distribution)
16 | uptime_score = self.calculate_uptime_score(uptime_avg)
17 | final_score = self.final_score(process_time_score, block_height_score, block_height_recency_score, blockchain_score, uptime_score)
18 |
19 | miner_ip = metagraph.axons[miner_uid].ip
20 | miner_port = metagraph.axons[miner_uid].port
21 | miner_hotkey = metagraph.hotkeys[miner_uid]
22 | miner_coldkey = metagraph.coldkeys[miner_uid]
23 |
24 | logger.info("Score calculated",
25 | miner_uid=miner_uid,
26 | miner_ip=miner_ip,
27 | miner_port=miner_port,
28 | miner_hotkey=miner_hotkey,
29 | miner_coldkey=miner_coldkey,
30 | benchmark_process_time=process_time,
31 | indexed_start_block_height=indexed_start_block_height,
32 | indexed_end_block_height=indexed_end_block_height,
33 | blockchain_last_block_height=blockchain_last_block_height,
34 | miner_distribution=miner_distribution,
35 | uptime_avg=uptime_avg,
36 | benchmark_process_time_score=process_time_score,
37 | block_height_score=block_height_score,
38 | block_height_recency_score=block_height_recency_score,
39 | blockchain_score=blockchain_score,
40 | uptime_score=uptime_score,
41 | final_score=final_score)
42 |
43 | return final_score
44 |
45 | def final_score(self, process_time_score, block_height_score, block_height_recency_score, blockchain_score, uptime_score):
46 |
47 | if process_time_score == 0 or block_height_score == 0 or block_height_recency_score == 0:
48 | return 0
49 |
50 | total_score = (
51 | process_time_score * self.config.process_time_weight +
52 | block_height_score * self.config.block_height_weight +
53 | block_height_recency_score * self.config.block_height_recency_weight +
54 | blockchain_score * self.config.blockchain_importance_weight +
55 | uptime_score * self.config.uptime_weight
56 | )
57 |
58 | total_weights = (
59 | self.config.process_time_weight +
60 | self.config.block_height_weight +
61 | self.config.block_height_recency_weight +
62 | self.config.blockchain_importance_weight +
63 | self.config.uptime_weight
64 | )
65 |
66 | normalized_score = total_score / total_weights
67 | normalized_score = min(max(normalized_score, 0), 1) # Ensuring the score is within 0 to 1
68 | return normalized_score
69 |
70 | @staticmethod
71 | def get_performance_score(process_time, best_time, worst_time, timeout):
72 | if process_time >= timeout:
73 | return 0 # Timeout case
74 | if process_time <= best_time:
75 | return 1 # Best performance case
76 |
77 | # Calculate the normalized score between best_time and worst_time
78 | normalized_score = 0.1 + 0.9 * (worst_time - process_time) / (worst_time - best_time)
79 | return max(0.1, min(normalized_score, 1)) # Ensure the score is between 0.1 and 1
80 |
81 | def calculate_process_time_score(self, process_time, discovery_timeout):
82 | process_time = min(process_time, discovery_timeout)
83 |     # Score via get_performance_score (above): 1 at best_time, tapering to
84 |     # 0.1 at worst_time, and 0 once process_time reaches the timeout.
85 | # Define the best and worst process times
86 | best_time = self.processing_times['min_time']
87 | worst_time = self.processing_times['max_time']
88 |
89 | # Use the new performance scoring method
90 | process_time_score = self.get_performance_score(process_time, best_time, worst_time, discovery_timeout)
91 | return process_time_score
92 |
93 | @staticmethod
94 | def calculate_block_height_recency_score(indexed_end_block_height, blockchain_block_height, worst_end_block_height):
95 |
96 | # this is done to ensure that the worst miner does not obtain a score of 0
97 | min_recency = worst_end_block_height - 100
98 | if indexed_end_block_height < min_recency:
99 | return 0
100 |
101 | recency_diff = blockchain_block_height - indexed_end_block_height
102 | recency_score = max(0, (1 - recency_diff / (blockchain_block_height-min_recency)) ** 4)
103 | return recency_score
104 |
105 | def calculate_block_height_score(self, network, indexed_start_block_height: int, indexed_end_block_height: int, blockchain_block_height: int):
106 |
107 | covered_blocks = indexed_end_block_height - indexed_start_block_height
108 |
109 | min_blocks = self.config.get_blockchain_min_blocks(network=network)
110 | if covered_blocks < min_blocks:
111 | return 0
112 |
113 | coverage_percentage = (covered_blocks-min_blocks) / (blockchain_block_height-min_blocks)
114 | coverage_percentage = coverage_percentage ** 3
115 | return coverage_percentage
116 |
117 | def calculate_blockchain_weight(self, network, miner_distribution):
118 |
119 | if len(miner_distribution) == 1:
120 | return 1
121 |
122 | importance = self.config.get_network_importance(network)
123 |
124 | miners_actual_distribution = miner_distribution[network] / sum(miner_distribution.values())
125 | miners_distribution_score = max(0, -(miners_actual_distribution - importance))
126 |
127 | overall_score = importance + 0.2 * miners_distribution_score
128 |
129 | return overall_score
130 |
131 | @staticmethod
132 | def calculate_uptime_score(uptime_avg):
133 | return uptime_avg
134 |
--------------------------------------------------------------------------------
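To make the curve shapes concrete, here is a standalone restatement of the two static formulas above (get_performance_score and calculate_block_height_recency_score) evaluated with illustrative numbers; the times and block heights are hypothetical, not values from the config:

    # get_performance_score: 1 at or below best_time, linearly tapering to 0.1
    # at worst_time (and floored there), 0 at or beyond the timeout.
    def performance_score(process_time, best_time=1, worst_time=10, timeout=60):
        if process_time >= timeout:
            return 0
        if process_time <= best_time:
            return 1
        score = 0.1 + 0.9 * (worst_time - process_time) / (worst_time - best_time)
        return max(0.1, min(score, 1))

    print(performance_score(1))    # 1.0  (best case)
    print(performance_score(5.5))  # 0.55 (halfway between best and worst)
    print(performance_score(12))   # 0.1  (past worst_time, floored)

    # calculate_block_height_recency_score: the fourth power sharply penalizes
    # miners whose indexed tip lags the chain tip.
    def recency_score(indexed_end, chain_height, worst_end):
        min_recency = worst_end - 100
        if indexed_end < min_recency:
            return 0
        diff = chain_height - indexed_end
        return max(0, (1 - diff / (chain_height - min_recency)) ** 4)

    print(recency_score(839_900, 840_000, 839_500))  # ~0.48, only 100 blocks behind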
/scripts/install_staging.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Section 1: Build/Install
4 | # This section is for first-time setup and installations.
5 |
6 | install_dependencies() {
7 | # Function to install packages on macOS
8 | install_mac() {
9 | which brew > /dev/null
10 | if [ $? -ne 0 ]; then
11 | echo "Installing Homebrew..."
12 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
13 | fi
14 | echo "Updating Homebrew packages..."
15 | brew update
16 | echo "Installing required packages..."
17 | brew install make llvm curl libssl protobuf tmux
18 | }
19 |
20 | # Function to install packages on Ubuntu/Debian
21 | install_ubuntu() {
22 | echo "Updating system packages..."
23 | sudo apt update
24 | echo "Installing required packages..."
25 | sudo apt install --assume-yes make build-essential git clang curl libssl-dev llvm libudev-dev protobuf-compiler tmux
26 | }
27 |
28 | # Detect OS and call the appropriate function
29 | if [[ "$OSTYPE" == "darwin"* ]]; then
30 | install_mac
31 | elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
32 | install_ubuntu
33 | else
34 | echo "Unsupported operating system."
35 | exit 1
36 | fi
37 |
38 | # Install rust and cargo
39 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
40 |
41 | # Update your shell's source to include Cargo's path
42 | source "$HOME/.cargo/env"
43 | }
44 |
45 | # Call install_dependencies only if it's the first time running the script
46 | if [ ! -f ".dependencies_installed" ]; then
47 | install_dependencies
48 | touch .dependencies_installed
49 | fi
50 |
51 |
52 | # Section 2: Test/Run
53 | # This section is for running and testing the setup.
54 |
55 | # Create a coldkey for the owner role
56 | wallet=${1:-owner}
57 |
58 | # Logic for setting up and running the environment
59 | setup_environment() {
60 | # Clone subtensor and enter the directory
61 | if [ ! -d "subtensor" ]; then
62 | git clone https://github.com/opentensor/subtensor.git
63 | fi
64 | cd subtensor
65 | git pull
66 |
67 | # Update to the nightly version of rust
68 | ./scripts/init.sh
69 |
70 | cd ../bittensor-subnet-template
71 |
72 | # Install the bittensor-subnet-template python package
73 | python -m pip install -e .
74 |
75 | # Create and set up wallets
76 | # This section can be skipped if wallets are already set up
77 | if [ ! -f ".wallets_setup" ]; then
78 | btcli wallet new_coldkey --wallet.name $wallet --no_password --no_prompt
79 | btcli wallet new_coldkey --wallet.name miner --no_password --no_prompt
80 | btcli wallet new_hotkey --wallet.name miner --wallet.hotkey default --no_prompt
81 | btcli wallet new_coldkey --wallet.name validator --no_password --no_prompt
82 | btcli wallet new_hotkey --wallet.name validator --wallet.hotkey default --no_prompt
83 | touch .wallets_setup
84 | fi
85 |
86 | }
87 |
88 | # Call setup_environment every time
89 | setup_environment
90 |
91 | ## Setup localnet
92 | # assumes we are in the bittensor-subnet-template/ directory
93 | # Initialize your local subtensor chain in development mode. This command will set up and run a local subtensor network.
94 | cd ../subtensor
95 |
96 | # Start a new tmux session and create a new pane, but do not switch to it
97 | echo "FEATURES='pow-faucet runtime-benchmarks' BT_DEFAULT_TOKEN_WALLET=$(cat ~/.bittensor/wallets/$wallet/coldkeypub.txt | grep -oP '"ss58Address": "\K[^"]+') bash scripts/localnet.sh" > setup_and_run.sh
98 | chmod +x setup_and_run.sh
99 | tmux new-session -d -s localnet -n 'localnet'
100 | tmux send-keys -t localnet 'bash ../subtensor/setup_and_run.sh' C-m
101 |
102 | # Notify the user
103 | echo ">> localnet.sh is running in a detached tmux session named 'localnet'"
104 | echo ">> You can attach to this session with: tmux attach-session -t localnet"
105 |
106 | # Register a subnet (this needs to be run each time we start a new local chain)
107 | btcli subnet create --wallet.name $wallet --wallet.hotkey default --subtensor.chain_endpoint ws://127.0.0.1:9946 --no_prompt
108 |
109 | # Transfer tokens to miner and validator coldkeys
110 | export BT_MINER_TOKEN_WALLET=$(cat ~/.bittensor/wallets/miner/coldkeypub.txt | grep -oP '"ss58Address": "\K[^"]+')
111 | export BT_VALIDATOR_TOKEN_WALLET=$(cat ~/.bittensor/wallets/validator/coldkeypub.txt | grep -oP '"ss58Address": "\K[^"]+')
112 |
113 | btcli wallet transfer --subtensor.network ws://127.0.0.1:9946 --wallet.name $wallet --dest $BT_MINER_TOKEN_WALLET --amount 1000 --no_prompt
114 | btcli wallet transfer --subtensor.network ws://127.0.0.1:9946 --wallet.name $wallet --dest $BT_VALIDATOR_TOKEN_WALLET --amount 10000 --no_prompt
115 |
116 | # Register wallet hotkeys to subnet
117 | btcli subnet register --wallet.name miner --netuid 1 --wallet.hotkey default --subtensor.chain_endpoint ws://127.0.0.1:9946 --no_prompt
118 | btcli subnet register --wallet.name validator --netuid 1 --wallet.hotkey default --subtensor.chain_endpoint ws://127.0.0.1:9946 --no_prompt
119 |
120 | # Add stake to the validator
121 | btcli stake add --wallet.name validator --wallet.hotkey default --subtensor.chain_endpoint ws://127.0.0.1:9946 --amount 10000 --no_prompt
122 |
123 | # Ensure both the miner and validator keys are successfully registered.
124 | btcli subnet list --subtensor.chain_endpoint ws://127.0.0.1:9946
125 | btcli wallet overview --wallet.name validator --subtensor.chain_endpoint ws://127.0.0.1:9946 --no_prompt
126 | btcli wallet overview --wallet.name miner --subtensor.chain_endpoint ws://127.0.0.1:9946 --no_prompt
127 |
128 | cd ../bittensor-subnet-template
129 |
130 |
131 | # Check if inside a tmux session
132 | if [ -z "$TMUX" ]; then
133 | # Start a new tmux session and run the miner in the first pane
134 | tmux new-session -d -s bittensor -n 'miner' 'python neurons/miner.py --netuid 1 --subtensor.chain_endpoint ws://127.0.0.1:9946 --wallet.name miner --wallet.hotkey default --logging.debug'
135 |
136 | # Split the window and run the validator in the new pane
137 | tmux split-window -h -t bittensor:miner 'python neurons/validator.py --netuid 1 --subtensor.chain_endpoint ws://127.0.0.1:9946 --wallet.name validator --wallet.hotkey default --logging.debug'
138 |
139 | # Attach to the new tmux session
140 | tmux attach-session -t bittensor
141 | else
142 | # If already in a tmux session, create two panes in the current window
143 | tmux split-window -h 'python neurons/miner.py --netuid 1 --subtensor.chain_endpoint ws://127.0.0.1:9946 --wallet.name miner --wallet.hotkey default --logging.debug'
144 |     tmux split-window -v -t 0 'python neurons/validator.py --netuid 1 --subtensor.chain_endpoint ws://127.0.0.1:9946 --wallet.name validator --wallet.hotkey default --logging.debug'
145 | fi
146 |
--------------------------------------------------------------------------------
/insights/api/insight_api.py:
--------------------------------------------------------------------------------
1 | import random
2 | import asyncio
3 | from datetime import datetime
4 | import numpy as np
5 | from protocols.chat import ChatMessageRequest, ChatMessageResponse, ChatMessageVariantRequest, ContentType
6 | from fastapi.middleware.cors import CORSMiddleware
7 | from starlette.requests import Request
8 | import time
9 | import insights
10 | from insights.api.query import TextQueryAPI
11 | from neurons.validators.utils.uids import get_top_miner_uids
12 | from fastapi import FastAPI, Body, HTTPException
13 | import uvicorn
14 | from neurons import logger
15 |
16 |
17 | class APIServer:
18 |
19 | failed_prompt_msg = "Please try again. Could not receive any responses from the miners, possibly due to a poor network connection."
20 |
21 | def __init__(self, config, wallet, subtensor, metagraph):
22 | self.app = FastAPI(title="Validator API",
23 | description="API for the Validator service",
24 | version=insights.__version__)
25 |
26 | self.app.add_middleware(
27 | CORSMiddleware,
28 | allow_origins=["*"],
29 | allow_methods=["*"],
30 | allow_headers=["*"],
31 | )
32 |
33 | self.config = config
34 | self.device = self.config.neuron.device
35 | self.wallet = wallet
36 | self.text_query_api = TextQueryAPI(wallet=self.wallet)
37 | self.subtensor = subtensor
38 | self.metagraph = metagraph
39 |
40 | @self.app.middleware("http")
41 | async def log_requests(request: Request, call_next):
42 | start_time = time.time()
43 | response = await call_next(request)
44 | end_time = time.time()
45 | duration = end_time - start_time
46 | logger.info(f"Request completed: {request.method} {request.url} in {duration:.4f} seconds")
47 | return response
48 |
49 | @self.app.post("/v1/api/text_query", summary="Processes chat message requests and returns a response from a randomly selected miner", tags=["v1"])
50 | async def get_response(query: ChatMessageRequest = Body(..., example={
51 | "network": "bitcoin",
52 | "prompt": "Return 3 transactions outgoing from my address bc1q4s8yps9my6hun2tpd5ke5xmvgdnxcm2qspnp9r"
53 | })) -> ChatMessageResponse:
54 |
55 | top_miner_uids = await get_top_miner_uids(metagraph=self.metagraph, wallet=self.wallet, top_rate=self.config.top_rate)
56 | logger.info(f"Top miner UIDs are {top_miner_uids}")
57 |
58 | selected_miner_uids = None
59 | if len(top_miner_uids) >= 3:
60 | selected_miner_uids = random.sample(top_miner_uids, 3)
61 | else:
62 | selected_miner_uids = top_miner_uids
63 | top_miner_axons = [self.metagraph.axons[uid] for uid in selected_miner_uids]
64 |
65 | logger.info(f"Top miner axons: {top_miner_axons}")
66 |
67 | if not top_miner_axons:
68 | raise HTTPException(status_code=503, detail=self.failed_prompt_msg)
69 |
70 | # get miner response
71 | responses, blacklist_axon_ids = await self.text_query_api(
72 | axons=top_miner_axons,
73 | network=query.network,
74 | text=query.prompt,
75 | timeout=self.config.timeout
76 | )
77 |
78 | if not responses:
79 | raise HTTPException(status_code=503, detail=self.failed_prompt_msg)
80 |
81 | blacklist_axons = np.array(top_miner_axons)[blacklist_axon_ids]
82 | blacklist_uids = np.where(np.isin(np.array(self.metagraph.axons), blacklist_axons))[0]
83 | responded_uids = np.setdiff1d(np.array(selected_miner_uids), blacklist_uids)
84 |
85 | # Add score to miners that responded to the user query
86 | # uids = responded_uids.tolist()
87 | # TODO: store the responded UIDs to progress here; that data will be taken later in the scoring function
88 | # !! The score should go to the miner's hotkey, not its uid !! A uid can change, but a hotkey is unique
89 |
90 | selected_index = random.randrange(len(responses))  # avoids .index() mismatches when responses contain duplicates
91 | response_object = ChatMessageResponse(
92 | miner_hotkey=self.metagraph.axons[responded_uids[selected_index]].hotkey,
93 | response=responses[selected_index]
94 | )
95 |
96 | # return response and the hotkey of randomly selected miner
97 | return response_object
98 |
99 | @self.app.post("/v1/api/text_query/variant", summary="Processes variant chat message requests and returns a response from a specific miner", tags=["v1"])
100 | async def get_response_variant(query: ChatMessageVariantRequest = Body(..., example={
101 | "network": "bitcoin",
102 | "prompt": "Return 3 transactions outgoing from my address bc1q4s8yps9my6hun2tpd5ke5xmvgdnxcm2qspnp9r",
103 | "miner_hotkey": "5EExDvawjGyszzxF8ygvNqkM1w5M4hA82ydBjhx4cY2ut2yr"
104 | })) -> ChatMessageResponse:
105 |
106 | logger.info(f"Miner {query.miner_hotkey} received a variant request.")
107 |
108 | try:
109 | miner_id = self.metagraph.hotkeys.index(query.miner_hotkey)
110 | except ValueError:
111 | raise HTTPException(status_code=404, detail="Miner hotkey not found")
112 |
113 | miner_axon = [self.metagraph.axons[miner_id]]
114 | logger.info(f"Miner axon: {miner_axon}")
115 |
116 | responses, _ = await self.text_query_api(
117 | axons=miner_axon,
118 | network=query.network,
119 | text=query.prompt,
120 | timeout=self.config.timeout
121 | )
122 |
123 | if not responses:
124 | raise HTTPException(status_code=503, detail=self.failed_prompt_msg)
125 |
126 | # TODO: store the responded UIDs to progress here; that data will be taken later in the scoring function
127 | # To be considered whether that creates a fair result: what if someone runs a validator and prompts his own miner to get a better score?
128 | # Well, he will pay for the OpenAI usage, so he is paying for the score. Is that fair?
129 |
130 | logger.info(f"Variant: {responses}")
131 | response_object = ChatMessageResponse(
132 | miner_hotkey=query.miner_hotkey,
133 | response=responses[0]
134 | )
135 |
136 | return response_object
137 |
138 | @self.app.get("/", tags=["default"])
139 | def healthcheck():
140 | return {
141 | "status": "ok",
142 | "timestamp": datetime.utcnow()
143 | }
144 |
145 | def start(self):
146 | # Set the default event loop policy to avoid conflicts with uvloop
147 | asyncio.set_event_loop_policy(asyncio.DefaultEventLoopPolicy())
148 | # Start the Uvicorn server with your app
149 | uvicorn.run(self.app, host="0.0.0.0", port=int(self.config.api_port), loop="asyncio")
150 |
151 |
--------------------------------------------------------------------------------
/neurons/storage.py:
--------------------------------------------------------------------------------
1 | from typing import Optional
2 |
3 | import insights
4 |
5 | import bittensor as bt
6 | from bittensor.extrinsics import serving
7 | from pydantic import BaseModel
8 | from protocols.blockchain import get_network_id
9 | from neurons import logger
10 |
11 | class Metadata(BaseModel):
12 | def to_compact(self):
13 | return ','.join(f"{key}:{repr(getattr(self, key))}" for key in self.__dict__)
14 |
15 | class MinerMetadata(Metadata):
16 | sb: Optional[int] = None  # start_block_height
17 | lb: Optional[int] = None  # last_block_height
18 | bl: Optional[int] = None  # balance_model_last_block_height
19 | n: Optional[int] = None  # network
20 | cv: Optional[str] = None  # code_version
21 | lv: Optional[str] = None  # llm_engine_version
22 | mt: Optional[int] = None  # model_type (OBSOLETE)
23 | di: Optional[str] = None  # docker_image (OBSOLETE)
24 |
25 | @staticmethod
26 | def from_compact(compact_str):
27 | data_dict = {}
28 | for item in compact_str.split(','):
29 | key, value = item.split(':', 1)
30 | if value == 'None':
31 | continue
32 | data_dict[key] = value.strip("'")
33 | return MinerMetadata(**data_dict)
34 |
35 | class ValidatorMetadata(Metadata):
36 | cv: Optional[str] = None  # code_version
37 | ip: Optional[str] = None  # api_ip
38 |
39 | b: Optional[int] = None
40 | v: Optional[int] = None
41 | di: Optional[str] = None
42 |
43 |
44 | @staticmethod
45 | def from_compact(compact_str):
46 | data_dict = {}
47 | for item in compact_str.split(','):
48 | key, value = item.split(':', 1)
49 | if value == 'None':
50 | continue
51 | data_dict[key] = value.strip("'")
52 | return ValidatorMetadata(**data_dict)
53 |
54 | def get_commitment_wrapper(subtensor, netuid, _, hotkey, block=None):
55 | def get_commitment():
56 | metadata = serving.get_metadata(subtensor, netuid, hotkey, block)
57 | if metadata is None:
58 | return None
59 | commitment = metadata["info"]["fields"][0]
60 | hex_data = commitment[list(commitment.keys())[0]][2:]
61 | return bytes.fromhex(hex_data).decode()
62 |
63 | return get_commitment()
64 |
65 |
66 | def store_miner_metadata(self):
67 | try:
68 | discovery = self.llm.discovery_v1(network=self.config.network)
69 |
70 | start_block = discovery['funds_flow_model_start_block']
71 | last_block = discovery['funds_flow_model_last_block']
72 | balance_model_last_block = discovery['balance_model_last_block']
73 | llm_version = discovery['llm_engine_version']
74 |
75 | logger.info(f"Storing miner metadata")
76 |
77 | metadata = MinerMetadata(
78 | sb=start_block,
79 | lb=last_block,
80 | bl=balance_model_last_block,
81 | n=get_network_id(self.config.network),
82 | cv=insights.__version__,
83 | lv=llm_version
84 | )
85 | self.subtensor.commit(self.wallet, self.config.netuid, metadata.to_compact())
86 | logger.success(f"Stored miner metadata: {metadata}")
87 |
88 | except bt.errors.MetadataError as e:
89 | logger.warning("Skipping storing miner metadata", error={'exception_type': e.__class__.__name__, 'exception_message': str(e), 'exception_args': e.args})
90 | except Exception as e:
91 | logger.warning(f"Skipping storing miner metadata", error={'exception_type': e.__class__.__name__, 'exception_message': str(e), 'exception_args': e.args})
92 |
93 | def store_validator_metadata(self):
94 | def get_commitment(netuid: int, uid: int, block: Optional[int] = None) -> str:
95 | metadata = serving.get_metadata(subtensor, netuid, hotkey, block)
96 | if metadata is None:
97 | return None
98 | commitment = metadata["info"]["fields"][0]
99 | hex_data = commitment[list(commitment.keys())[0]][2:]
100 | return bytes.fromhex(hex_data).decode()
101 |
102 | try:
103 | subtensor = bt.subtensor(config=self.config)
104 | logger.info(f"Storing validator metadata")
105 | metadata = ValidatorMetadata(
106 | ip=self.metagraph.axons[self.uid].ip,
107 | cv=insights.__version__,
108 | )
109 |
110 | hotkey = self.wallet.hotkey.ss58_address
111 | subtensor.get_commitment = get_commitment  # patched getter reads this wallet's raw commitment; the uid argument is ignored
112 |
113 | existing_commitment = subtensor.get_commitment(self.config.netuid, self.uid)
114 | if existing_commitment is not None:
115 | try:
116 | dual_miner = MinerMetadata.from_compact(existing_commitment)
117 | if dual_miner.sb is not None:
118 | logger.info("Skipping storing validator metadata, as this is a dual hotkey for miner and validator", metadata = metadata.to_compact())
119 | return
120 | except Exception as e:
121 | logger.warning("Error while getting miner metadata, Continuing as validator...", miner_hotkey=hotkey, error = {'exception_type': e.__class__.__name__,'exception_message': str(e),'exception_args': e.args})
122 |
123 | subtensor.commit(self.wallet, self.config.netuid, metadata.to_compact())
124 | logger.success("Stored validator metadata", metadata = metadata.to_compact())
125 | except bt.errors.MetadataError as e:
126 | logger.warning("Skipping storing validator metadata", error = {'exception_type': e.__class__.__name__,'exception_message': str(e),'exception_args': e.args})
127 | except Exception as e:
128 | logger.warning(f"Skipping storing validator metadata,", error = {'exception_type': e.__class__.__name__,'exception_message': str(e),'exception_args': e.args})
129 |
130 | def get_miners_metadata(config, metagraph):
131 | def get_commitment(netuid: int, uid: int, block: Optional[int] = None) -> str:
132 | metadata = serving.get_metadata(subtensor, netuid, hotkey, block)
133 | if metadata is None:
134 | return None
135 | commitment = metadata["info"]["fields"][0]
136 | hex_data = commitment[list(commitment.keys())[0]][2:]
137 | return bytes.fromhex(hex_data).decode()
138 |
139 | subtensor = bt.subtensor(config=config)
140 | subtensor.get_commitment = get_commitment  # patched getter ignores uid and uses the loop's current hotkey
141 | miners_metadata = {}
142 |
143 | logger.info("Getting miners metadata")
144 | for axon in metagraph.axons:
145 | if axon.is_serving:
146 | hotkey = axon.hotkey
147 | try:
148 | metadata_str = subtensor.get_commitment(config.netuid, 0)  # uid is unused by the patched getter
149 | if metadata_str is None:
150 | continue
151 |
152 | metadata = MinerMetadata.from_compact(metadata_str)
153 | miners_metadata[hotkey] = metadata
154 | except Exception as e:
155 | logger.warning("Error while getting miner metadata, Skipping...", miner_hotkey=hotkey, error={'exception_type': e.__class__.__name__,'exception_message': str(e), 'exception_args': e.args})
156 | continue
157 |
158 | return miners_metadata
--------------------------------------------------------------------------------
/template/api/examples/subnet21.py:
--------------------------------------------------------------------------------
1 | # The MIT License (MIT)
2 | # Copyright © 2021 Yuma Rao
3 | # Copyright © 2023 Opentensor Foundation
4 | # Copyright © 2023 Opentensor Technologies Inc
5 |
6 | # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
7 | # documentation files (the “Software”), to deal in the Software without restriction, including without limitation
8 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
9 | # and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
10 |
11 | # The above copyright notice and this permission notice shall be included in all copies or substantial portions of
12 | # the Software.
13 |
14 | # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
15 | # THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
16 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
17 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
18 | # DEALINGS IN THE SOFTWARE.
19 |
20 | import base64
21 | import bittensor as bt
24 | from typing import Any, List, Union
25 | from bittensor.subnets import SubnetsAPI
26 | from neurons import logger
27 |
28 | try:
29 | from storage.validator.cid import generate_cid_string
30 | from storage.validator.encryption import (
31 | encrypt_data,
32 | decrypt_data_with_private_key,
33 | )
34 | except ImportError:
35 | storage_url = "https://github.com/ifrit98/storage-subnet"
36 | logger.error(
37 | f"Storage Subnet 21 not installed. Please visit: {storage_url} and install the package to use this example."
38 | )
39 |
40 |
41 | class StoreUserAPI(SubnetsAPI):
42 | def __init__(self, wallet: "bt.wallet"):
43 | super().__init__(wallet)
44 | self.netuid = 21
45 |
46 | def prepare_synapse(
47 | self,
48 | data: bytes,
49 | encrypt=False,
50 | ttl=60 * 60 * 24 * 30,
51 | encoding="utf-8",
52 | ) -> StoreUser:
53 | data = bytes(data, encoding) if isinstance(data, str) else data
54 | encrypted_data, encryption_payload = (
55 | encrypt_data(data, self.wallet) if encrypt else (data, "{}")
56 | )
57 | expected_cid = generate_cid_string(encrypted_data)
58 | encoded_data = base64.b64encode(encrypted_data)
59 |
60 | synapse = StoreUser(
61 | encrypted_data=encoded_data,
62 | encryption_payload=encryption_payload,
63 | ttl=ttl,
64 | )
65 |
66 | return synapse
67 |
68 | def process_responses(
69 | self, responses: List[Union["bt.Synapse", Any]]
70 | ) -> str:
71 | success = False
72 | failure_modes = {"code": [], "message": []}
73 | for response in responses:
74 | if response.dendrite.status_code != 200:
75 | failure_modes["code"].append(response.dendrite.status_code)
76 | failure_modes["message"].append(
77 | response.dendrite.status_message
78 | )
79 | continue
80 |
81 | stored_cid = (
82 | response.data_hash.decode("utf-8")
83 | if isinstance(response.data_hash, bytes)
84 | else response.data_hash
85 | )
86 | logger.debug("received data CID: {}".format(stored_cid))
87 | success = True
88 | break
89 |
90 | if success:
91 | logger.info(
92 | f"Stored data on the Bittensor network with CID {stored_cid}"
93 | )
94 | else:
95 | logger.error(
96 | f"Failed to store data. Response failure codes & messages {failure_modes}"
97 | )
98 | stored_cid = ""
99 |
100 | return stored_cid
101 |
102 |
103 | class RetrieveUserAPI(SubnetsAPI):
104 | def __init__(self, wallet: "bt.wallet"):
105 | super().__init__(wallet)
106 | self.netuid = 21
107 |
108 | def prepare_synapse(self, cid: str) -> RetrieveUser:
109 | synapse = RetrieveUser(data_hash=cid)
110 | return synapse
111 |
112 | def process_responses(
113 | self, responses: List[Union["bt.Synapse", Any]]
114 | ) -> bytes:
115 | success = False
116 | decrypted_data = b""
117 | for response in responses:
118 | logger.trace(f"response: {response.dendrite.dict()}")
119 | if (
120 | response.dendrite.status_code != 200
121 | or response.encrypted_data is None
122 | ):
123 | continue
124 |
125 | # Decrypt the response
126 | logger.trace(
127 | f"encrypted_data: {response.encrypted_data[:100]}"
128 | )
129 | encrypted_data = base64.b64decode(response.encrypted_data)
130 | logger.debug(
131 | f"encryption_payload: {response.encryption_payload}"
132 | )
133 | if (
134 | response.encryption_payload is None
135 | or response.encryption_payload == ""
136 | or response.encryption_payload == "{}"
137 | ):
138 | logger.warning(
139 | "No encryption payload found. Unencrypted data."
140 | )
141 | decrypted_data = encrypted_data
142 | else:
143 | decrypted_data = decrypt_data_with_private_key(
144 | encrypted_data,
145 | response.encryption_payload,
146 | bytes(self.wallet.coldkey.private_key.hex(), "utf-8"),
147 | )
148 | logger.trace(f"decrypted_data: {decrypted_data[:100]}")
149 | success = True
150 | break
151 |
152 | if success:
153 | logger.info(
154 | f"Returning retrieved data: {decrypted_data[:100]}"
155 | )
156 | else:
157 | logger.error("Failed to retrieve data.")
158 |
159 | return decrypted_data
160 |
161 |
162 | async def test_store_and_retrieve(
163 | netuid: int = 22, wallet: "bt.wallet" = None
164 | ):
165 | # Example usage
166 | wallet = wallet or bt.wallet()
167 |
168 | # Instantiate the handler
169 | store_handler = StoreUserAPI(wallet)
170 |
171 | # Fetch the axons you want to query
172 | metagraph = bt.subtensor("test").metagraph(netuid=netuid)
173 | query_axons = metagraph.axons
174 |
175 | cid = await store_handler(
176 | axons=query_axons,
177 | # any arguments for the proper synapse
178 | data=b"some data",
179 | encrypt=True,
180 | ttl=60 * 60 * 24 * 30,
181 | encoding="utf-8",
182 | uid=None,
183 | )
184 | print("CID:", cid)
185 |
186 | retrieve_handler = RetrieveUserAPI(wallet)
187 | retrieve_response = await retrieve_handler(axons=query_axons, cid=cid)
188 | print("Retrieved data:", retrieve_response)
--------------------------------------------------------------------------------
/docs/scoring_function_explanation.md:
--------------------------------------------------------------------------------
1 | # Blockchain Insight Scoring Function
2 |
3 | ## Scoring Function Overview
4 |
5 | Our scoring function is designed to provide a comprehensive evaluation of blockchain data through multiple key metrics:
6 |
7 | - **Block Height Coverage ($s_{1}$):** Indicates the percentage of block coverage, offering insights into the comprehensiveness of the blockchain data.
8 |
9 | - **Recency of Block ($s_{2}$):** Reflects the recency of the most recent block, helping users gauge the timeliness of the blockchain data.
10 |
11 | - **Response Time ($s_{3}$):** Measures the time it takes to respond, serving as a crucial indicator of the blockchain's efficiency and responsiveness.
12 |
13 | - **Weight Based on the Mined Blockchain ($s_{4}$):** Considers the specific blockchain mined (e.g., bitcoin, doge, etc.), providing contextual relevance to the scoring process.
14 |
15 | ### Scoring Formula
16 |
17 | The overall score is determined by the weighted sum of these four scores together with the uptime score ($s_{5}$, described in the Uptime section below), where $w_i$ represents the weight assigned to each respective metric. The formula is expressed as:
18 |
19 | $$
20 | \text{score} = \frac{\sum_{i=1}^{5} w_{i} \cdot s_{i}}{\sum_{i=1}^{5} w_{i}}$$
21 |
22 | This formula encapsulates the essence of our scoring mechanism, offering a balanced and informative evaluation of blockchain insights.
23 |
24 | ### Weight Assignments
25 |
26 | Currently, the weights are as follows:
27 |
28 | - $(w_{1} = 15)$: Block Height Coverage
29 | - $(w_{2} = 25)$: Recency of Block
30 | - $(w_{3} = 10)$: Response Time
31 | - $(w_{4} = 2)$: Weight Based on the Mined Blockchain (bitcoin, ethereum, etc.)
32 | - $(w_{5} = 49)$: Uptime
33 |
34 | In other words, to achieve the highest possible score, a miner should index a broad range of recent blocks from a significant blockchain (such as bitcoin), respond promptly, and maintain high uptime.
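
As a concrete illustration, here is a minimal sketch of how such a weighted average could be computed. The metric values are hypothetical, and each $s_i$ is assumed to be pre-normalized to $[0, 1]$:

```python
# Hypothetical, pre-normalized metric scores in [0, 1].
scores = {
    "block_height_coverage": 0.9,   # s1
    "block_recency": 0.8,           # s2
    "response_time": 0.7,           # s3
    "blockchain_weight": 1.0,       # s4
    "uptime": 0.95,                 # s5
}

weights = {
    "block_height_coverage": 15,
    "block_recency": 25,
    "response_time": 10,
    "blockchain_weight": 2,
    "uptime": 49,
}

# Weighted average, matching the formula above.
score = sum(weights[k] * scores[k] for k in scores) / sum(weights.values())
print(f"score = {score:.4f}")
```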
35 |
36 | ## Safeguards to Ensure Miner Decentralization
37 |
38 | To uphold decentralization within our network, we've established the following safeguards to prevent any participant from running more than 9 instances:
39 |
40 | Any participant meeting the following criteria will receive a score of 0 (a sketch of the check follows the list):
41 |
42 | - Usage of an IP address by more than 9 miners
43 | - Usage of a cold key by more than 9 miners
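
A minimal sketch of how such a check could work; the miner records and the limit constant below are illustrative, not the subnet's actual implementation:

```python
from collections import Counter

MAX_INSTANCES = 9

# Hypothetical miner records: (hotkey, ip_address, coldkey).
miners = [
    ("hk1", "1.2.3.4", "ck_a"),
    ("hk2", "1.2.3.4", "ck_a"),
]

ip_counts = Counter(ip for _, ip, _ in miners)
coldkey_counts = Counter(ck for _, _, ck in miners)

def passes_decentralization_check(ip: str, coldkey: str) -> bool:
    # A miner's score is forced to 0 when more than MAX_INSTANCES
    # miners share its IP address or its coldkey.
    return ip_counts[ip] <= MAX_INSTANCES and coldkey_counts[coldkey] <= MAX_INSTANCES
```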
44 |
45 | As our subnet expands to encompass other blockchains, we plan to gradually reduce this number to make room for a larger number of Memgraph instances.
46 |
47 | ## Conditions that result in a score of 0
48 |
49 | It's crucial to be aware that:
50 |
51 | - An indexed block range of fewer than 800,000 blocks will result in a score of 0.
52 |
53 | - A timeout response will result in a score of 0.
54 |
55 | - An incorrect response to a query gives a score of 0.
56 |
57 | - The weights and the minimum block range may be adjusted as network capabilities increase.
58 |
59 | ## Uptime
60 |
61 | Uptime is based on whether a miner answers a query of any type with an exact, correct value.
62 |
63 | Each time a miner times out or gives an incorrect answer, it is considered down; it is considered up again once it answers a subsequent query correctly and without timing out.
64 |
65 | The final uptime score is the average of the following metrics (see the sketch below):
66 | - Uptime for the last 24 hours
67 | - Uptime for the last week (7 days)
68 | - Uptime for the last month (30 days)
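
A minimal sketch of that averaging, assuming each window's uptime is already expressed as a fraction in $[0, 1]$:

```python
def uptime_score(day: float, week: float, month: float) -> float:
    """Average the 24-hour, 7-day, and 30-day uptime fractions."""
    return (day + week + month) / 3

# Example: fully up today, slightly worse over the week and the month.
print(uptime_score(1.0, 0.95, 0.90))  # 0.95
```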
69 |
70 | ## Deep Dive
71 |
72 | ### Scoring Function Implementation
73 |
74 | Our scoring function is implemented through a set of Python functions to assess various aspects of blockchain data. Let's break down how each component contributes to the overall score.
75 |
76 | ### Block Height Coverage ($s_{1}$) Calculation
77 |
78 | The `Block Height Coverage` function evaluates the coverage of indexed blocks within a blockchain. It considers the number of blocks covered as well as the minimum required blocks.
79 |
80 | The function is illustrated in the graph below:
81 | ![Block height coverage score function](imgs/scoring/block_height_function.png)
82 |
83 |
84 |
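The exact curve lives in the validator's scoring code; the following is only a rough, hypothetical sketch of the idea, assuming a hard minimum of 800,000 indexed blocks:

```python
def block_height_coverage_score(covered_blocks: int, chain_height: int,
                                min_blocks: int = 800_000) -> float:
    """Hypothetical coverage curve: 0 below the minimum range, then the
    covered fraction of the whole chain, capped at 1."""
    if covered_blocks < min_blocks:
        return 0.0
    return min(1.0, covered_blocks / chain_height)
```
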
85 | ### Recency of Block ($s_{2}$) Calculation
86 |
87 | `Recency of Block` measures the difference between the indexed end block height and the current blockchain block height. This function takes into account the recency of the miner's last indexed block relative to the most recent block of the worst-performing miner. The final recency score is based on this difference.
88 |
89 | The function is illustrated in the graph below:
90 | ![Recency score function](imgs/scoring/recency_score_function.png)
91 |
92 |
93 |
94 |
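Again, only a hypothetical sketch: the score falls with the miner's lag behind the chain tip, normalized by the lag of the worst-performing miner:

```python
def recency_score(miner_last_block: int, chain_height: int,
                  worst_last_block: int) -> float:
    """Hypothetical recency curve, normalized against the worst miner."""
    worst_lag = max(1, chain_height - worst_last_block)
    lag = max(0, chain_height - miner_last_block)
    return max(0.0, 1.0 - lag / worst_lag)
```
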
95 | ### Response Time ($s_{3}$) Calculation
96 |
97 | The `Response Time` function calculates the response time score based on the process time and discovery timeout. It considers the ratio of process time to timeout and squares it to emphasize the impact of longer processing times.
98 |
99 | The function is illustrated in the graph below:
100 | ![Process time score function](imgs/scoring/process_time_function.png)
101 |
102 |
103 |
104 |
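The squaring described above could look like the following sketch, assuming a score of 1 for an instant response and 0 at the timeout:

```python
def response_time_score(process_time: float, discovery_timeout: float) -> float:
    """Squared ratio of process time to timeout; slow responses are
    penalized more than linearly."""
    ratio = min(process_time / discovery_timeout, 1.0)
    return 1.0 - ratio ** 2
```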
105 |
106 | ### Weight Based on the Mined Blockchain ($s_{4}$) Calculation
107 |
108 | The `Weight Based on the Mined Blockchain` function assigns a weight to the blockchain based on its importance and distribution among miners. The overall score is a combination of the network's importance and the distribution score.
109 |
110 | The function is illustrated in the graph below:
111 | ![Blockchain weight function](imgs/scoring/blockchain_weight.png)
112 |
113 |
114 |
115 |
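As with the other components, this is only a hypothetical sketch: the blockchain's intrinsic importance blended with how miners are distributed across networks (the importance table and the 50/50 blend are illustrative):

```python
# Hypothetical importance weights per supported network.
NETWORK_IMPORTANCE = {"bitcoin": 1.0, "doge": 0.5}

def blockchain_weight_score(network: str, miners_on_network: int,
                            total_miners: int) -> float:
    """Blend network importance with a distribution score that
    favors under-populated networks."""
    importance = NETWORK_IMPORTANCE.get(network, 0.0)
    distribution = 1.0 - miners_on_network / max(1, total_miners)
    return 0.5 * importance + 0.5 * distribution
```
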
116 | ----
117 |
118 | In summary, the scoring function evaluates blockchain data based on the coverage, recency, response time, and the significance of the mined blockchain to provide a comprehensive and informative score.
119 |
120 | ## Further Work and Improvement
121 |
122 | We understand the critical importance of fostering an evenly distributed miner incentivization system, as it significantly impacts the competitiveness and overall quality of our subnet. Given that blockchain miners operate within deterministic parameters, where responses are categorized as either correct or incorrect, our scoring mechanisms must prioritize miner performance.
123 |
124 | To achieve this, we are planning to integrate the following components into our incentive structure:
125 |
126 | - [ ] **Weight scoring for Organic Queries:** Organic queries will have 2x the weight of synthetic queries, by integrating a receipt system that validators sign for organic queries.
127 | - [ ] **Hardware metrics:** By incorporating hardware-specific data, we can assess the performance of miners in relation to their hardware capabilities.
128 | - [ ] **Response quality for organic queries:** Evaluating how effectively miners respond to real-world queries will provide valuable insights into their accuracy.
129 | - [ ] **Dynamic weighting for scoring function:** Introducing adaptability into our scoring mechanism will allow for more nuanced evaluations.
130 |
131 | ### LLM (Large Language Model)
132 |
133 | Additionally, with the new introduction of LLM (Large Language Models) capabilities on the miner side, we anticipate further enhancements to our scoring function, incorporating stochastic features such as:
134 |
135 | - [ ] **Quality of query response explanations:** Assessing the clarity and depth of explanations provided alongside query responses.
136 | - [ ] **LLM capability to answer user queries:** Leveraging multilingual and complexity-handling capabilities to improve response quality.
137 | - [ ] **Tokens/sec scoring:** Indicating the miner's performance and the quality of the API used for LLM prompting and response.
138 |
139 | By integrating these elements, we aim to create a robust and comprehensive incentivization framework that drives continual improvement in miner performance and fosters a vibrant and competitive ecosystem within our subnet.
140 |
--------------------------------------------------------------------------------
/tests/validators/test_benchmark.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import json
3 | import re
4 | import unittest
5 | from random import randint
6 | from unittest.mock import Mock
7 | from neurons.validators.benchmark import ResponseProcessor
8 | from tests.validators import bitcoin_funds_flow_query_2, bitcoin_funds_flow_query, bitcoin_balance_tracking_query
9 |
10 |
11 | class BenchmarkQueryRegex(unittest.TestCase):
12 |
13 | def test_funds_flow_query_generation(self):
14 | diff = 1
15 | function_code = inspect.getsource(bitcoin_funds_flow_query.build_funds_flow_query) + f"\nquery = build_funds_flow_query(network, start_block, end_block, {diff})"
16 | with open('funds_flow_query_script.json', 'w') as file:
17 | json.dump({"code": function_code}, file)
18 |
19 | query_script = ""
20 | with open('funds_flow_query_script.json', 'r') as file:
21 | data = json.load(file)
22 | query_script = data['code']
23 |
24 | query = bitcoin_funds_flow_query.build_funds_flow_query('bitcoin', 1, 835000, diff)
25 | benchmark_query_script_vars = {
26 | 'network': 'bitcoin',
27 | 'start_block': 1,
28 | 'end_block': 835000,
29 | 'diff': diff,
30 | }
31 |
32 | exec(query_script, benchmark_query_script_vars)
33 | generated_query = benchmark_query_script_vars['query']
34 | print(generated_query)
35 |
36 | pattern = f"WITH\s+(?:range\(\d+,\s*\d+\)\s*\+\s*)*range\(\d+,\s*\d+\)\s+AS\s+block_heights\s+UNWIND\s+block_heights\s+AS\s+block_height\s+MATCH\s+p=\((sender:Address)\)-\[(sent1:SENT)\]->\((t:Transaction)\)-\[(sent2:SENT)\]->\((receiver:Address)\)\s+WHERE\s+t\.block_height\s+=\s+block_height\s+RETURN\s+SUM\(sent1\.value_satoshi\+sent2\.value_satoshi\)\s*\+\s*count\(sender\)\s*\+\s*count\(receiver\)\s*\+\s*count\(t\)\s+as\s+output"
37 | print(pattern)
38 |
39 | with open('funds_flow_query_regex.json', 'w') as file:
40 | json.dump({"regex": pattern}, file)
41 |
42 | regex = re.compile(pattern)
43 | match = regex.fullmatch(generated_query)
44 |
45 | self.assertIsNotNone(match)  # the generated query must fully match the expected pattern
46 |
47 | def test_funds_flow_query_generation_2(self):
48 | diff = 256
49 | function_code = inspect.getsource(bitcoin_funds_flow_query_2.build_funds_flow_query) + f"\nquery = build_funds_flow_query(network, start_block, end_block, {diff})"
50 | with open('funds_flow_query_script_2.json', 'w') as file:
51 | json.dump({"code": function_code}, file)
52 |
53 | query_script = ""
54 | with open('funds_flow_query_script_2.json', 'r') as file:
55 | data = json.load(file)
56 | query_script = data['code']
57 |
58 | query = bitcoin_funds_flow_query_2.build_funds_flow_query('bitcoin', 1, 835000, diff)
59 | benchmark_query_script_vars = {
60 | 'network': 'bitcoin',
61 | 'start_block': 1,
62 | 'end_block': 835000,
63 | 'diff': diff,
64 | }
65 |
66 | exec(query_script, benchmark_query_script_vars)
67 | generated_query = benchmark_query_script_vars['query']
68 | print(generated_query)
69 |
70 | pattern = r"WITH\s+(?:range\(\d+,\s*\d+\)\s*\+\s*)*range\(\d+,\s*\d+\)\s+AS\s+block_heights\s+UNWIND\s+block_heights\s+AS\s+block_height\s+MATCH\s+\(t:Transaction\)\s+WHERE\s+t\.block_height\s+=\s+block_height\s+WITH\s+t\s+MATCH\s+\(sender:Address\)-\[sent1:SENT\]->\(t\)-\[sent2:SENT\]->\(receiver:Address\)\s+WITH\s+SUM\(sent1\.value_satoshi\s*\+\s*sent2\.value_satoshi\)\s+AS\s+total_value,\s+COUNT\(sender\)\s+AS\s+sender_count,\s+COUNT\(receiver\)\s+AS\s+receiver_count,\s+COUNT\(t\)\s+AS\s+transaction_count\s+RETURN\s+total_value\s*\+\s*sender_count\s*\+\s*receiver_count\s*\+\s*transaction_count\s+AS\s+output"
71 |
72 | print(pattern)
73 |
74 | with open('funds_flow_query_regex_2.json', 'w') as file:
75 | json.dump({"regex": pattern}, file)
76 |
77 | regex = re.compile(pattern)
78 | match = regex.fullmatch(generated_query)
79 |
80 | self.assertIsNotNone(match)  # the generated query must fully match the expected pattern
81 |
82 | def test_balance_tracking_query_generation(self):
83 | diff = 256
84 | function_code = inspect.getsource(bitcoin_balance_tracking_query.build_balance_tracking_query) + f"\nquery = build_balance_tracking_query(network, start_block, balance_end, {diff})"
85 | with open('balance_tracking_query_script.json', 'w') as file:
86 | json.dump({"code": function_code}, file)
87 |
88 | query_script = ""
89 | with open('balance_tracking_query_script.json', 'r') as file:
90 | data = json.load(file)
91 | query_script = data['code']
92 |
93 | query = bitcoin_balance_tracking_query.build_balance_tracking_query('bitcoin', 1, 835000, diff)
94 | benchmark_query_script_vars = {
95 | 'network': 'bitcoin',
96 | 'start_block': 1,
97 | 'end_block': 835000,
98 | 'balance_end': 835000,
99 | 'diff': diff,
100 | }
101 |
102 | exec(query_script, benchmark_query_script_vars)
103 | generated_query = benchmark_query_script_vars['query']
104 | print(generated_query)
105 |
106 | pattern = "WITH\s+block_heights\s+AS\s+\(\s*SELECT\s+generate_series\(\d+,\s*\d+\)\s+AS\s+block\s+(?:UNION\s+ALL\s+SELECT\s+generate_series\(\d+,\s*\d+\)\s+AS\s+block\s+)+\)\s+SELECT\s+SUM\(\s*block\s*\)\s+FROM\s+balance_changes\s+WHERE\s+block\s+IN\s+\(SELECT\s+block\s+FROM\s+block_heights\)"
107 | print(pattern)
108 |
109 | with open('balance_query_regex.json', 'w') as file:
110 | json.dump({"regex": pattern}, file)
111 |
112 | regex = re.compile(pattern)
113 | match = regex.fullmatch(generated_query)
114 |
115 | self.assertIsNotNone(match)  # the generated query must fully match the expected pattern
116 |
117 |
118 | class TestResponseProcessor(unittest.TestCase):
119 | def setUp(self):
120 | self.validator_config = Mock()
121 | self.validator_config.benchmark_query_chunk_size = 32
122 | self.processor = ResponseProcessor(self.validator_config)
123 |
124 | def generate_ips(self, num_ips):
125 | ips = []
126 | for _ in range(num_ips):
127 | ip = ".".join(map(str, (randint(0, 255) for _ in range(4))))
128 | ips.append(ip)
129 | return ips
130 |
131 | def test_group_responses(self):
132 | # Create mock responses
133 | responses = []
134 | ips = self.generate_ips(100)
135 | for i in range(len(ips)):
136 | response = Mock()
137 | response.axon.ip = ips[i]
138 | response.axon.hotkey = f'hotkey_{i}'
139 | response.output.metadata.network = 'bitcoin'
140 | response.output.start_block_height = i
141 | response.output.block_height = i + 10
142 | response.output.balance_model_last_block = i + 20
143 | responses.append((response, i))
144 |
145 | # Call the method under test
146 | result = self.processor.group_responses(responses)
147 |
148 | # Assert the expected output
149 | self.assertEqual(len(result), 1)  # only one network: 'bitcoin'
150 | self.assertEqual(len(result['bitcoin']), self.validator_config.benchmark_query_chunk_size)  # 32 groups, one per chunk slot
151 | for i, group_info in result['bitcoin'].items():
152 | self.assertEqual(group_info['common_start'], i)
153 | self.assertEqual(group_info['common_end'], i + 10)
154 | self.assertEqual(group_info['balance_end'], i + 20)
155 | self.assertEqual(len(group_info['responses']), 1) # Each group has 1 response
156 |
157 | if __name__ == '__main__':
158 | unittest.main()
--------------------------------------------------------------------------------
/docs/stream_tutorial/protocol.py:
--------------------------------------------------------------------------------
1 | import pydantic
2 | import bittensor as bt
3 |
4 | from typing import List
6 | from starlette.responses import StreamingResponse
7 |
8 |
9 | class StreamPrompting(bt.StreamingSynapse):
10 | """
11 | StreamPrompting is a specialized implementation of the `StreamingSynapse` tailored for prompting functionalities within
12 | the Bittensor network. This class is intended to interact with a streaming response that contains a sequence of tokens,
13 | which represent prompts or messages in a certain scenario.
14 |
15 | As a developer, when using or extending the `StreamPrompting` class, you should be primarily focused on the structure
16 | and behavior of the prompts you are working with. The class has been designed to seamlessly handle the streaming,
17 | decoding, and accumulation of tokens that represent these prompts.
18 |
19 | Attributes:
20 | - `roles` (List[str]): A list of roles involved in the prompting scenario. This could represent different entities
21 | or agents involved in the conversation or use-case. They are immutable to ensure consistent
22 | interaction throughout the lifetime of the object.
23 |
24 | - `messages` (List[str]): These represent the actual prompts or messages in the prompting scenario. They are also
25 | immutable to ensure consistent behavior during processing.
26 |
27 | - `completion` (str): Stores the processed result of the streaming tokens. As tokens are streamed, decoded, and
28 | processed, they are accumulated in the completion attribute. This represents the "final"
29 | product or result of the streaming process.
30 | - `required_hash_fields` (List[str]): A list of fields that are required for the hash.
31 |
32 | Methods:
33 | - `process_streaming_response`: This method asynchronously processes the incoming streaming response by decoding
34 | the tokens and accumulating them in the `completion` attribute.
35 |
36 | - `deserialize`: Converts the `completion` attribute into its desired data format, in this case, a string.
37 |
38 | - `extract_response_json`: Extracts relevant JSON data from the response, useful for gaining insights on the response's
39 | metadata or for debugging purposes.
40 |
41 | Note: While you can directly use the `StreamPrompting` class, it's designed to be extensible. Thus, you can create
42 | subclasses to further customize behavior for specific prompting scenarios or requirements.
43 | """
44 |
45 | roles: List[str] = pydantic.Field(
46 | ...,
47 | title="Roles",
48 | description="A list of roles in the StreamPrompting scenario. Immuatable.",
49 | allow_mutation=False,
50 | )
51 |
52 | messages: List[str] = pydantic.Field(
53 | ...,
54 | title="Messages",
55 | description="A list of messages in the StreamPrompting scenario. Immutable.",
56 | allow_mutation=False,
57 | )
58 |
59 | required_hash_fields: List[str] = pydantic.Field(
60 | ["messages"],
61 | title="Required Hash Fields",
62 | description="A list of required fields for the hash.",
63 | allow_mutation=False,
64 | )
65 |
66 | completion: str = pydantic.Field(
67 | "",
68 | title="Completion",
69 | description="Completion status of the current StreamPrompting object. This attribute is mutable and can be updated.",
70 | )
71 |
72 | async def process_streaming_response(self, response: StreamingResponse):
73 | """
74 | `process_streaming_response` is an asynchronous method designed to process the incoming streaming response from the
75 | Bittensor network. It's the heart of the StreamPrompting class, ensuring that streaming tokens, which represent
76 | prompts or messages, are decoded and appropriately managed.
77 |
78 | As the streaming response is consumed, the tokens are decoded from their 'utf-8' encoded format, split based on
79 | newline characters, and concatenated into the `completion` attribute. This accumulation of decoded tokens in the
80 | `completion` attribute allows for a continuous and coherent accumulation of the streaming content.
81 |
82 | Args:
83 | response: The streaming response object containing the content chunks to be processed. Each chunk in this
84 | response is expected to be a set of tokens that can be decoded and split into individual messages or prompts.
85 | """
86 | if self.completion is None:
87 | self.completion = ""
88 | bt.logging.debug(
89 | "Processing streaming response (StreamingSynapse base class)."
90 | )
91 | async for chunk in response.content.iter_any():
92 | bt.logging.debug(f"Processing chunk: {chunk}")
93 | tokens = chunk.decode("utf-8").split("\n")
94 | for token in tokens:
95 | bt.logging.debug(f"--processing token: {token}")
96 | if token:
97 | self.completion += token
98 | bt.logging.debug(f"yielding tokens {tokens}")
99 | yield tokens
100 |
101 | def deserialize(self) -> str:
102 | """
103 | Deserializes the response by returning the completion attribute.
104 |
105 | Returns:
106 | str: The completion result.
107 | """
108 | return self.completion
109 |
110 | def extract_response_json(self, response: StreamingResponse) -> dict:
111 | """
112 | `extract_response_json` is a method that performs the crucial task of extracting pertinent JSON data from the given
113 | response. The method is especially useful when you need a detailed insight into the streaming response's metadata
114 | or when debugging response-related issues.
115 |
116 | Beyond just extracting the JSON data, the method also processes and structures the data for easier consumption
117 | and understanding. For instance, it extracts specific headers related to dendrite and axon, offering insights
118 | about the Bittensor network's internal processes. The method ultimately returns a dictionary with a structured
119 | view of the extracted data.
120 |
121 | Args:
122 | response: The response object from which to extract the JSON data. This object typically includes headers and
123 | content which can be used to glean insights about the response.
124 |
125 | Returns:
126 | dict: A structured dictionary containing:
127 | - Basic response metadata such as name, timeout, total_size, and header_size.
128 | - Dendrite and Axon related information extracted from headers.
129 | - Roles and Messages pertaining to the current StreamPrompting instance.
130 | - The accumulated completion.
131 | """
132 | headers = {
133 | k.decode("utf-8"): v.decode("utf-8")
134 | for k, v in response.__dict__["_raw_headers"]
135 | }
136 |
137 | def extract_info(prefix):
138 | return {
139 | key.split("_")[-1]: value
140 | for key, value in headers.items()
141 | if key.startswith(prefix)
142 | }
143 |
144 | return {
145 | "name": headers.get("name", ""),
146 | "timeout": float(headers.get("timeout", 0)),
147 | "total_size": int(headers.get("total_size", 0)),
148 | "header_size": int(headers.get("header_size", 0)),
149 | "dendrite": extract_info("bt_header_dendrite"),
150 | "axon": extract_info("bt_header_axon"),
151 | "roles": self.roles,
152 | "messages": self.messages,
153 | "completion": self.completion,
154 | }
155 |
--------------------------------------------------------------------------------