├── .circleci ├── check_pr_status.sh └── config.yml ├── .coveragerc ├── .dockerignore ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yaml │ └── feature_request.yaml ├── PULL_REQUEST_TEMPLATE │ ├── bug_fix.md │ ├── feature_change.md │ └── performance_improvement.md ├── auto_assign.yml ├── dependabot.yml ├── pull_request_template.md └── workflows │ ├── auto-assign.yml │ ├── docker_release.yml │ ├── e2e-subtensor-tests.yaml │ ├── monitor_requirements_size_master.yml │ └── release.yml ├── .gitignore ├── .test_durations ├── CHANGELOG.md ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── bittensor ├── __init__.py ├── __main__.py ├── core │ ├── __init__.py │ ├── async_subtensor.py │ ├── axon.py │ ├── chain_data │ │ ├── __init__.py │ │ ├── axon_info.py │ │ ├── chain_identity.py │ │ ├── delegate_info.py │ │ ├── delegate_info_lite.py │ │ ├── dynamic_info.py │ │ ├── info_base.py │ │ ├── ip_info.py │ │ ├── metagraph_info.py │ │ ├── neuron_info.py │ │ ├── neuron_info_lite.py │ │ ├── prometheus_info.py │ │ ├── proposal_vote_data.py │ │ ├── scheduled_coldkey_swap_info.py │ │ ├── stake_info.py │ │ ├── subnet_hyperparameters.py │ │ ├── subnet_identity.py │ │ ├── subnet_info.py │ │ ├── subnet_state.py │ │ ├── utils.py │ │ └── weight_commit_info.py │ ├── config.py │ ├── dendrite.py │ ├── errors.py │ ├── extrinsics │ │ ├── __init__.py │ │ ├── asyncex │ │ │ ├── __init__.py │ │ │ ├── commit_reveal.py │ │ │ ├── move_stake.py │ │ │ ├── registration.py │ │ │ ├── root.py │ │ │ ├── serving.py │ │ │ ├── staking.py │ │ │ ├── start_call.py │ │ │ ├── take.py │ │ │ ├── transfer.py │ │ │ ├── unstaking.py │ │ │ └── weights.py │ │ ├── commit_reveal.py │ │ ├── commit_weights.py │ │ ├── move_stake.py │ │ ├── registration.py │ │ ├── root.py │ │ ├── serving.py │ │ ├── set_weights.py │ │ ├── staking.py │ │ ├── start_call.py │ │ ├── take.py │ │ ├── transfer.py │ │ ├── unstaking.py │ │ └── utils.py │ ├── metagraph.py │ ├── settings.py │ ├── stream.py │ ├── subtensor.py │ ├── subtensor_api │ │ ├── __init__.py │ │ ├── chain.py │ │ ├── commitments.py │ │ ├── delegates.py │ │ ├── extrinsics.py │ │ ├── metagraphs.py │ │ ├── neurons.py │ │ ├── queries.py │ │ ├── staking.py │ │ ├── subnets.py │ │ ├── utils.py │ │ └── wallets.py │ ├── synapse.py │ ├── tensor.py │ ├── threadpool.py │ ├── timelock.py │ └── types.py └── utils │ ├── __init__.py │ ├── axon_utils.py │ ├── balance.py │ ├── btlogging │ ├── __init__.py │ ├── console.py │ ├── defines.py │ ├── format.py │ ├── helpers.py │ └── loggingmachine.py │ ├── certifi.sh │ ├── easy_imports.py │ ├── formatting.py │ ├── mock │ ├── __init__.py │ └── subtensor_mock.py │ ├── networking.py │ ├── registration │ ├── __init__.py │ ├── async_pow.py │ ├── pow.py │ └── register_cuda.py │ ├── subnets.py │ ├── substrate_utils │ ├── __init__.py │ ├── hasher.py │ └── storage.py │ ├── version.py │ └── weight_utils.py ├── contrib ├── CODE_REVIEW_DOCS.md ├── CONTRIBUTING.md ├── DEBUGGING.md ├── DEVELOPMENT_WORKFLOW.md ├── RELEASE_GUIDELINES.md ├── STYLE.md └── TESTING.md ├── docker-compose.yml ├── example.env ├── mypy.ini ├── pyproject.toml ├── scripts ├── check_pre_submit.sh ├── check_requirements_changes.sh ├── create_wallet.sh ├── install.sh └── post_install_cli.py └── tests ├── __init__.py ├── e2e_tests ├── __init__.py ├── conftest.py ├── test_axon.py ├── test_commit_reveal_v3.py ├── test_commit_weights.py ├── test_commitment.py ├── test_cross_subtensor_compatibility.py ├── test_delegate.py ├── test_dendrite.py ├── test_hotkeys.py ├── test_incentive.py ├── test_liquid_alpha.py ├── 
test_metagraph.py ├── test_neuron_certificate.py ├── test_reveal_commitments.py ├── test_root_set_weights.py ├── test_set_subnet_identity_extrinsic.py ├── test_set_weights.py ├── test_stake_fee.py ├── test_staking.py ├── test_subnets.py ├── test_subtensor_functions.py ├── test_transfer.py └── utils │ ├── chain_interactions.py │ └── e2e_test_utils.py ├── helpers ├── __init__.py ├── helpers.py └── integration_websocket_data.py ├── integration_tests ├── __init__.py ├── test_metagraph_integration.py ├── test_subtensor_integration.py ├── test_timelock.py └── utils │ └── test_init.py ├── pytest.ini └── unit_tests ├── __init__.py ├── chain_data ├── __init__.py └── test_metagraph_info.py ├── conftest.py ├── extrinsics ├── __init__.py ├── asyncex │ ├── __init__.py │ ├── conftest.py │ ├── test_commit_reveal.py │ ├── test_registration.py │ ├── test_root.py │ ├── test_start_call.py │ ├── test_transfer.py │ └── test_weights.py ├── test__init__.py ├── test_commit_reveal.py ├── test_commit_weights.py ├── test_registration.py ├── test_root.py ├── test_serving.py ├── test_set_weights.py ├── test_staking.py ├── test_start_call.py ├── test_transfer.py ├── test_unstaking.py └── test_utils.py ├── factories ├── __init__.py └── neuron_factory.py ├── test_async_subtensor.py ├── test_axon.py ├── test_chain_data.py ├── test_config.py ├── test_dendrite.py ├── test_deprecated.py ├── test_easy_imports.py ├── test_errors.py ├── test_logging.py ├── test_metagraph.py ├── test_subnets.py ├── test_subtensor.py ├── test_subtensor_api.py ├── test_subtensor_extended.py ├── test_synapse.py ├── test_tensor.py └── utils ├── __init__.py ├── test_balance.py ├── test_formatting.py ├── test_networking.py ├── test_registration.py ├── test_utils.py ├── test_version.py └── test_weight_utils.py /.circleci/check_pr_status.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Extract the repository owner 4 | REPO_OWNER=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $(NF-3)}') 5 | 6 | # Extract the repository name 7 | REPO_NAME=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $(NF-2)}') 8 | 9 | # Extract the pull request number 10 | PR_NUMBER=$(echo $CIRCLE_PULL_REQUEST | awk -F'/' '{print $NF}') 11 | 12 | 13 | PR_DETAILS=$(curl -s \ 14 | "https://api.github.com/repos/$REPO_OWNER/$REPO_NAME/pulls/$PR_NUMBER") 15 | 16 | 17 | IS_DRAFT=$(echo "$PR_DETAILS" | jq -r .draft) 18 | echo $IS_DRAFT 19 | 20 | if [ "$IS_DRAFT" == "true" ]; then 21 | echo "This PR is a draft. Skipping the workflow." 22 | exit 1 23 | else 24 | echo "This PR is not a draft. Proceeding with the workflow." 
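# Illustrative note (hypothetical PR URL, not part of the original script): if
# CIRCLE_PULL_REQUEST were https://github.com/opentensor/bittensor/pull/1234,
# the awk field extractions above would yield REPO_OWNER=opentensor,
# REPO_NAME=bittensor and PR_NUMBER=1234.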
25 | exit 0 26 | fi 27 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = 3 | ./nuclei/* 4 | ./routers/* 5 | ./setup.py 6 | ./tests/* 7 | ./env/* 8 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | **/data/ 2 | **/*.log 3 | **/*.png 4 | **/*.pstats 5 | **/*.ipynb 6 | **/bittensor.egg-info/* 7 | **/lib/* 8 | **/build/* 9 | **/dist/* 10 | **/runs/* 11 | **/env/* 12 | **/venv/* 13 | **/tmp/* 14 | **/test_results/* 15 | **/__pycache__/* 16 | **/.circleci 17 | **/.git 18 | **/.github 19 | **/.hypothesis 20 | **/.vscode 21 | **/.gitignore 22 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 120 3 | exclude = .git,__pycache__, __init__.py, docs/source/conf.py,old,build,dist,venv,.venv,.tox 4 | select = E9,F63,F7,F82,F401 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yaml: -------------------------------------------------------------------------------- 1 | name: Bug report 2 | description: Create a report to help us improve 3 | labels: [bug] 4 | assignees: [] 5 | 6 | body: 7 | - type: textarea 8 | id: bug-description 9 | attributes: 10 | label: Describe the bug 11 | description: A clear and concise description of what the bug is. 12 | validations: 13 | required: true 14 | 15 | - type: textarea 16 | id: reproduce 17 | attributes: 18 | label: To Reproduce 19 | description: Steps to reproduce the behavior. 20 | placeholder: | 21 | 1. Go to '...' 22 | 2. Run command '...' 23 | 3. Scroll down to '....' 24 | 4. See error 25 | validations: 26 | required: true 27 | 28 | - type: textarea 29 | id: expected-behavior 30 | attributes: 31 | label: Expected behavior 32 | description: A clear and concise description of what you expected to happen. 33 | validations: 34 | required: true 35 | 36 | - type: textarea 37 | id: screenshots 38 | attributes: 39 | label: Screenshots 40 | description: If applicable, add screenshots to help explain your problem. 41 | validations: 42 | required: false 43 | 44 | - type: input 45 | id: environment 46 | attributes: 47 | label: Environment 48 | description: Please specify your OS and Distro, and Bittensor Version. 49 | placeholder: "OS and Distro: [e.g. Linux Ubuntu, Linux Fedora, etc.], Bittensor Version [e.g. 22]" 50 | validations: 51 | required: true 52 | 53 | - type: textarea 54 | id: additional-context 55 | attributes: 56 | label: Additional context 57 | description: Add any other context about the problem here. 58 | validations: 59 | required: false 60 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yaml: -------------------------------------------------------------------------------- 1 | name: Feature request 2 | description: Suggest an idea for this project 3 | labels: [feature] 4 | assignees: [] 5 | 6 | body: 7 | - type: textarea 8 | id: problem-description 9 | attributes: 10 | label: Is your feature request related to a problem? Please describe. 11 | description: A clear and concise description of what the problem is. 12 | placeholder: "Ex. 
I'm always frustrated when [...]" 13 | validations: 14 | required: true 15 | 16 | - type: textarea 17 | id: solution 18 | attributes: 19 | label: Describe the solution you'd like 20 | description: A clear and concise description of what you want to happen. 21 | validations: 22 | required: true 23 | 24 | - type: textarea 25 | id: alternatives 26 | attributes: 27 | label: Describe alternatives you've considered 28 | description: A clear and concise description of any alternative solutions or features you've considered. 29 | validations: 30 | required: false 31 | 32 | - type: textarea 33 | id: additional-context 34 | attributes: 35 | label: Additional context 36 | description: Add any other context or screenshots about the feature request here. 37 | validations: 38 | required: false 39 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/bug_fix.md: -------------------------------------------------------------------------------- 1 | 11 | 12 | ### Bug 13 | 14 | 15 | 16 | ### Description of the Change 17 | 18 | 25 | 26 | ### Alternate Designs 27 | 28 | 29 | 30 | ### Possible Drawbacks 31 | 32 | 33 | 34 | ### Verification Process 35 | 36 | 42 | 43 | ### Release Notes 44 | 45 | 60 | 61 | 62 | ### Branch Acknowledgement 63 | [ ] I am acknowledging that I am opening this branch against `staging` 64 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/feature_change.md: -------------------------------------------------------------------------------- 1 | ### Requirements for Adding, Changing, or Removing a Feature 2 | 3 | * Fill out the template below. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. 4 | * The pull request must contribute a change that has been endorsed by the maintainer team. See details in the template below. 5 | * The pull request must update the test suite to exercise the updated functionality. 6 | * After you create the pull request, all status checks must pass before a maintainer reviews your contribution. 7 | 8 | ### Description of the Change 9 | 10 | 15 | 16 | ### Alternate Designs 17 | 18 | 19 | 20 | ### Possible Drawbacks 21 | 22 | 23 | 24 | ### Verification Process 25 | 26 | 37 | 38 | ### Release Notes 39 | 40 | 55 | 56 | 57 | ### Branch Acknowledgement 58 | [ ] I am acknowledging that I am opening this branch against `staging` 59 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/performance_improvement.md: -------------------------------------------------------------------------------- 1 | ### Requirements for Contributing a Performance Improvement 2 | 3 | * Fill out the template below. Any pull request that does not include enough information to be reviewed in a timely manner may be closed at the maintainers' discretion. 4 | * The pull request must only affect performance of existing functionality. 5 | * After you create the pull request, all status checks must pass before a maintainer reviews your contribution. 
6 | 7 | ### Description of the Change 8 | 9 | 14 | 15 | ### Quantitative Performance Benefits 16 | 17 | 22 | 23 | ### Possible Drawbacks 24 | 25 | 26 | 27 | ### Verification Process 28 | 29 | 34 | 35 | ### Applicable Issues 36 | 37 | 38 | 39 | ### Release Notes 40 | 41 | 56 | 57 | 58 | ### Branch Acknowledgement 59 | [ ] I am acknowledging that I am opening this branch against `staging` 60 | -------------------------------------------------------------------------------- /.github/auto_assign.yml: -------------------------------------------------------------------------------- 1 | addReviewers: true 2 | 3 | # A list of team slugs to add as assignees 4 | reviewers: 5 | - opentensor/cortex 6 | 7 | numberOfReviewers: 0 -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | open-pull-requests-limit: 0 # Only security updates will be opened as PRs 8 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Welcome! 2 | 3 | Due to [GitHub limitations](https://github.com/orgs/community/discussions/4620), 4 | please switch to **Preview** for links to render properly. 5 | 6 | Please choose the right template for your pull request: 7 | 8 | - 🐛 Are you fixing a bug? [Bug fix](?expand=1&template=bug_fix.md) 9 | - 📈 Are you improving performance? [Performance improvement](?expand=1&template=performance_improvement.md) 10 | - 💻 Are you changing functionality? [Feature change](?expand=1&template=feature_change.md) 11 | -------------------------------------------------------------------------------- /.github/workflows/auto-assign.yml: -------------------------------------------------------------------------------- 1 | name: Auto Assign Cortex to Pull Requests 2 | 3 | on: 4 | pull_request: 5 | types: [opened, reopened] 6 | 7 | jobs: 8 | auto-assign: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - name: Auto-assign Cortex Team 12 | uses: kentaro-m/auto-assign-action@v1.2.4 13 | with: 14 | repo-token: "${{ secrets.GITHUB_TOKEN }}" 15 | configuration-path: .github/auto_assign.yml -------------------------------------------------------------------------------- /.github/workflows/docker_release.yml: -------------------------------------------------------------------------------- 1 | name: Build and Push Docker Image 2 | # https://github.com/sigstore/cosign 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | tag: 7 | description: 'Docker image tag' 8 | required: true 9 | default: 'latest' 10 | 11 | jobs: 12 | build-and-push: 13 | runs-on: ubuntu-latest 14 | 15 | permissions: 16 | contents: read 17 | id-token: write 18 | 19 | steps: 20 | - name: Check out code 21 | uses: actions/checkout@v4 22 | 23 | - name: Install cosign 24 | uses: sigstore/cosign-installer@v3 25 | 26 | - name: Log in to Docker Hub 27 | uses: docker/login-action@v2 28 | with: 29 | registry: docker.io 30 | username: ${{ secrets.DOCKERHUB_USERNAME }} 31 | password: ${{ secrets.DOCKERHUB_TOKEN }} 32 | 33 | - name: Set up Docker Buildx 34 | uses: docker/setup-buildx-action@v2 35 | 36 | - name: Build and push Docker image 37 | uses: docker/build-push-action@v4 38 | with: 39 | context: . 
40 | push: true 41 | tags: | 42 | opentensorfdn/bittensor:${{ github.event.inputs.tag }} 43 | opentensorfdn/bittensor:latest 44 | provenance: false 45 | 46 | - name: Sign the images with GitHub OIDC Token 47 | env: 48 | DIGEST: ${{ steps.build.outputs.digest }} 49 | TAGS: ${{ steps.build.outputs.tags }} 50 | run: | 51 | echo "${TAGS}" | xargs -I {} cosign sign --yes {}@${DIGEST} 52 | -------------------------------------------------------------------------------- /.github/workflows/monitor_requirements_size_master.yml: -------------------------------------------------------------------------------- 1 | # This workflow measures the disk size of a virtual environment 2 | # after installing the Bittensor SDK across multiple Python versions. 3 | # It runs only when a new pull request targets the master branch, 4 | # and posts a comment with the results. 5 | name: Monitor SDK Requirements Size 6 | 7 | on: 8 | pull_request: 9 | types: [opened, labeled] 10 | branches: [master, staging] 11 | 12 | permissions: 13 | pull-requests: write 14 | contents: read 15 | 16 | jobs: 17 | measure-venv: 18 | if: github.event_name == 'pull_request' && github.base_ref == 'master' || contains( github.event.pull_request.labels.*.name, 'show-venv-size') 19 | runs-on: ubuntu-latest 20 | strategy: 21 | matrix: 22 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 23 | outputs: 24 | py39: ${{ steps.set-output.outputs.py39 }} 25 | py310: ${{ steps.set-output.outputs.py310 }} 26 | py311: ${{ steps.set-output.outputs.py311 }} 27 | py312: ${{ steps.set-output.outputs.py312 }} 28 | py313: ${{ steps.set-output.outputs.py313 }} 29 | 30 | steps: 31 | - uses: actions/checkout@v4 32 | - uses: actions/setup-python@v5 33 | with: 34 | python-version: ${{ matrix.python-version }} 35 | 36 | - name: Create virtualenv and install 37 | run: | 38 | python -m venv venv 39 | source venv/bin/activate 40 | pip install --upgrade pip 41 | pip install . 
42 | 43 | - name: Measure venv size 44 | id: set-output 45 | run: | 46 | SIZE=$(du -sm venv | cut -f1) 47 | VERSION=${{ matrix.python-version }} 48 | echo "Detected size: $SIZE MB for Python $VERSION" 49 | case "$VERSION" in 50 | 3.9) echo "py39=$SIZE" >> $GITHUB_OUTPUT ;; 51 | 3.10) echo "py310=$SIZE" >> $GITHUB_OUTPUT ;; 52 | 3.11) echo "py311=$SIZE" >> $GITHUB_OUTPUT ;; 53 | 3.12) echo "py312=$SIZE" >> $GITHUB_OUTPUT ;; 54 | 3.13) echo "py313=$SIZE" >> $GITHUB_OUTPUT ;; 55 | esac 56 | 57 | comment-on-pr: 58 | needs: measure-venv 59 | runs-on: ubuntu-latest 60 | steps: 61 | - name: Post venv size summary to PR 62 | uses: actions/github-script@v7 63 | with: 64 | github-token: ${{ secrets.GITHUB_TOKEN }} 65 | script: | 66 | const sizes = { 67 | "3.9": "${{ needs.measure-venv.outputs.py39 || 'N/A' }}", 68 | "3.10": "${{ needs.measure-venv.outputs.py310 || 'N/A' }}", 69 | "3.11": "${{ needs.measure-venv.outputs.py311 || 'N/A' }}", 70 | "3.12": "${{ needs.measure-venv.outputs.py312 || 'N/A' }}", 71 | "3.13": "${{ needs.measure-venv.outputs.py313 || 'N/A' }}", 72 | }; 73 | 74 | const body = [ 75 | '**Bittensor SDK virtual environment sizes by Python version:**', 76 | '', 77 | '```' 78 | ] 79 | .concat(Object.entries(sizes).map(([v, s]) => `Python ${v}: ${s} MB`)) 80 | .concat(['```']) 81 | .join('\n'); 82 | 83 | github.rest.issues.createComment({ 84 | issue_number: context.issue.number, 85 | owner: context.repo.owner, 86 | repo: context.repo.repo, 87 | body 88 | }); 89 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Build and Publish Python Package 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | version: 7 | description: 'Version to release' 8 | required: true 9 | type: string 10 | 11 | jobs: 12 | build: 13 | name: Build Python distribution 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | 18 | - name: Set up Python 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: '3.10' 22 | 23 | - name: Install dependencies 24 | run: | 25 | python -m pip install --upgrade pip 26 | pip install setuptools wheel twine build toml 27 | 28 | - name: Build package 29 | run: python -m build --sdist --wheel --outdir dist/ 30 | 31 | - name: Check if package version already exists 32 | run: | 33 | PACKAGE_NAME=$(python -c "import toml; print(toml.load('pyproject.toml')['project']['name'])") 34 | PACKAGE_VERSION=${{ github.event.inputs.version }} 35 | if twine check dist/*; then 36 | if pip install $PACKAGE_NAME==$PACKAGE_VERSION; then 37 | echo "Error: Version $PACKAGE_VERSION of $PACKAGE_NAME already exists on PyPI" 38 | exit 1 39 | else 40 | echo "Version $PACKAGE_VERSION of $PACKAGE_NAME does not exist on PyPI. Proceeding with upload." 41 | fi 42 | else 43 | echo "Error: Twine check failed." 
44 | exit 1 45 | fi 46 | 47 | - name: Upload artifact 48 | uses: actions/upload-artifact@v4 49 | with: 50 | name: dist 51 | path: dist/ 52 | 53 | approve-and-publish: 54 | needs: build 55 | runs-on: ubuntu-latest 56 | environment: release 57 | permissions: 58 | contents: read 59 | id-token: write 60 | 61 | steps: 62 | - name: Download artifact 63 | uses: actions/download-artifact@v4 64 | with: 65 | name: dist 66 | path: dist/ 67 | 68 | - name: Publish package distributions to PyPI 69 | uses: pypa/gh-action-pypi-publish@release/v1 70 | with: 71 | verbose: true 72 | print-hash: true 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | **/__pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | *.pyc 6 | 7 | # Remove notebooks. 8 | *.ipynb 9 | 10 | # weigths and biases 11 | wandb/ 12 | 13 | *.csv 14 | *.torch 15 | *.pt 16 | *.log 17 | 18 | # runs/data/models/logs/~ 19 | data/ 20 | **/data/ 21 | 22 | # C extensions 23 | *.so 24 | 25 | # IDE 26 | *.idea/ 27 | 28 | # VSCODE 29 | .vscode/ 30 | 31 | # Distribution / packaging 32 | .Python 33 | build/ 34 | develop-eggs/ 35 | dist/ 36 | downloads/ 37 | eggs/ 38 | .eggs/ 39 | lib/ 40 | lib64/ 41 | parts/ 42 | sdist/ 43 | var/ 44 | wheels/ 45 | share/python-wheels/ 46 | *.egg-info/ 47 | .installed.cfg 48 | *.egg 49 | MANIFEST 50 | 51 | # PyInstaller 52 | # Usually these files are written by a python script from a template 53 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 54 | *.manifest 55 | *.spec 56 | 57 | # Installer logs 58 | pip-log.txt 59 | pip-delete-this-directory.txt 60 | 61 | # Unit test / coverage reports 62 | htmlcov/ 63 | .tox/ 64 | .nox/ 65 | .coverage 66 | .coverage.* 67 | .cache 68 | nosetests.xml 69 | coverage.xml 70 | *.cover 71 | *.py,cover 72 | .hypothesis/ 73 | .pytest_cache/ 74 | cover/ 75 | 76 | # Translations 77 | *.mo 78 | *.pot 79 | 80 | # Django stuff: 81 | *.log 82 | local_settings.py 83 | db.sqlite3 84 | db.sqlite3-journal 85 | 86 | # Flask stuff: 87 | instance/ 88 | .webassets-cache 89 | 90 | # Scrapy stuff: 91 | .scrapy 92 | 93 | # Sphinx documentation 94 | docs/_build/ 95 | 96 | # PyBuilder 97 | .pybuilder/ 98 | target/ 99 | 100 | # Jupyter Notebook 101 | .ipynb_checkpoints 102 | 103 | # IPython 104 | profile_default/ 105 | ipython_config.py 106 | 107 | # pyenv 108 | # For a library or package, you might want to ignore these files since the code is 109 | # intended to run in multiple environments; otherwise, check them in: 110 | # .python-version 111 | 112 | # pipenv 113 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 114 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 115 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 116 | # install all needed dependencies. 117 | #Pipfile.lock 118 | 119 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 120 | __pypackages__/ 121 | 122 | # Celery stuff 123 | celerybeat-schedule 124 | celerybeat.pid 125 | 126 | # SageMath parsed files 127 | *.sage.py 128 | 129 | # Environments 130 | .env 131 | .venv 132 | env/ 133 | venv/ 134 | ENV/ 135 | env.bak/ 136 | venv.bak/ 137 | 138 | # Spyder project settings 139 | .spyderproject 140 | .spyproject 141 | 142 | # Rope project settings 143 | .ropeproject 144 | 145 | # mkdocs documentation 146 | /site 147 | 148 | # mypy 149 | .mypy_cache/ 150 | .dmypy.json 151 | dmypy.json 152 | 153 | # Pyre type checker 154 | .pyre/ 155 | 156 | # pytype static type analyzer 157 | .pytype/ 158 | 159 | # Cython debug symbols 160 | cython_debug/ 161 | # Generated by Cargo 162 | # will have compiled files and executables 163 | **/target/ 164 | # These are backup files generated by rustfmt 165 | **/*.rs.bk 166 | 167 | .DS_Store 168 | 169 | # The cache for docker container dependency 170 | .cargo 171 | 172 | # The cache for chain data in container 173 | .local 174 | 175 | # State folder for all neurons. 176 | **/data/* 177 | !data/.gitkeep 178 | 179 | # misc 180 | .DS_Store 181 | .env.local 182 | .env.development.local 183 | .env.test.local 184 | .env.production.local 185 | 186 | # PIPY Stuff 187 | bittensor.egg-info 188 | bittensor*.egg 189 | bdist.* 190 | 191 | npm-debug.log* 192 | yarn-debug.log* 193 | yarn-error.log* 194 | 195 | **/build/* 196 | **/dist/* 197 | **/runs/* 198 | **/env/* 199 | **/data/* 200 | **/.data/* 201 | **/tmp/* 202 | 203 | **/.bash_history 204 | **/*.xml 205 | **/*.pstats 206 | **/*.png 207 | 208 | # Replicate library 209 | **/.replicate 210 | replicate.yaml 211 | **/run.sh 212 | 213 | # Notebooks 214 | *.ipynb 215 | 216 | tests/zombienet/bin/**/* -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1 2 | FROM python:3.11.8-bookworm 3 | 4 | LABEL bittensor.image.authors="bittensor.com" \ 5 | bittensor.image.vendor="Bittensor" \ 6 | bittensor.image.title="bittensor/bittensor" \ 7 | bittensor.image.description="Bittensor: Incentivized Peer to Peer Neural Networks" \ 8 | bittensor.image.source="https://github.com/opentensor/bittensor.git" \ 9 | bittensor.image.revision="${VCS_REF}" \ 10 | bittensor.image.created="${BUILD_DATE}" \ 11 | bittensor.image.documentation="https://app.gitbook.com/@opentensor/s/bittensor/" 12 | ARG DEBIAN_FRONTEND=noninteractive 13 | 14 | # Update the base image 15 | RUN apt-get update && apt-get upgrade -y 16 | # Install bittensor 17 | ## Install dependencies 18 | RUN apt-get install -y curl sudo nano git htop netcat-openbsd wget unzip tmux apt-utils cmake build-essential 19 | ## Upgrade pip 20 | RUN pip3 install --upgrade pip 21 | 22 | # Install nvm and pm2 23 | RUN curl -o install_nvm.sh https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.1/install.sh && \ 24 | echo 'fabc489b39a5e9c999c7cab4d281cdbbcbad10ec2f8b9a7f7144ad701b6bfdc7 install_nvm.sh' | sha256sum --check && \ 25 | bash install_nvm.sh 26 | 27 | RUN bash -c "source $HOME/.nvm/nvm.sh && \ 28 | # use node 16 29 | nvm install 16 && \ 30 | # install pm2 31 | npm install --location=global pm2" 32 | 33 | RUN mkdir -p /root/.bittensor/bittensor 34 | COPY . /root/.bittensor/bittensor 35 | RUN cd /root/.bittensor/bittensor && python3 -m pip install . 
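# Illustrative usage (hypothetical local tag, not part of the original Dockerfile):
#   docker build -t bittensor:local .
#   docker run -it --rm -p 8091:8091 bittensor:local
# The -p mapping mirrors the EXPOSE 8091 directive at the end of this file.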
36 | 37 | # Increase ulimit to 1,000,000 38 | RUN prlimit --pid=$PPID --nofile=1000000 39 | 40 | EXPOSE 8091 41 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | Copyright © 2021 Yuma Rao 3 | 4 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 5 | documentation files (the “Software”), to deal in the Software without restriction, including without limitation 6 | the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, 7 | and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 8 | 9 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of 10 | the Software. 11 | 12 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO 13 | THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 14 | THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 15 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 16 | DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | SHELL:=/bin/bash 2 | 3 | init-venv: 4 | python3 -m venv venv && source ./venv/bin/activate 5 | 6 | clean-venv: 7 | source ./venv/bin/activate && \ 8 | pip freeze > make_venv_to_uninstall.txt && \ 9 | pip uninstall -r make_venv_to_uninstall.txt && \ 10 | rm make_venv_to_uninstall.txt 11 | 12 | clean: 13 | rm -rf dist/ && \ 14 | rm -rf build/ && \ 15 | rm -rf bittensor.egg-info/ && \ 16 | rm -rf .pytest_cache/ && \ 17 | rm -rf lib/ 18 | 19 | install: 20 | python3 -m pip install . 21 | 22 | install-dev: 23 | python3 -m pip install '.[dev]' 24 | -------------------------------------------------------------------------------- /bittensor/__init__.py: -------------------------------------------------------------------------------- 1 | import warnings 2 | 3 | from .core.settings import __version__, version_split, DEFAULTS, DEFAULT_NETWORK 4 | from .utils.btlogging import logging 5 | from .utils.easy_imports import * 6 | 7 | 8 | def __getattr__(name): 9 | if name == "version_split": 10 | warnings.warn( 11 | "version_split is deprecated and will be removed in future versions. 
Use __version__ instead.", 12 | DeprecationWarning, 13 | ) 14 | return version_split 15 | raise AttributeError(f"module {__name__} has no attribute {name}") 16 | -------------------------------------------------------------------------------- /bittensor/__main__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import sys 4 | 5 | from bittensor import __version__ 6 | from bittensor.utils.version import check_latest_version_in_pypi 7 | 8 | if __name__ == "__main__": 9 | if len(sys.argv) > 1 and sys.argv[1] == "certifi": 10 | # Resolve the path to certifi.sh 11 | certifi_script = os.path.join(os.path.dirname(__file__), "utils", "certifi.sh") 12 | if not os.path.exists(certifi_script): 13 | print(f"Error: certifi.sh not found at {certifi_script}") 14 | sys.exit(1) 15 | 16 | # Ensure the script is executable 17 | os.chmod(certifi_script, 0o755) 18 | 19 | # Run the script 20 | subprocess.run([certifi_script], check=True) 21 | else: 22 | print(f"Installed Bittensor SDK version: {__version__}") 23 | check_latest_version_in_pypi() 24 | -------------------------------------------------------------------------------- /bittensor/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/bittensor/core/__init__.py -------------------------------------------------------------------------------- /bittensor/core/chain_data/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module provides data structures and functions for working with the Bittensor network, including neuron and subnet 3 | information, SCALE encoding/decoding, and custom RPC type registry. 
4 | """ 5 | 6 | from scalecodec.types import GenericCall 7 | 8 | from .axon_info import AxonInfo 9 | from .chain_identity import ChainIdentity 10 | from .delegate_info import DelegateInfo, DelegatedInfo 11 | from .delegate_info_lite import DelegateInfoLite 12 | from .dynamic_info import DynamicInfo 13 | from .ip_info import IPInfo 14 | from .metagraph_info import ( 15 | MetagraphInfo, 16 | MetagraphInfoEmissions, 17 | MetagraphInfoPool, 18 | MetagraphInfoParams, 19 | SelectiveMetagraphIndex, 20 | ) 21 | from .neuron_info import NeuronInfo 22 | from .neuron_info_lite import NeuronInfoLite 23 | from .prometheus_info import PrometheusInfo 24 | from .proposal_vote_data import ProposalVoteData 25 | from .scheduled_coldkey_swap_info import ScheduledColdkeySwapInfo 26 | from .stake_info import StakeInfo 27 | from .subnet_hyperparameters import SubnetHyperparameters 28 | from .subnet_identity import SubnetIdentity 29 | from .subnet_info import SubnetInfo 30 | from .subnet_state import SubnetState 31 | from .weight_commit_info import WeightCommitInfo 32 | from .utils import decode_account_id, process_stake_data 33 | 34 | ProposalCallData = GenericCall 35 | 36 | __all__ = [ 37 | AxonInfo, 38 | ChainIdentity, 39 | DelegateInfo, 40 | DelegatedInfo, 41 | DelegateInfoLite, 42 | DynamicInfo, 43 | IPInfo, 44 | MetagraphInfo, 45 | MetagraphInfoEmissions, 46 | MetagraphInfoParams, 47 | MetagraphInfoPool, 48 | NeuronInfo, 49 | NeuronInfoLite, 50 | PrometheusInfo, 51 | ProposalCallData, 52 | ProposalVoteData, 53 | ScheduledColdkeySwapInfo, 54 | SelectiveMetagraphIndex, 55 | StakeInfo, 56 | SubnetHyperparameters, 57 | SubnetIdentity, 58 | SubnetInfo, 59 | SubnetState, 60 | WeightCommitInfo, 61 | decode_account_id, 62 | process_stake_data, 63 | ] 64 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/chain_identity.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from bittensor.core.chain_data.info_base import InfoBase 3 | 4 | 5 | @dataclass 6 | class ChainIdentity(InfoBase): 7 | """Dataclass for chain identity information.""" 8 | 9 | name: str 10 | url: str 11 | github: str 12 | image: str 13 | discord: str 14 | description: str 15 | additional: str 16 | 17 | @classmethod 18 | def _from_dict(cls, decoded: dict) -> "ChainIdentity": 19 | """Returns a ChainIdentity object from decoded chain data.""" 20 | return cls( 21 | name=decoded["name"], 22 | url=decoded["url"], 23 | github=decoded["github_repo"], 24 | image=decoded["image"], 25 | discord=decoded["discord"], 26 | description=decoded["description"], 27 | additional=decoded["additional"], 28 | ) 29 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/delegate_info_lite.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from bittensor.core.chain_data.info_base import InfoBase 4 | from bittensor.core.chain_data.utils import decode_account_id 5 | from bittensor.utils import u16_normalized_float 6 | from bittensor.utils.balance import Balance 7 | 8 | 9 | @dataclass 10 | class DelegateInfoLite(InfoBase): 11 | """ 12 | Dataclass for `DelegateLiteInfo`. This is a lighter version of :func:``DelegateInfo``. 13 | 14 | Args: 15 | delegate_ss58 (str): Hotkey of the delegate for which the information is being fetched. 16 | take (float): Take of the delegate as a percentage. 
17 | nominators (int): Count of the nominators of the delegate. 18 | owner_ss58 (str): Coldkey of the owner. 19 | registrations (list[int]): List of subnets that the delegate is registered on. 20 | validator_permits (list[int]): List of subnets that the delegate is allowed to validate on. 21 | return_per_1000 (int): Return per 1000 TAO, for the delegate over a day. 22 | total_daily_return (int): Total daily return of the delegate. 23 | """ 24 | 25 | delegate_ss58: str # Hotkey of delegate 26 | take: float # Take of the delegate as a percentage 27 | nominators: int # Count of the nominators of the delegate. 28 | owner_ss58: str # Coldkey of owner 29 | registrations: list[int] # List of subnets that the delegate is registered on 30 | validator_permits: list[ 31 | int 32 | ] # List of subnets that the delegate is allowed to validate on 33 | return_per_1000: Balance # Return per 1000 tao for the delegate over a day 34 | total_daily_return: Balance # Total daily return of the delegate 35 | 36 | @classmethod 37 | def _from_dict(cls, decoded: dict) -> "DelegateInfoLite": 38 | return DelegateInfoLite( 39 | delegate_ss58=decode_account_id(decoded["delegate_ss58"]), 40 | take=u16_normalized_float(decoded["take"]), 41 | nominators=decoded["nominators"], 42 | owner_ss58=decode_account_id(decoded["owner_ss58"]), 43 | registrations=decoded["registrations"], 44 | validator_permits=decoded["validator_permits"], 45 | return_per_1000=Balance.from_rao(decoded["return_per_1000"]), 46 | total_daily_return=Balance.from_rao(decoded["total_daily_return"]), 47 | ) 48 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/info_base.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, TypeVar 3 | 4 | from bittensor.core.errors import SubstrateRequestException 5 | 6 | T = TypeVar("T", bound="InfoBase") 7 | 8 | 9 | @dataclass 10 | class InfoBase: 11 | """Base dataclass for info objects.""" 12 | 13 | @classmethod 14 | def from_dict(cls, decoded: dict) -> T: 15 | try: 16 | return cls._from_dict(decoded) 17 | except KeyError as e: 18 | raise SubstrateRequestException( 19 | f"The {cls} structure is missing {e} from the chain.", 20 | ) 21 | 22 | @classmethod 23 | def list_from_dicts(cls, any_list: list[Any]) -> list[T]: 24 | return [cls.from_dict(any_) for any_ in any_list] 25 | 26 | @classmethod 27 | def _from_dict(cls, decoded: dict) -> T: 28 | return cls(**decoded) 29 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/ip_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Union 3 | 4 | from bittensor.utils import networking as net 5 | from bittensor.utils.registration import torch, use_torch 6 | 7 | 8 | @dataclass 9 | class IPInfo: 10 | """ 11 | Dataclass representing IP information. 12 | 13 | Attributes: 14 | ip (str): The IP address as a string. 15 | ip_type (int): The type of the IP address (e.g., IPv4, IPv6). 16 | protocol (int): The protocol associated with the IP (e.g., TCP, UDP). 
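        Note (derived from the ``encode`` and ``_from_dict`` methods below, added here for illustration): on chain, ``ip_type`` and ``protocol`` are packed into a single byte as ``(ip_type << 4) + protocol``; for example, ``ip_type=4`` with ``protocol=0`` is stored as ``0x40`` and recovered with ``>> 4`` and ``& 0xF``.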
17 | """ 18 | 19 | ip: str 20 | ip_type: int 21 | protocol: int 22 | 23 | def encode(self) -> dict[str, Any]: 24 | """Returns a dictionary of the IPInfo object that can be encoded.""" 25 | return { 26 | "ip": net.ip_to_int( 27 | self.ip 28 | ), # IP type and protocol are encoded together as a u8 29 | "ip_type_and_protocol": ((self.ip_type << 4) + self.protocol) & 0xFF, 30 | } 31 | 32 | @classmethod 33 | def _from_dict(cls, decoded: dict) -> "IPInfo": 34 | """Returns a IPInfo object from decoded chain data.""" 35 | return IPInfo( 36 | ip_type=decoded["ip_type_and_protocol"] >> 4, 37 | ip=net.int_to_ip(decoded["ip"]), 38 | protocol=decoded["ip_type_and_protocol"] & 0xF, 39 | ) 40 | 41 | def to_parameter_dict( 42 | self, 43 | ) -> Union[dict[str, Union[str, int]], "torch.nn.ParameterDict"]: 44 | """Returns a torch tensor or dict of the subnet IP info.""" 45 | if use_torch(): 46 | return torch.nn.ParameterDict(self.__dict__) 47 | else: 48 | return self.__dict__ 49 | 50 | @classmethod 51 | def from_parameter_dict( 52 | cls, parameter_dict: Union[dict[str, Any], "torch.nn.ParameterDict"] 53 | ) -> "IPInfo": 54 | """Creates a IPInfo instance from a parameter dictionary.""" 55 | if use_torch(): 56 | return cls(**dict(parameter_dict)) 57 | else: 58 | return cls(**parameter_dict) 59 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/prometheus_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | import netaddr 4 | 5 | from bittensor.core.chain_data.info_base import InfoBase 6 | 7 | 8 | @dataclass 9 | class PrometheusInfo(InfoBase): 10 | """ 11 | Dataclass representing information related to Prometheus. 12 | 13 | Attributes: 14 | block (int): The block number associated with the Prometheus data. 15 | version (int): The version of the Prometheus data. 16 | ip (str): The IP address associated with Prometheus. 17 | port (int): The port number for Prometheus. 18 | ip_type (int): The type of IP address (e.g., IPv4, IPv6). 
19 | """ 20 | 21 | block: int 22 | version: int 23 | ip: str 24 | port: int 25 | ip_type: int 26 | 27 | @classmethod 28 | def _from_dict(cls, data): 29 | """Returns a PrometheusInfo object from decoded chain data.""" 30 | return cls( 31 | block=data["block"], 32 | ip_type=data["ip_type"], 33 | ip=str(netaddr.IPAddress(data["ip"])), 34 | port=data["port"], 35 | version=data["version"], 36 | ) 37 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/proposal_vote_data.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from bittensor.core.chain_data.info_base import InfoBase 4 | from bittensor.core.chain_data.utils import decode_account_id 5 | 6 | 7 | @dataclass 8 | class ProposalVoteData(InfoBase): 9 | """ 10 | Senate / Proposal data 11 | """ 12 | 13 | index: int 14 | threshold: int 15 | ayes: list[str] 16 | nays: list[str] 17 | end: int 18 | 19 | @classmethod 20 | def from_dict(cls, proposal_dict: dict) -> "ProposalVoteData": 21 | return cls( 22 | ayes=[decode_account_id(key) for key in proposal_dict["ayes"]], 23 | end=proposal_dict["end"], 24 | index=proposal_dict["index"], 25 | nays=[decode_account_id(key) for key in proposal_dict["nays"]], 26 | threshold=proposal_dict["threshold"], 27 | ) 28 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/scheduled_coldkey_swap_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Optional 3 | 4 | from scalecodec.utils.ss58 import ss58_encode 5 | 6 | from bittensor.core.chain_data.info_base import InfoBase 7 | from bittensor.core.chain_data.utils import from_scale_encoding, ChainDataType 8 | from bittensor.core.settings import SS58_FORMAT 9 | 10 | 11 | @dataclass 12 | class ScheduledColdkeySwapInfo(InfoBase): 13 | """ 14 | The `ScheduledColdkeySwapInfo` class is a dataclass representing information about scheduled cold key swaps. 15 | 16 | Attributes: 17 | old_coldkey (str): The old cold key before the swap. 18 | new_coldkey (str): The new cold key after the swap. 19 | arbitration_block (int): The block number at which the arbitration of the swap will take place. 
20 | """ 21 | 22 | old_coldkey: str 23 | new_coldkey: str 24 | arbitration_block: int 25 | 26 | @classmethod 27 | def _from_dict(cls, decoded: dict) -> "ScheduledColdkeySwapInfo": 28 | """Returns a ScheduledColdkeySwapInfo object from decoded chain data.""" 29 | return cls( 30 | arbitration_block=decoded["arbitration_block"], 31 | new_coldkey=ss58_encode(decoded["new_coldkey"], SS58_FORMAT), 32 | old_coldkey=ss58_encode(decoded["old_coldkey"], SS58_FORMAT), 33 | ) 34 | 35 | @classmethod 36 | def decode_account_id_list(cls, vec_u8: list[int]) -> Optional[list[str]]: 37 | """Decodes a list of AccountIds from vec_u8.""" 38 | decoded = from_scale_encoding( 39 | vec_u8, ChainDataType.ScheduledColdkeySwapInfo.AccountId, is_vec=True 40 | ) 41 | if decoded is None: 42 | return None 43 | return [ss58_encode(account_id, SS58_FORMAT) for account_id in decoded] 44 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/stake_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from bittensor.core.chain_data.info_base import InfoBase 4 | from bittensor.core.chain_data.utils import decode_account_id 5 | from bittensor.utils.balance import Balance 6 | 7 | 8 | @dataclass 9 | class StakeInfo(InfoBase): 10 | """ 11 | Dataclass for representing stake information linked to hotkey and coldkey pairs. 12 | 13 | Attributes: 14 | hotkey_ss58 (str): The SS58 encoded hotkey address. 15 | coldkey_ss58 (str): The SS58 encoded coldkey address. 16 | stake (Balance): The stake associated with the hotkey-coldkey pair, represented as a Balance object. 17 | """ 18 | 19 | hotkey_ss58: str # Hotkey address 20 | coldkey_ss58: str # Coldkey address 21 | netuid: int # Network UID 22 | stake: Balance # Stake for the hotkey-coldkey pair 23 | locked: Balance # Stake which is locked. 24 | emission: Balance # Emission for the hotkey-coldkey pair 25 | drain: int 26 | is_registered: bool 27 | 28 | @classmethod 29 | def from_dict(cls, decoded: dict) -> "StakeInfo": 30 | """Returns a StakeInfo object from decoded chain data.""" 31 | netuid = decoded["netuid"] 32 | return cls( 33 | hotkey_ss58=decode_account_id(decoded["hotkey"]), 34 | coldkey_ss58=decode_account_id(decoded["coldkey"]), 35 | netuid=int(netuid), 36 | stake=Balance.from_rao(decoded["stake"]).set_unit(netuid), 37 | locked=Balance.from_rao(decoded["locked"]).set_unit(netuid), 38 | emission=Balance.from_rao(decoded["emission"]).set_unit(netuid), 39 | drain=int(decoded["drain"]), 40 | is_registered=bool(decoded["is_registered"]), 41 | ) 42 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/subnet_hyperparameters.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | from bittensor.core.chain_data.info_base import InfoBase 4 | 5 | 6 | @dataclass 7 | class SubnetHyperparameters(InfoBase): 8 | """ 9 | This class represents the hyperparameters for a subnet. 10 | 11 | Attributes: 12 | rho (int): The rate of decay of some value. 13 | kappa (int): A constant multiplier used in calculations. 14 | immunity_period (int): The period during which immunity is active. 15 | min_allowed_weights (int): Minimum allowed weights. 16 | max_weight_limit (float): Maximum weight limit. 17 | tempo (int): The tempo or rate of operation. 18 | min_difficulty (int): Minimum difficulty for some operations. 
19 | max_difficulty (int): Maximum difficulty for some operations. 20 | weights_version (int): The version number of the weights used. 21 | weights_rate_limit (int): Rate limit for processing weights. 22 | adjustment_interval (int): Interval at which adjustments are made. 23 | activity_cutoff (int): Activity cutoff threshold. 24 | registration_allowed (bool): Indicates if registration is allowed. 25 | target_regs_per_interval (int): Target number of registrations per interval. 26 | min_burn (int): Minimum burn value. 27 | max_burn (int): Maximum burn value. 28 | bonds_moving_avg (int): Moving average of bonds. 29 | max_regs_per_block (int): Maximum number of registrations per block. 30 | serving_rate_limit (int): Limit on the rate of service. 31 | max_validators (int): Maximum number of validators. 32 | adjustment_alpha (int): Alpha value for adjustments. 33 | difficulty (int): Difficulty level. 34 | commit_reveal_period (int): Interval for commit-reveal weights. 35 | commit_reveal_weights_enabled (bool): Flag indicating if commit-reveal weights are enabled. 36 | alpha_high (int): High value of alpha. 37 | alpha_low (int): Low value of alpha. 38 | liquid_alpha_enabled (bool): Flag indicating if liquid alpha is enabled. 39 | """ 40 | 41 | rho: int 42 | kappa: int 43 | immunity_period: int 44 | min_allowed_weights: int 45 | max_weight_limit: float 46 | tempo: int 47 | min_difficulty: int 48 | max_difficulty: int 49 | weights_version: int 50 | weights_rate_limit: int 51 | adjustment_interval: int 52 | activity_cutoff: int 53 | registration_allowed: bool 54 | target_regs_per_interval: int 55 | min_burn: int 56 | max_burn: int 57 | bonds_moving_avg: int 58 | max_regs_per_block: int 59 | serving_rate_limit: int 60 | max_validators: int 61 | adjustment_alpha: int 62 | difficulty: int 63 | commit_reveal_period: int 64 | commit_reveal_weights_enabled: bool 65 | alpha_high: int 66 | alpha_low: int 67 | liquid_alpha_enabled: bool 68 | 69 | @classmethod 70 | def _from_dict(cls, decoded: dict) -> "SubnetHyperparameters": 71 | """Returns a SubnetHyperparameters object from decoded chain data.""" 72 | return SubnetHyperparameters( 73 | activity_cutoff=decoded["activity_cutoff"], 74 | adjustment_alpha=decoded["adjustment_alpha"], 75 | adjustment_interval=decoded["adjustment_interval"], 76 | alpha_high=decoded["alpha_high"], 77 | alpha_low=decoded["alpha_low"], 78 | bonds_moving_avg=decoded["bonds_moving_avg"], 79 | commit_reveal_weights_enabled=decoded["commit_reveal_weights_enabled"], 80 | commit_reveal_period=decoded["commit_reveal_period"], 81 | difficulty=decoded["difficulty"], 82 | immunity_period=decoded["immunity_period"], 83 | kappa=decoded["kappa"], 84 | liquid_alpha_enabled=decoded["liquid_alpha_enabled"], 85 | max_burn=decoded["max_burn"], 86 | max_difficulty=decoded["max_difficulty"], 87 | max_regs_per_block=decoded["max_regs_per_block"], 88 | max_validators=decoded["max_validators"], 89 | max_weight_limit=decoded["max_weights_limit"], 90 | min_allowed_weights=decoded["min_allowed_weights"], 91 | min_burn=decoded["min_burn"], 92 | min_difficulty=decoded["min_difficulty"], 93 | registration_allowed=decoded["registration_allowed"], 94 | rho=decoded["rho"], 95 | serving_rate_limit=decoded["serving_rate_limit"], 96 | target_regs_per_interval=decoded["target_regs_per_interval"], 97 | tempo=decoded["tempo"], 98 | weights_rate_limit=decoded["weights_rate_limit"], 99 | weights_version=decoded["weights_version"], 100 | ) 101 | 
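A minimal usage sketch (not part of the repository; the decoded mapping below is hypothetical): the chain-data classes above are all built through the shared ``InfoBase.from_dict`` / ``list_from_dicts`` helpers, which map an already-decoded storage value onto the dataclass fields and surface missing keys as ``SubstrateRequestException``.

from bittensor.core.chain_data import ChainIdentity

# Hypothetical, already-decoded storage value; real data comes from chain queries.
decoded = {
    "name": "example-identity",
    "url": "https://example.com",
    "github_repo": "https://github.com/example/repo",
    "image": "",
    "discord": "",
    "description": "an example identity",
    "additional": "",
}

identity = ChainIdentity.from_dict(decoded)             # single record -> ChainIdentity
identities = ChainIdentity.list_from_dicts([decoded])   # list of records -> list[ChainIdentity]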
-------------------------------------------------------------------------------- /bittensor/core/chain_data/subnet_identity.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | 3 | 4 | @dataclass 5 | class SubnetIdentity: 6 | """Dataclass for subnet identity information.""" 7 | 8 | subnet_name: str 9 | github_repo: str 10 | subnet_contact: str 11 | subnet_url: str 12 | discord: str 13 | description: str 14 | additional: str 15 | 16 | @classmethod 17 | def _from_dict(cls, decoded: dict) -> "SubnetIdentity": 18 | """Returns a SubnetIdentity object from decoded chain data.""" 19 | return cls( 20 | subnet_name=decoded["subnet_name"], 21 | github_repo=decoded["github_repo"], 22 | subnet_contact=decoded["subnet_contact"], 23 | subnet_url=decoded["subnet_url"], 24 | discord=decoded["discord"], 25 | description=decoded["description"], 26 | additional=decoded["additional"], 27 | ) 28 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/subnet_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any 3 | 4 | from bittensor.core.chain_data.info_base import InfoBase 5 | from bittensor.core.chain_data.utils import decode_account_id 6 | from bittensor.utils import u16_normalized_float 7 | from bittensor.utils.balance import Balance 8 | 9 | 10 | @dataclass 11 | class SubnetInfo(InfoBase): 12 | """Dataclass for subnet info.""" 13 | 14 | netuid: int 15 | rho: int 16 | kappa: int 17 | difficulty: int 18 | immunity_period: int 19 | max_allowed_validators: int 20 | min_allowed_weights: int 21 | max_weight_limit: float 22 | scaling_law_power: float 23 | subnetwork_n: int 24 | max_n: int 25 | blocks_since_epoch: int 26 | tempo: int 27 | modality: int 28 | connection_requirements: dict[str, float] 29 | emission_value: float 30 | burn: Balance 31 | owner_ss58: str 32 | 33 | @classmethod 34 | def _from_dict(cls, decoded: Any) -> "SubnetInfo": 35 | """Returns a SubnetInfo object from decoded chain data.""" 36 | return SubnetInfo( 37 | blocks_since_epoch=decoded["blocks_since_last_step"], 38 | burn=Balance.from_rao(decoded["burn"]), 39 | connection_requirements={ 40 | str(int(netuid)): u16_normalized_float(int(req)) 41 | for (netuid, req) in decoded["network_connect"] 42 | }, 43 | difficulty=decoded["difficulty"], 44 | emission_value=decoded["emission_value"], 45 | immunity_period=decoded["immunity_period"], 46 | kappa=decoded["kappa"], 47 | max_allowed_validators=decoded["max_allowed_validators"], 48 | max_n=decoded["max_allowed_uids"], 49 | max_weight_limit=decoded["max_weights_limit"], 50 | min_allowed_weights=decoded["min_allowed_weights"], 51 | modality=decoded["network_modality"], 52 | netuid=decoded["netuid"], 53 | owner_ss58=decode_account_id(decoded["owner"]), 54 | rho=decoded["rho"], 55 | scaling_law_power=decoded["scaling_law_power"], 56 | subnetwork_n=decoded["subnetwork_n"], 57 | tempo=decoded["tempo"], 58 | ) 59 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/subnet_state.py: -------------------------------------------------------------------------------- 1 | """ 2 | This module defines the `SubnetState` data class and associated methods for handling and decoding 3 | subnetwork states in the Bittensor network. 
4 | """ 5 | 6 | from dataclasses import dataclass 7 | 8 | from bittensor.core.chain_data.info_base import InfoBase 9 | from bittensor.core.chain_data.utils import decode_account_id 10 | from bittensor.utils import u16_normalized_float 11 | from bittensor.utils.balance import Balance 12 | 13 | 14 | @dataclass 15 | class SubnetState(InfoBase): 16 | netuid: int 17 | hotkeys: list[str] 18 | coldkeys: list[str] 19 | active: list[bool] 20 | validator_permit: list[bool] 21 | pruning_score: list[float] 22 | last_update: list[int] 23 | emission: list["Balance"] 24 | dividends: list[float] 25 | incentives: list[float] 26 | consensus: list[float] 27 | trust: list[float] 28 | rank: list[float] 29 | block_at_registration: list[int] 30 | alpha_stake: list["Balance"] 31 | tao_stake: list["Balance"] 32 | total_stake: list["Balance"] 33 | emission_history: list[list[int]] 34 | 35 | @classmethod 36 | def _from_dict(cls, decoded: dict) -> "SubnetState": 37 | """Returns a SubnetState object from decoded chain data.""" 38 | netuid = decoded["netuid"] 39 | return SubnetState( 40 | netuid=netuid, 41 | hotkeys=[decode_account_id(hk) for hk in decoded.get("hotkeys", [])], 42 | coldkeys=[decode_account_id(ck) for ck in decoded.get("coldkeys", [])], 43 | active=decoded["active"], 44 | validator_permit=decoded["validator_permit"], 45 | pruning_score=[ 46 | u16_normalized_float(val) for val in decoded["pruning_score"] 47 | ], 48 | last_update=decoded["last_update"], 49 | emission=[ 50 | Balance.from_rao(val).set_unit(netuid) for val in decoded["emission"] 51 | ], 52 | dividends=[u16_normalized_float(val) for val in decoded["dividends"]], 53 | incentives=[u16_normalized_float(val) for val in decoded["incentives"]], 54 | consensus=[u16_normalized_float(val) for val in decoded["consensus"]], 55 | trust=[u16_normalized_float(val) for val in decoded["trust"]], 56 | rank=[u16_normalized_float(val) for val in decoded["rank"]], 57 | block_at_registration=decoded["block_at_registration"], 58 | alpha_stake=[ 59 | Balance.from_rao(val).set_unit(netuid) for val in decoded["alpha_stake"] 60 | ], 61 | tao_stake=[ 62 | Balance.from_rao(val).set_unit(0) for val in decoded["tao_stake"] 63 | ], 64 | total_stake=[ 65 | Balance.from_rao(val).set_unit(netuid) for val in decoded["total_stake"] 66 | ], 67 | emission_history=decoded["emission_history"], 68 | ) 69 | -------------------------------------------------------------------------------- /bittensor/core/chain_data/weight_commit_info.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from bittensor.core.chain_data.utils import decode_account_id 3 | 4 | 5 | @dataclass 6 | class WeightCommitInfo: 7 | """ 8 | Data class representing weight commit information. 
9 | 10 | Attributes: 11 | ss58 (str): The SS58 address of the committer 12 | commit_hex (str): The serialized weight commit data as hex string 13 | reveal_round (int): The round number for reveal 14 | """ 15 | 16 | ss58: str 17 | commit_hex: str 18 | reveal_round: int 19 | 20 | @classmethod 21 | def from_vec_u8(cls, data: tuple) -> tuple[str, str, int]: 22 | """ 23 | Creates a WeightCommitInfo instance 24 | 25 | Args: 26 | data (tuple): Tuple containing ((AccountId,), (commit_data,), round_number) 27 | 28 | Returns: 29 | WeightCommitInfo: A new instance with the decoded data 30 | """ 31 | account_id, commit_data, round_number = data 32 | 33 | account_id_ = account_id[0] if isinstance(account_id, tuple) else account_id 34 | commit_data = commit_data[0] if isinstance(commit_data, tuple) else commit_data 35 | commit_hex = "0x" + "".join(format(x, "02x") for x in commit_data) 36 | 37 | return decode_account_id(account_id_), commit_hex, round_number 38 | -------------------------------------------------------------------------------- /bittensor/core/extrinsics/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/bittensor/core/extrinsics/__init__.py -------------------------------------------------------------------------------- /bittensor/core/extrinsics/asyncex/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/bittensor/core/extrinsics/asyncex/__init__.py -------------------------------------------------------------------------------- /bittensor/core/extrinsics/asyncex/start_call.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Optional 2 | 3 | from bittensor.utils import unlock_key 4 | from bittensor.utils.btlogging import logging 5 | 6 | if TYPE_CHECKING: 7 | from bittensor_wallet import Wallet 8 | from bittensor.core.async_subtensor import AsyncSubtensor 9 | 10 | 11 | async def start_call_extrinsic( 12 | subtensor: "AsyncSubtensor", 13 | wallet: "Wallet", 14 | netuid: int, 15 | wait_for_inclusion: bool = True, 16 | wait_for_finalization: bool = False, 17 | period: Optional[int] = None, 18 | ) -> tuple[bool, str]: 19 | """ 20 | Submits a start_call extrinsic to the blockchain, to trigger the start call process for a subnet (used to start a 21 | new subnet's emission mechanism). 22 | 23 | Args: 24 | subtensor (Subtensor): The Subtensor client instance used for blockchain interaction. 25 | wallet (Wallet): The wallet used to sign the extrinsic (must be unlocked). 26 | netuid (int): The UID of the target subnet for which the call is being initiated. 27 | wait_for_inclusion (bool, optional): Whether to wait for the extrinsic to be included in a block. Defaults to True. 28 | wait_for_finalization (bool, optional): Whether to wait for finalization of the extrinsic. Defaults to False. 29 | period: The number of blocks during which the transaction will remain valid after it's submitted. If 30 | the transaction is not included in a block within that number of blocks, it will expire and be rejected. 31 | You can think of it as an expiration date for the transaction. 32 | 33 | Returns: 34 | Tuple[bool, str]: 35 | - True and a success message if the extrinsic is successfully submitted or processed. 
36 | - False and an error message if the submission fails or the wallet cannot be unlocked. 37 | """ 38 | if not (unlock := unlock_key(wallet)).success: 39 | logging.error(unlock.message) 40 | return False, unlock.message 41 | 42 | async with subtensor.substrate as substrate: 43 | start_call = await substrate.compose_call( 44 | call_module="SubtensorModule", 45 | call_function="start_call", 46 | call_params={"netuid": netuid}, 47 | ) 48 | 49 | success, message = await subtensor.sign_and_send_extrinsic( 50 | call=start_call, 51 | wallet=wallet, 52 | wait_for_inclusion=wait_for_inclusion, 53 | wait_for_finalization=wait_for_finalization, 54 | period=period, 55 | ) 56 | 57 | if not wait_for_finalization and not wait_for_inclusion: 58 | return True, message 59 | 60 | if success: 61 | return True, "Success with `start_call` response." 62 | 63 | return True, message 64 | -------------------------------------------------------------------------------- /bittensor/core/extrinsics/asyncex/take.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Optional 2 | 3 | from bittensor_wallet.bittensor_wallet import Wallet 4 | 5 | from bittensor.utils import unlock_key 6 | 7 | if TYPE_CHECKING: 8 | from bittensor.core.async_subtensor import AsyncSubtensor 9 | 10 | 11 | async def increase_take_extrinsic( 12 | subtensor: "AsyncSubtensor", 13 | wallet: Wallet, 14 | hotkey_ss58: str, 15 | take: int, 16 | wait_for_inclusion: bool = True, 17 | wait_for_finalization: bool = True, 18 | raise_error: bool = False, 19 | period: Optional[int] = None, 20 | ) -> tuple[bool, str]: 21 | """Sets the delegate 'take' percentage for a neuron identified by its hotkey. 22 | 23 | Args: 24 | subtensor (Subtensor): Blockchain connection. 25 | wallet (Wallet): The wallet to sign the extrinsic. 26 | hotkey_ss58 (str): SS58 address of the hotkey to set take for. 27 | take (int): The percentage of rewards that the delegate claims from nominators. 28 | wait_for_inclusion (bool, optional): Wait for inclusion before returning. Defaults to True. 29 | wait_for_finalization (bool, optional): Wait for finalization before returning. Defaults to True. 30 | raise_error (bool, optional): Raise error on failure. Defaults to False. 31 | period: The number of blocks during which the transaction will remain valid after it's submitted. If 32 | the transaction is not included in a block within that number of blocks, it will expire and be rejected. 33 | You can think of it as an expiration date for the transaction. 34 | 35 | Returns: 36 | tuple[bool, str]: Success flag and status message. 
37 | """ 38 | 39 | unlock = unlock_key(wallet, raise_error=raise_error) 40 | 41 | if not unlock.success: 42 | return False, unlock.message 43 | 44 | call = await subtensor.substrate.compose_call( 45 | call_module="SubtensorModule", 46 | call_function="increase_take", 47 | call_params={ 48 | "hotkey": hotkey_ss58, 49 | "take": take, 50 | }, 51 | ) 52 | 53 | return await subtensor.sign_and_send_extrinsic( 54 | call=call, 55 | wallet=wallet, 56 | wait_for_inclusion=wait_for_inclusion, 57 | wait_for_finalization=wait_for_finalization, 58 | period=period, 59 | raise_error=raise_error, 60 | ) 61 | 62 | 63 | async def decrease_take_extrinsic( 64 | subtensor: "AsyncSubtensor", 65 | wallet: Wallet, 66 | hotkey_ss58: str, 67 | take: int, 68 | wait_for_inclusion: bool = True, 69 | wait_for_finalization: bool = True, 70 | raise_error: bool = False, 71 | period: Optional[int] = None, 72 | ) -> tuple[bool, str]: 73 | """Sets the delegate 'take' percentage for a neuron identified by its hotkey. 74 | 75 | Args: 76 | subtensor (Subtensor): Blockchain connection. 77 | wallet (Wallet): The wallet to sign the extrinsic. 78 | hotkey_ss58 (str): SS58 address of the hotkey to set take for. 79 | take (int): The percentage of rewards that the delegate claims from nominators. 80 | wait_for_inclusion (bool, optional): Wait for inclusion before returning. Defaults to True. 81 | wait_for_finalization (bool, optional): Wait for finalization before returning. Defaults to True. 82 | raise_error (bool, optional): Raise error on failure. Defaults to False. 83 | period: The number of blocks during which the transaction will remain valid after it's submitted. If 84 | the transaction is not included in a block within that number of blocks, it will expire and be rejected. 85 | You can think of it as an expiration date for the transaction. 86 | 87 | Returns: 88 | tuple[bool, str]: Success flag and status message. 89 | """ 90 | unlock = unlock_key(wallet, raise_error=raise_error) 91 | 92 | if not unlock.success: 93 | return False, unlock.message 94 | 95 | call = await subtensor.substrate.compose_call( 96 | call_module="SubtensorModule", 97 | call_function="decrease_take", 98 | call_params={ 99 | "hotkey": hotkey_ss58, 100 | "take": take, 101 | }, 102 | ) 103 | 104 | return await subtensor.sign_and_send_extrinsic( 105 | call=call, 106 | wallet=wallet, 107 | wait_for_inclusion=wait_for_inclusion, 108 | wait_for_finalization=wait_for_finalization, 109 | period=period, 110 | raise_error=raise_error, 111 | ) 112 | -------------------------------------------------------------------------------- /bittensor/core/extrinsics/start_call.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Optional 2 | 3 | from bittensor.utils import unlock_key 4 | from bittensor.utils.btlogging import logging 5 | 6 | if TYPE_CHECKING: 7 | from bittensor_wallet import Wallet 8 | from bittensor.core.subtensor import Subtensor 9 | 10 | 11 | def start_call_extrinsic( 12 | subtensor: "Subtensor", 13 | wallet: "Wallet", 14 | netuid: int, 15 | wait_for_inclusion: bool = True, 16 | wait_for_finalization: bool = False, 17 | period: Optional[int] = None, 18 | ) -> tuple[bool, str]: 19 | """ 20 | Submits a start_call extrinsic to the blockchain, to trigger the start call process for a subnet (used to start a 21 | new subnet's emission mechanism). 22 | 23 | Args: 24 | subtensor (Subtensor): The Subtensor client instance used for blockchain interaction. 
25 | wallet (Wallet): The wallet used to sign the extrinsic (must be unlocked). 26 | netuid (int): The UID of the target subnet for which the call is being initiated. 27 | wait_for_inclusion (bool, optional): Whether to wait for the extrinsic to be included in a block. Defaults to True. 28 | wait_for_finalization (bool, optional): Whether to wait for finalization of the extrinsic. Defaults to False. 29 | period: The number of blocks during which the transaction will remain valid after it's submitted. If 30 | the transaction is not included in a block within that number of blocks, it will expire and be rejected. 31 | You can think of it as an expiration date for the transaction. 32 | 33 | Returns: 34 | Tuple[bool, str]: 35 | - True and a success message if the extrinsic is successfully submitted or processed. 36 | - False and an error message if the submission fails or the wallet cannot be unlocked. 37 | """ 38 | if not (unlock := unlock_key(wallet)).success: 39 | logging.error(unlock.message) 40 | return False, unlock.message 41 | 42 | start_call = subtensor.substrate.compose_call( 43 | call_module="SubtensorModule", 44 | call_function="start_call", 45 | call_params={"netuid": netuid}, 46 | ) 47 | 48 | success, message = subtensor.sign_and_send_extrinsic( 49 | call=start_call, 50 | wallet=wallet, 51 | wait_for_inclusion=wait_for_inclusion, 52 | wait_for_finalization=wait_for_finalization, 53 | period=period, 54 | ) 55 | 56 | if not wait_for_finalization and not wait_for_inclusion: 57 | return True, message 58 | 59 | if success: 60 | return True, "Success with `start_call` response." 61 | 62 | return True, message 63 | -------------------------------------------------------------------------------- /bittensor/core/extrinsics/take.py: -------------------------------------------------------------------------------- 1 | from typing import TYPE_CHECKING, Optional 2 | 3 | from bittensor_wallet.bittensor_wallet import Wallet 4 | 5 | from bittensor.utils import unlock_key 6 | 7 | if TYPE_CHECKING: 8 | from bittensor.core.subtensor import Subtensor 9 | 10 | 11 | def increase_take_extrinsic( 12 | subtensor: "Subtensor", 13 | wallet: Wallet, 14 | hotkey_ss58: str, 15 | take: int, 16 | wait_for_inclusion: bool = True, 17 | wait_for_finalization: bool = True, 18 | raise_error: bool = False, 19 | period: Optional[int] = None, 20 | ) -> tuple[bool, str]: 21 | """Sets the delegate 'take' percentage for a neuron identified by its hotkey. 22 | 23 | Args: 24 | subtensor (Subtensor): Blockchain connection. 25 | wallet (Wallet): The wallet to sign the extrinsic. 26 | hotkey_ss58 (str): SS58 address of the hotkey to set take for. 27 | take (int): The percentage of rewards that the delegate claims from nominators. 28 | wait_for_inclusion (bool, optional): Wait for inclusion before returning. Defaults to True. 29 | wait_for_finalization (bool, optional): Wait for finalization before returning. Defaults to True. 30 | raise_error (bool, optional): Raise error on failure. Defaults to False. 31 | period: The number of blocks during which the transaction will remain valid after it's submitted. If 32 | the transaction is not included in a block within that number of blocks, it will expire and be rejected. 33 | You can think of it as an expiration date for the transaction. 34 | 35 | Returns: 36 | tuple[bool, str]: Success flag and status message. 
37 | """ 38 | unlock = unlock_key(wallet, raise_error=raise_error) 39 | 40 | if not unlock.success: 41 | return False, unlock.message 42 | 43 | call = subtensor.substrate.compose_call( 44 | call_module="SubtensorModule", 45 | call_function="increase_take", 46 | call_params={ 47 | "hotkey": hotkey_ss58, 48 | "take": take, 49 | }, 50 | ) 51 | 52 | return subtensor.sign_and_send_extrinsic( 53 | call=call, 54 | wallet=wallet, 55 | wait_for_inclusion=wait_for_inclusion, 56 | wait_for_finalization=wait_for_finalization, 57 | period=period, 58 | raise_error=raise_error, 59 | ) 60 | 61 | 62 | def decrease_take_extrinsic( 63 | subtensor: "Subtensor", 64 | wallet: Wallet, 65 | hotkey_ss58: str, 66 | take: int, 67 | wait_for_inclusion: bool = True, 68 | wait_for_finalization: bool = True, 69 | raise_error: bool = False, 70 | period: Optional[int] = None, 71 | ) -> tuple[bool, str]: 72 | """Sets the delegate `take` percentage for a neuron identified by its hotkey. 73 | 74 | Args: 75 | subtensor (Subtensor): Blockchain connection. 76 | wallet (Wallet): The wallet to sign the extrinsic. 77 | hotkey_ss58 (str): SS58 address of the hotkey to set take for. 78 | take (int): The percentage of rewards that the delegate claims from nominators. 79 | wait_for_inclusion (bool, optional): Wait for inclusion before returning. Defaults to True. 80 | wait_for_finalization (bool, optional): Wait for finalization before returning. Defaults to True. 81 | raise_error (bool, optional): Raise error on failure. Defaults to False. 82 | period (Optional[int]): The number of blocks during which the transaction will remain valid after it's submitted. 83 | If the transaction is not included in a block within that number of blocks, it will expire and be rejected. 84 | You can think of it as an expiration date for the transaction. 85 | 86 | Returns: 87 | tuple[bool, str]: Success flag and status message. 88 | """ 89 | unlock = unlock_key(wallet, raise_error=raise_error) 90 | 91 | if not unlock.success: 92 | return False, unlock.message 93 | 94 | call = subtensor.substrate.compose_call( 95 | call_module="SubtensorModule", 96 | call_function="decrease_take", 97 | call_params={ 98 | "hotkey": hotkey_ss58, 99 | "take": take, 100 | }, 101 | ) 102 | 103 | return subtensor.sign_and_send_extrinsic( 104 | call=call, 105 | wallet=wallet, 106 | wait_for_inclusion=wait_for_inclusion, 107 | raise_error=raise_error, 108 | wait_for_finalization=wait_for_finalization, 109 | period=period, 110 | ) 111 | -------------------------------------------------------------------------------- /bittensor/core/extrinsics/utils.py: -------------------------------------------------------------------------------- 1 | """Module with helper functions for extrinsics.""" 2 | 3 | from typing import TYPE_CHECKING 4 | 5 | from bittensor.utils.balance import Balance 6 | 7 | if TYPE_CHECKING: 8 | from bittensor_wallet import Wallet 9 | from bittensor.core.chain_data import StakeInfo 10 | 11 | 12 | def get_old_stakes( 13 | wallet: "Wallet", 14 | hotkey_ss58s: list[str], 15 | netuids: list[int], 16 | all_stakes: list["StakeInfo"], 17 | ) -> list["Balance"]: 18 | """ 19 | Retrieve the previous staking balances for a wallet's hotkeys across given netuids. 20 | 21 | This function searches through the provided staking data to find the stake amounts for the specified hotkeys and 22 | netuids associated with the wallet's coldkey. If no match is found for a particular hotkey and netuid combination, 23 | a default balance of zero is returned. 
24 | 25 | Args: 26 | wallet: The wallet containing the coldkey to compare with stake data. 27 | hotkey_ss58s: List of hotkey SS58 addresses for which stakes are retrieved. 28 | netuids: List of network unique identifiers (netuids) corresponding to the hotkeys. 29 | all_stakes: A collection of all staking information to search through. 30 | 31 | Returns: 32 | list[Balance]: A list of Balances, each representing the stake for a given hotkey and netuid. 33 | """ 34 | stake_lookup = { 35 | (stake.hotkey_ss58, stake.coldkey_ss58, stake.netuid): stake.stake 36 | for stake in all_stakes 37 | } 38 | return [ 39 | stake_lookup.get( 40 | (hotkey_ss58, wallet.coldkeypub.ss58_address, netuid), 41 | Balance.from_tao(0), # Default to 0 balance if no match found 42 | ) 43 | for hotkey_ss58, netuid in zip(hotkey_ss58s, netuids) 44 | ] 45 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/chain.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Chain: 7 | """Class for managing chain state operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.get_block_hash = subtensor.get_block_hash 11 | self.get_current_block = subtensor.get_current_block 12 | self.get_delegate_identities = subtensor.get_delegate_identities 13 | self.get_existential_deposit = subtensor.get_existential_deposit 14 | self.get_minimum_required_stake = subtensor.get_minimum_required_stake 15 | self.get_vote_data = subtensor.get_vote_data 16 | self.get_timestamp = subtensor.get_timestamp 17 | self.is_fast_blocks = subtensor.is_fast_blocks 18 | self.last_drand_round = subtensor.last_drand_round 19 | self.state_call = subtensor.state_call 20 | self.tx_rate_limit = subtensor.tx_rate_limit 21 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/commitments.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Commitments: 7 | """Class for managing any commitment operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.commit_reveal_enabled = subtensor.commit_reveal_enabled 11 | self.get_all_commitments = subtensor.get_all_commitments 12 | self.get_all_revealed_commitments = subtensor.get_all_revealed_commitments 13 | self.get_commitment = subtensor.get_commitment 14 | self.get_current_weight_commit_info = subtensor.get_current_weight_commit_info 15 | self.get_revealed_commitment = subtensor.get_revealed_commitment 16 | self.get_revealed_commitment_by_hotkey = ( 17 | subtensor.get_revealed_commitment_by_hotkey 18 | ) 19 | self.set_commitment = subtensor.set_commitment 20 | self.set_reveal_commitment = subtensor.set_reveal_commitment 21 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/delegates.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor 
as _AsyncSubtensor 4 | 5 | 6 | class Delegates: 7 | """Class for managing delegate operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.is_hotkey_delegate = subtensor.is_hotkey_delegate 11 | self.get_delegate_by_hotkey = subtensor.get_delegate_by_hotkey 12 | self.set_delegate_take = subtensor.set_delegate_take 13 | self.get_delegate_identities = subtensor.get_delegate_identities 14 | self.get_delegate_take = subtensor.get_delegate_take 15 | self.get_delegated = subtensor.get_delegated 16 | self.get_delegates = subtensor.get_delegates 17 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/extrinsics.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Extrinsics: 7 | """Class for managing extrinsic operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.add_stake = subtensor.add_stake 11 | self.add_stake_multiple = subtensor.add_stake_multiple 12 | self.burned_register = subtensor.burned_register 13 | self.commit_weights = subtensor.commit_weights 14 | self.move_stake = subtensor.move_stake 15 | self.register = subtensor.register 16 | self.register_subnet = subtensor.register_subnet 17 | self.reveal_weights = subtensor.reveal_weights 18 | self.root_register = subtensor.root_register 19 | self.root_set_weights = subtensor.root_set_weights 20 | self.set_children = subtensor.set_children 21 | self.set_subnet_identity = subtensor.set_subnet_identity 22 | self.set_weights = subtensor.set_weights 23 | self.serve_axon = subtensor.serve_axon 24 | self.start_call = subtensor.start_call 25 | self.swap_stake = subtensor.swap_stake 26 | self.transfer = subtensor.transfer 27 | self.transfer_stake = subtensor.transfer_stake 28 | self.unstake = subtensor.unstake 29 | self.unstake_multiple = subtensor.unstake_multiple 30 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/metagraphs.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Metagraphs: 7 | """Class for managing metagraph operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.get_metagraph_info = subtensor.get_metagraph_info 11 | self.get_all_metagraphs_info = subtensor.get_all_metagraphs_info 12 | self.metagraph = subtensor.metagraph 13 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/neurons.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Neurons: 7 | """Class for managing neuron operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.get_all_neuron_certificates = subtensor.get_all_neuron_certificates 11 | self.get_neuron_certificate = subtensor.get_neuron_certificate 12 | self.neuron_for_uid = 
subtensor.neuron_for_uid 13 | self.neurons = subtensor.neurons 14 | self.neurons_lite = subtensor.neurons_lite 15 | self.query_identity = subtensor.query_identity 16 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/queries.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Queries: 7 | """Class for managing subtensor query operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.query_constant = subtensor.query_constant 11 | self.query_map = subtensor.query_map 12 | self.query_map_subtensor = subtensor.query_map_subtensor 13 | self.query_module = subtensor.query_module 14 | self.query_runtime_api = subtensor.query_runtime_api 15 | self.query_subtensor = subtensor.query_subtensor 16 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/staking.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Staking: 7 | """Class for managing staking operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.add_stake = subtensor.add_stake 11 | self.add_stake_multiple = subtensor.add_stake_multiple 12 | self.get_hotkey_stake = subtensor.get_hotkey_stake 13 | self.get_minimum_required_stake = subtensor.get_minimum_required_stake 14 | self.get_stake = subtensor.get_stake 15 | self.get_stake_add_fee = subtensor.get_stake_add_fee 16 | self.get_stake_for_coldkey = subtensor.get_stake_for_coldkey 17 | self.get_stake_for_coldkey_and_hotkey = ( 18 | subtensor.get_stake_for_coldkey_and_hotkey 19 | ) 20 | self.get_stake_info_for_coldkey = subtensor.get_stake_info_for_coldkey 21 | self.get_stake_movement_fee = subtensor.get_stake_movement_fee 22 | self.get_unstake_fee = subtensor.get_unstake_fee 23 | self.unstake = subtensor.unstake 24 | self.unstake_multiple = subtensor.unstake_multiple 25 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/subnets.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | from bittensor.core.subtensor import Subtensor as _Subtensor 5 | 6 | 7 | class Subnets: 8 | """Class for managing subnet operations.""" 9 | 10 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 11 | self.all_subnets = subtensor.all_subnets 12 | self.blocks_since_last_step = subtensor.blocks_since_last_step 13 | self.blocks_since_last_update = subtensor.blocks_since_last_update 14 | self.bonds = subtensor.bonds 15 | self.difficulty = subtensor.difficulty 16 | self.get_all_subnets_info = subtensor.get_all_subnets_info 17 | self.get_children = subtensor.get_children 18 | self.get_children_pending = subtensor.get_children_pending 19 | self.get_current_weight_commit_info = subtensor.get_current_weight_commit_info 20 | self.get_hyperparameter = subtensor.get_hyperparameter 21 | self.get_neuron_for_pubkey_and_subnet = ( 22 | 
subtensor.get_neuron_for_pubkey_and_subnet 23 | ) 24 | self.get_next_epoch_start_block = subtensor.get_next_epoch_start_block 25 | self.get_subnet_burn_cost = subtensor.get_subnet_burn_cost 26 | self.get_subnet_hyperparameters = subtensor.get_subnet_hyperparameters 27 | self.get_subnet_info = subtensor.get_subnet_info 28 | self.get_subnet_owner_hotkey = subtensor.get_subnet_owner_hotkey 29 | self.get_subnet_reveal_period_epochs = subtensor.get_subnet_reveal_period_epochs 30 | self.get_subnet_validator_permits = subtensor.get_subnet_validator_permits 31 | self.get_subnets = subtensor.get_subnets 32 | self.get_total_subnets = subtensor.get_total_subnets 33 | self.get_uid_for_hotkey_on_subnet = subtensor.get_uid_for_hotkey_on_subnet 34 | self.immunity_period = subtensor.immunity_period 35 | self.is_hotkey_registered_on_subnet = subtensor.is_hotkey_registered_on_subnet 36 | self.is_subnet_active = subtensor.is_subnet_active 37 | self.max_weight_limit = subtensor.max_weight_limit 38 | self.min_allowed_weights = subtensor.min_allowed_weights 39 | self.recycle = subtensor.recycle 40 | self.register_subnet = subtensor.register_subnet 41 | self.set_subnet_identity = subtensor.set_subnet_identity 42 | self.subnet = subtensor.subnet 43 | self.subnet_exists = subtensor.subnet_exists 44 | self.subnetwork_n = subtensor.subnetwork_n 45 | self.tempo = subtensor.tempo 46 | self.weights_rate_limit = subtensor.weights_rate_limit 47 | self.weights = subtensor.weights 48 | -------------------------------------------------------------------------------- /bittensor/core/subtensor_api/wallets.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | from bittensor.core.subtensor import Subtensor as _Subtensor 3 | from bittensor.core.async_subtensor import AsyncSubtensor as _AsyncSubtensor 4 | 5 | 6 | class Wallets: 7 | """Class for managing coldkey, hotkey, wallet operations.""" 8 | 9 | def __init__(self, subtensor: Union["_Subtensor", "_AsyncSubtensor"]): 10 | self.does_hotkey_exist = subtensor.does_hotkey_exist 11 | self.filter_netuids_by_registered_hotkeys = ( 12 | subtensor.filter_netuids_by_registered_hotkeys 13 | ) 14 | self.is_hotkey_registered_any = subtensor.is_hotkey_registered_any 15 | self.is_hotkey_registered = subtensor.is_hotkey_registered 16 | self.is_hotkey_delegate = subtensor.is_hotkey_delegate 17 | self.get_balance = subtensor.get_balance 18 | self.get_balances = subtensor.get_balances 19 | self.get_children = subtensor.get_children 20 | self.get_children_pending = subtensor.get_children_pending 21 | self.get_delegate_by_hotkey = subtensor.get_delegate_by_hotkey 22 | self.get_delegate_take = subtensor.get_delegate_take 23 | self.get_delegated = subtensor.get_delegated 24 | self.get_hotkey_owner = subtensor.get_hotkey_owner 25 | self.get_hotkey_stake = subtensor.get_hotkey_stake 26 | self.get_minimum_required_stake = subtensor.get_minimum_required_stake 27 | self.get_netuids_for_hotkey = subtensor.get_netuids_for_hotkey 28 | self.get_owned_hotkeys = subtensor.get_owned_hotkeys 29 | self.get_stake = subtensor.get_stake 30 | self.get_stake_add_fee = subtensor.get_stake_add_fee 31 | self.get_stake_for_coldkey = subtensor.get_stake_for_coldkey 32 | self.get_stake_for_coldkey_and_hotkey = ( 33 | subtensor.get_stake_for_coldkey_and_hotkey 34 | ) 35 | self.get_stake_for_hotkey = subtensor.get_stake_for_hotkey 36 | self.get_stake_info_for_coldkey = subtensor.get_stake_info_for_coldkey 37 | self.get_stake_movement_fee = 
subtensor.get_stake_movement_fee 38 | self.get_transfer_fee = subtensor.get_transfer_fee 39 | self.get_unstake_fee = subtensor.get_unstake_fee 40 | -------------------------------------------------------------------------------- /bittensor/utils/axon_utils.py: -------------------------------------------------------------------------------- 1 | from typing import Optional 2 | 3 | ALLOWED_DELTA = 4_000_000_000 # Delta of 4 seconds for nonce validation 4 | NANOSECONDS_IN_SECOND = 1_000_000_000 5 | 6 | 7 | def allowed_nonce_window_ns( 8 | current_time_ns: int, synapse_timeout: Optional[float] = None 9 | ) -> int: 10 | """ 11 | Calculates the allowed window for a nonce in nanoseconds. 12 | 13 | Args: 14 | current_time_ns (int): The current time in nanoseconds. 15 | synapse_timeout (Optional[float]): The optional timeout for the synapse in seconds. If None, it defaults to 0. 16 | 17 | Returns: 18 | int: The allowed nonce window in nanoseconds. 19 | """ 20 | synapse_timeout_ns = (synapse_timeout or 0) * NANOSECONDS_IN_SECOND 21 | allowed_window_ns = current_time_ns - ALLOWED_DELTA - synapse_timeout_ns 22 | return allowed_window_ns 23 | 24 | 25 | def calculate_diff_seconds( 26 | current_time: int, synapse_timeout: Optional[float], synapse_nonce: int 27 | ): 28 | """ 29 | Calculates the difference in seconds between the current time and the synapse nonce, 30 | and also returns the allowed delta in seconds. 31 | 32 | Args: 33 | current_time (int): The current time in nanoseconds. 34 | synapse_timeout (Optional[float]): The optional timeout for the synapse in seconds. 35 | synapse_nonce (int): The nonce value for the synapse in nanoseconds. 36 | 37 | Returns: 38 | tuple: A tuple containing the difference in seconds (float) and the allowed delta in seconds (float). 39 | """ 40 | synapse_timeout_ns = (synapse_timeout or 0) * NANOSECONDS_IN_SECOND 41 | diff_seconds = (current_time - synapse_nonce) / NANOSECONDS_IN_SECOND 42 | allowed_delta_seconds = (ALLOWED_DELTA + synapse_timeout_ns) / NANOSECONDS_IN_SECOND 43 | return diff_seconds, allowed_delta_seconds 44 | -------------------------------------------------------------------------------- /bittensor/utils/btlogging/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | btlogging sub-package standardized logging for Bittensor. 3 | 4 | This module provides logging functionality for the Bittensor package. It includes custom loggers, handlers, and 5 | formatters to ensure consistent logging throughout the project. 6 | """ 7 | 8 | from .loggingmachine import LoggingMachine 9 | 10 | 11 | logging = LoggingMachine(LoggingMachine.config()) 12 | -------------------------------------------------------------------------------- /bittensor/utils/btlogging/console.py: -------------------------------------------------------------------------------- 1 | """ 2 | BittensorConsole class gives the ability to log messages to the terminal without changing Bittensor logging level. 
3 | 4 | Example: 5 | from bittensor import logging 6 | 7 | # will be logged 8 | logging.console.info("info message") 9 | logging.console.error("error message") 10 | logging.console.success("success message") 11 | logging.console.warning("warning message") 12 | logging.console.critical("critical message") 13 | 14 | # will not be logged 15 | logging.info("test info") 16 | """ 17 | 18 | from functools import wraps 19 | from typing import Callable, TYPE_CHECKING 20 | 21 | from .helpers import all_loggers 22 | 23 | if TYPE_CHECKING: 24 | from .loggingmachine import LoggingMachine 25 | 26 | 27 | def _print_wrapper(func: "Callable"): 28 | @wraps(func) 29 | def wrapper(self: "BittensorConsole", *args, **kwargs): 30 | """A wrapper function to temporarily set the logger level to debug.""" 31 | old_logger_level = self.logger.get_level() 32 | self.logger.set_console() 33 | func(self, *args, **kwargs) 34 | 35 | for logger in all_loggers(): 36 | logger.setLevel(old_logger_level) 37 | 38 | return wrapper 39 | 40 | 41 | class BittensorConsole: 42 | def __init__(self, logger: "LoggingMachine"): 43 | self.logger = logger 44 | 45 | @_print_wrapper 46 | def debug(self, message: str): 47 | """Logs a DEBUG message to the console.""" 48 | self.logger.debug(message) 49 | 50 | @_print_wrapper 51 | def info(self, message: str): 52 | """Logs a INFO message to the console.""" 53 | self.logger.info(message) 54 | 55 | @_print_wrapper 56 | def success(self, message: str): 57 | """Logs a SUCCESS message to the console.""" 58 | self.logger.success(message) 59 | 60 | @_print_wrapper 61 | def warning(self, message: str): 62 | """Logs a WARNING message to the console.""" 63 | self.logger.warning(message) 64 | 65 | @_print_wrapper 66 | def error(self, message: str): 67 | """Logs a ERROR message to the console.""" 68 | self.logger.error(message) 69 | 70 | @_print_wrapper 71 | def critical(self, message: str): 72 | """Logs a CRITICAL message to the console.""" 73 | self.logger.critical(message) 74 | -------------------------------------------------------------------------------- /bittensor/utils/btlogging/defines.py: -------------------------------------------------------------------------------- 1 | """Btlogging constant definition module.""" 2 | 3 | BASE_LOG_FORMAT = "%(asctime)s | %(levelname)s | %(message)s" 4 | TRACE_LOG_FORMAT = ( 5 | f"%(asctime)s | %(levelname)s | %(name)s:%(filename)s:%(lineno)s | %(message)s" 6 | ) 7 | DATE_FORMAT = "%Y-%m-%d %H:%M:%S" 8 | BITTENSOR_LOGGER_NAME = "bittensor" 9 | DEFAULT_LOG_FILE_NAME = "bittensor.log" 10 | DEFAULT_MAX_ROTATING_LOG_FILE_SIZE = 25 * 1024 * 1024 11 | DEFAULT_LOG_BACKUP_COUNT = 10 12 | -------------------------------------------------------------------------------- /bittensor/utils/btlogging/helpers.py: -------------------------------------------------------------------------------- 1 | """ 2 | btlogging.helpers module provides helper functions for the Bittensor logging system. 3 | """ 4 | 5 | import logging 6 | from typing import Generator 7 | 8 | 9 | def all_loggers() -> Generator["logging.Logger", None, None]: 10 | """Generator that yields all logger instances in the application. 11 | 12 | Iterates through the logging root manager's logger dictionary and yields all active `Logger` instances. It skips 13 | placeholders and other types that are not instances of `Logger`. 14 | 15 | Yields: 16 | logger (logging.Logger): An active logger instance. 
17 | """ 18 | for logger in logging.root.manager.loggerDict.values(): 19 | if isinstance(logger, logging.PlaceHolder): 20 | continue 21 | # In some versions of Python, the values in loggerDict might be 22 | # LoggerAdapter instances instead of Logger instances. 23 | # We check for Logger instances specifically. 24 | if isinstance(logger, logging.Logger): 25 | yield logger 26 | else: 27 | # If it's not a Logger instance, it could be a LoggerAdapter or 28 | # another form that doesn't directly offer logging methods. 29 | # This branch can be extended to handle such cases as needed. 30 | pass 31 | 32 | 33 | def all_logger_names() -> Generator[str, None, None]: 34 | """ 35 | Generate the names of all active loggers. 36 | 37 | This function iterates through the logging root manager's logger dictionary and yields the names of all active 38 | `Logger` instances. It skips placeholders and other types that are not instances of `Logger`. 39 | 40 | Yields: 41 | name (str): The name of an active logger. 42 | """ 43 | for name, logger in logging.root.manager.loggerDict.items(): 44 | if isinstance(logger, logging.PlaceHolder): 45 | continue 46 | # In some versions of Python, the values in loggerDict might be 47 | # LoggerAdapter instances instead of Logger instances. 48 | # We check for Logger instances specifically. 49 | if isinstance(logger, logging.Logger): 50 | yield name 51 | else: 52 | # If it's not a Logger instance, it could be a LoggerAdapter or 53 | # another form that doesn't directly offer logging methods. 54 | # This branch can be extended to handle such cases as needed. 55 | pass 56 | 57 | 58 | def get_max_logger_name_length() -> int: 59 | """ 60 | Calculate and return the length of the longest logger name. 61 | 62 | This function iterates through all active logger names and determines the length of the longest name. 63 | 64 | Returns: 65 | max_length (int): The length of the longest logger name. 66 | """ 67 | max_length = 0 68 | for name in all_logger_names(): 69 | if len(name) > max_length: 70 | max_length = len(name) 71 | return max_length 72 | -------------------------------------------------------------------------------- /bittensor/utils/certifi.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Locate Python 3 4 | PYTHON=$(command -v python3) 5 | if [ -z "$PYTHON" ]; then 6 | echo "Error: Python 3 is not installed or not found in PATH." 
7 | exit 1 8 | fi 9 | 10 | echo "Using Python: $PYTHON" 11 | 12 | echo " -- Upgrading the certifi package" 13 | $PYTHON -m pip install --upgrade certifi 14 | 15 | echo " -- Fetching the path to the certifi certificate bundle" 16 | CERTIFI_CAFILE=$($PYTHON -c "import certifi; print(certifi.where())") 17 | 18 | echo " -- Resolving OpenSSL directory and certificate file path" 19 | OPENSSL_DIR=$($PYTHON -c "import ssl; print(ssl.get_default_verify_paths().openssl_cafile.rsplit('/', 1)[0])") 20 | OPENSSL_CAFILE=$($PYTHON -c "import ssl; print(ssl.get_default_verify_paths().openssl_cafile.rsplit('/', 1)[-1])") 21 | 22 | echo " -- Navigating to the OpenSSL directory" 23 | cd "$OPENSSL_DIR" || { echo "Failed to navigate to $OPENSSL_DIR"; exit 1; } 24 | 25 | echo " -- Removing any existing certificate file or symlink" 26 | rm -f "$OPENSSL_CAFILE" 27 | 28 | echo " -- Creating a symlink to the certifi certificate bundle" 29 | ln -s "$CERTIFI_CAFILE" "$OPENSSL_CAFILE" 30 | 31 | echo " -- Setting appropriate file permissions" 32 | chmod 775 "$OPENSSL_CAFILE" 33 | 34 | echo " -- Update complete" 35 | -------------------------------------------------------------------------------- /bittensor/utils/formatting.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | 4 | def get_human_readable(num, suffix="H"): 5 | """Convert a number into a human-readable format with suffixes.""" 6 | for unit in ["", "K", "M", "G", "T", "P", "E", "Z"]: 7 | if abs(num) < 1000.0: 8 | return f"{num:3.1f}{unit}{suffix}" 9 | num /= 1000.0 10 | return f"{num:.1f}Y{suffix}" 11 | 12 | 13 | def millify(n: int): 14 | """Converts a number into a more readable format with suffixes.""" 15 | mill_names = ["", " K", " M", " B", " T"] 16 | n = float(n) 17 | mill_idx = max( 18 | 0, 19 | min( 20 | len(mill_names) - 1, 21 | int(math.floor(0 if n == 0 else math.log10(abs(n)) / 3)), 22 | ), 23 | ) 24 | return "{:.2f}{}".format(n / 10 ** (3 * mill_idx), mill_names[mill_idx]) 25 | -------------------------------------------------------------------------------- /bittensor/utils/mock/__init__.py: -------------------------------------------------------------------------------- 1 | from .subtensor_mock import MockSubtensor 2 | -------------------------------------------------------------------------------- /bittensor/utils/networking.py: -------------------------------------------------------------------------------- 1 | """Utils for handling local network with ip and ports.""" 2 | 3 | import os 4 | from typing import Optional 5 | from urllib import request as urllib_request 6 | 7 | import netaddr 8 | import requests 9 | from async_substrate_interface.utils import json 10 | 11 | 12 | class ExternalIPNotFound(Exception): 13 | """Raised if we cannot attain your external ip from CURL/URLLIB/IPIFY/AWS""" 14 | 15 | 16 | def int_to_ip(int_val: int) -> str: 17 | """Maps an integer to a unique ip-string 18 | 19 | Arguments: 20 | int_val (int): The integer representation of an ip. Must be in the range (0, 3.4028237e+38). 21 | 22 | Returns: 23 | str_val (str): The string representation of an ip. Of form *.*.*.* for ipv4 or *::*:*:*:* for ipv6 24 | """ 25 | return str(netaddr.IPAddress(int_val)) 26 | 27 | 28 | def ip_to_int(str_val: str) -> int: 29 | """Maps an ip-string to a unique integer. 30 | 31 | Arguments: 32 | str_val (str): The string representation of an ip. Of form *.*.*.* for ipv4 or *::*:*:*:* for ipv6 33 | 34 | Returns: 35 | int_val (int): The integer representation of an ip. 
Must be in the range (0, 3.4028237e+38). 36 | """ 37 | return int(netaddr.IPAddress(str_val)) 38 | 39 | 40 | def ip_version(str_val: str) -> int: 41 | """Returns the ip version (IPV4 or IPV6). 42 | 43 | Arguments: 44 | str_val (str): The string representation of an ip. Of form *.*.*.* for ipv4 or *::*:*:*:* for ipv6 45 | 46 | Returns: 47 | int_val (int): The ip version (Either 4 or 6 for IPv4/IPv6) 48 | """ 49 | return int(netaddr.IPAddress(str_val).version) 50 | 51 | 52 | def ip__str__(ip_type: int, ip_str: str, port: int): 53 | """Return a formatted ip string""" 54 | return "/ipv%i/%s:%i" % (ip_type, ip_str, port) 55 | 56 | 57 | def get_external_ip() -> str: 58 | """Checks CURL/URLLIB/IPIFY/AWS for your external ip. 59 | 60 | Returns: 61 | external_ip (str): Your routers external facing ip as a string. 62 | 63 | Raises: 64 | ExternalIPNotFound(Exception): Raised if all external ip attempts fail. 65 | """ 66 | # --- Try AWS 67 | try: 68 | external_ip = requests.get("https://checkip.amazonaws.com").text.strip() 69 | assert isinstance(ip_to_int(external_ip), int) 70 | return str(external_ip) 71 | except ExternalIPNotFound: 72 | pass 73 | 74 | # --- Try ipconfig. 75 | try: 76 | process = os.popen("curl -s ifconfig.me") 77 | external_ip = process.readline() 78 | process.close() 79 | assert isinstance(ip_to_int(external_ip), int) 80 | return str(external_ip) 81 | except ExternalIPNotFound: 82 | pass 83 | 84 | # --- Try ipinfo. 85 | try: 86 | process = os.popen("curl -s https://ipinfo.io") 87 | external_ip = json.loads(process.read())["ip"] 88 | process.close() 89 | assert isinstance(ip_to_int(external_ip), int) 90 | return str(external_ip) 91 | except ExternalIPNotFound: 92 | pass 93 | 94 | # --- Try myip.dnsomatic 95 | try: 96 | process = os.popen("curl -s myip.dnsomatic.com") 97 | external_ip = process.readline() 98 | process.close() 99 | assert isinstance(ip_to_int(external_ip), int) 100 | return str(external_ip) 101 | except ExternalIPNotFound: 102 | pass 103 | 104 | # --- Try urllib ipv6 105 | try: 106 | external_ip = urllib_request.urlopen("https://ident.me").read().decode("utf8") 107 | assert isinstance(ip_to_int(external_ip), int) 108 | return str(external_ip) 109 | except ExternalIPNotFound: 110 | pass 111 | 112 | # --- Try Wikipedia 113 | try: 114 | external_ip = requests.get("https://www.wikipedia.org").headers["X-Client-IP"] 115 | assert isinstance(ip_to_int(external_ip), int) 116 | return str(external_ip) 117 | except ExternalIPNotFound: 118 | pass 119 | 120 | raise ExternalIPNotFound 121 | 122 | 123 | def get_formatted_ws_endpoint_url(endpoint_url: Optional[str]) -> Optional[str]: 124 | """ 125 | Returns a formatted websocket endpoint url. 126 | 127 | Arguments: 128 | endpoint_url (Optional[str]): The endpoint url to format. 129 | 130 | Returns: 131 | formatted_endpoint_url (Optional[str]): The formatted endpoint url. In the form of ws:// or 132 | wss:// 133 | 134 | Note: The port (or lack thereof) is left unchanged. 
135 | """ 136 | if endpoint_url is None: 137 | return None 138 | 139 | if endpoint_url[0:6] != "wss://" and endpoint_url[0:5] != "ws://": 140 | endpoint_url = f"ws://{endpoint_url}" 141 | 142 | return endpoint_url 143 | -------------------------------------------------------------------------------- /bittensor/utils/registration/__init__.py: -------------------------------------------------------------------------------- 1 | from bittensor.utils.registration.pow import ( 2 | create_pow, 3 | legacy_torch_api_compat, 4 | log_no_torch_error, 5 | torch, 6 | use_torch, 7 | LazyLoadedTorch, 8 | POWSolution, 9 | ) 10 | from bittensor.utils.registration.async_pow import create_pow_async 11 | 12 | __all__ = [ 13 | create_pow, 14 | legacy_torch_api_compat, 15 | log_no_torch_error, 16 | torch, 17 | use_torch, 18 | LazyLoadedTorch, 19 | POWSolution, 20 | create_pow_async, 21 | ] 22 | -------------------------------------------------------------------------------- /bittensor/utils/registration/register_cuda.py: -------------------------------------------------------------------------------- 1 | """This module provides functions for solving Proof of Work (PoW) problems using CUDA.""" 2 | 3 | import binascii 4 | import hashlib 5 | import io 6 | from contextlib import redirect_stdout 7 | from typing import Any, Union 8 | 9 | import numpy as np 10 | from Crypto.Hash import keccak 11 | 12 | 13 | def _hex_bytes_to_u8_list(hex_bytes: bytes) -> list[int]: 14 | """ 15 | Convert a sequence of bytes in hexadecimal format to a list of 16 | unsigned 8-bit integers. 17 | 18 | Args: 19 | hex_bytes (bytes): A sequence of bytes in hexadecimal format. 20 | 21 | Returns: 22 | A list of unsigned 8-bit integers. 23 | 24 | """ 25 | return [int(hex_bytes[i : i + 2], 16) for i in range(0, len(hex_bytes), 2)] 26 | 27 | 28 | def _create_seal_hash(block_and_hotkey_hash_hex_: bytes, nonce: int) -> bytes: 29 | """Creates a seal hash from the block and hotkey hash and nonce.""" 30 | nonce_bytes = binascii.hexlify(nonce.to_bytes(8, "little")) 31 | pre_seal = nonce_bytes + block_and_hotkey_hash_hex_ 32 | seal_sh256 = hashlib.sha256(bytearray(_hex_bytes_to_u8_list(pre_seal))).digest() 33 | kec = keccak.new(digest_bits=256) 34 | return kec.update(seal_sh256).digest() 35 | 36 | 37 | def _seal_meets_difficulty(seal_: bytes, difficulty: int, limit: int) -> bool: 38 | """Checks if the seal meets the given difficulty.""" 39 | seal_number = int.from_bytes(seal_, "big") 40 | product = seal_number * difficulty 41 | # limit = int(math.pow(2, 256)) - 1 42 | return product < limit 43 | 44 | 45 | def solve_cuda( 46 | nonce_start: "np.int64", 47 | update_interval: "np.int64", 48 | tpb: int, 49 | block_and_hotkey_hash_bytes: bytes, 50 | difficulty: int, 51 | limit: int, 52 | dev_id: int = 0, 53 | ) -> Union[tuple[Any, bytes], tuple[int, bytes], tuple[Any, None]]: 54 | """ 55 | Solves the PoW problem using CUDA. 56 | 57 | Args: 58 | nonce_start (numpy.int64): Starting nonce. 59 | update_interval (numpy.int64): Number of nonces to solve before updating block information. 60 | tpb (int): Threads per block. 61 | block_and_hotkey_hash_bytes (bytes): Keccak(Bytes of the block hash + bytes of the hotkey) 64 bytes. 62 | difficulty (int): Difficulty of the PoW problem. 63 | limit (int): Upper limit of the nonce. 64 | dev_id (int): The CUDA device ID. Defaults to ``0``. 65 | 66 | Returns: 67 | (Union[tuple[Any, bytes], tuple[int, bytes], tuple[Any, None]]): Tuple of the nonce and the seal corresponding 68 | to the solution. 
Returns -1 for nonce if no solution is found. 69 | """ 70 | 71 | try: 72 | import cubit 73 | except ImportError: 74 | raise ImportError( 75 | "Please install cubit. See the instruction https://github.com/opentensor/cubit?tab=readme-ov-file#install." 76 | ) 77 | 78 | upper = int(limit // difficulty) 79 | 80 | upper_bytes = upper.to_bytes(32, byteorder="little", signed=False) 81 | 82 | # Call cython function 83 | # int blockSize, uint64 nonce_start, uint64 update_interval, const unsigned char[:] limit, 84 | # const unsigned char[:] block_bytes, int dev_id 85 | block_and_hotkey_hash_hex = binascii.hexlify(block_and_hotkey_hash_bytes)[:64] 86 | 87 | solution = cubit.solve_cuda( 88 | tpb, 89 | nonce_start, 90 | update_interval, 91 | upper_bytes, 92 | block_and_hotkey_hash_hex, 93 | dev_id, 94 | ) # 0 is first GPU 95 | seal = None 96 | if solution != -1: 97 | seal = _create_seal_hash(block_and_hotkey_hash_hex, solution) 98 | if _seal_meets_difficulty(seal, difficulty, limit): 99 | return solution, seal 100 | else: 101 | return -1, b"\x00" * 32 102 | return solution, seal 103 | 104 | 105 | def reset_cuda(): 106 | """Resets the CUDA environment.""" 107 | try: 108 | import cubit 109 | except ImportError: 110 | raise ImportError("Please install cubit") 111 | cubit.reset_cuda() 112 | 113 | 114 | def log_cuda_errors() -> str: 115 | """Logs any CUDA errors.""" 116 | try: 117 | import cubit 118 | except ImportError: 119 | raise ImportError("Please install cubit") 120 | 121 | file = io.StringIO() 122 | with redirect_stdout(file): 123 | cubit.log_cuda_errors() 124 | return file.getvalue() 125 | -------------------------------------------------------------------------------- /bittensor/utils/subnets.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any, Union, Optional, TYPE_CHECKING 3 | 4 | from bittensor.core.axon import Axon 5 | from bittensor.core.dendrite import Dendrite 6 | from bittensor.utils.btlogging import logging 7 | 8 | # For annotation purposes 9 | if TYPE_CHECKING: 10 | from bittensor_wallet import Wallet 11 | from bittensor.core.synapse import Synapse 12 | 13 | 14 | # Community uses this class 15 | class SubnetsAPI(ABC): 16 | """This class is not used within the bittensor package, but is actively used by the community.""" 17 | 18 | def __init__(self, wallet: "Wallet"): 19 | self.wallet = wallet 20 | self.dendrite = Dendrite(wallet=wallet) 21 | 22 | async def __call__(self, *args, **kwargs): 23 | return await self.query_api(*args, **kwargs) 24 | 25 | @abstractmethod 26 | def prepare_synapse(self, *args, **kwargs) -> Any: 27 | """Prepare the synapse-specific payload.""" 28 | 29 | @abstractmethod 30 | def process_responses(self, responses: list[Union["Synapse", Any]]) -> Any: 31 | """Process the responses from the network.""" 32 | 33 | async def query_api( 34 | self, 35 | axons: Union["Axon", list["Axon"]], 36 | deserialize: Optional[bool] = False, 37 | timeout: Optional[int] = 12, 38 | **kwargs, 39 | ) -> Any: 40 | """ 41 | Queries the API nodes of a subnet using the given synapse and bespoke query function. 42 | 43 | Args: 44 | axons (Union[bt.axon, list[bt.axon]]): The list of axon(s) to query. 45 | deserialize (Optional[bool]): Whether to deserialize the responses. Defaults to False. 46 | timeout (Optional[int]): The timeout in seconds for the query. Defaults to 12. 47 | **kwargs: Keyword arguments for the prepare_synapse_fn. 48 | 49 | Returns: 50 | Any: The result of the process_responses_fn. 
51 | """ 52 | synapse = self.prepare_synapse(**kwargs) 53 | logging.debug(f"Querying validator axons with synapse {synapse.name}...") 54 | responses = await self.dendrite( 55 | axons=axons, 56 | synapse=synapse, 57 | deserialize=deserialize, 58 | timeout=timeout, 59 | ) 60 | return self.process_responses(responses) 61 | -------------------------------------------------------------------------------- /bittensor/utils/substrate_utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/bittensor/utils/substrate_utils/__init__.py -------------------------------------------------------------------------------- /bittensor/utils/substrate_utils/hasher.py: -------------------------------------------------------------------------------- 1 | """Helper functions used to calculate keys for Substrate storage items""" 2 | 3 | from hashlib import blake2b 4 | 5 | import xxhash 6 | 7 | 8 | def blake2_256(data): 9 | """ 10 | Helper function to calculate a 32 bytes Blake2b hash for provided data, used as key for Substrate storage items 11 | """ 12 | return blake2b(data, digest_size=32).digest() 13 | 14 | 15 | def blake2_128(data): 16 | """ 17 | Helper function to calculate a 16 bytes Blake2b hash for provided data, used as key for Substrate storage items 18 | """ 19 | return blake2b(data, digest_size=16).digest() 20 | 21 | 22 | def blake2_128_concat(data): 23 | """ 24 | Helper function to calculate a 16 bytes Blake2b hash for provided data, concatenated with data, used as key 25 | for Substrate storage items 26 | """ 27 | return blake2b(data, digest_size=16).digest() + data 28 | 29 | 30 | def xxh128(data): 31 | """ 32 | Helper function to calculate a 2 concatenated xxh64 hash for provided data, used as key for several Substrate 33 | """ 34 | storage_key1 = bytearray(xxhash.xxh64(data, seed=0).digest()) 35 | storage_key1.reverse() 36 | 37 | storage_key2 = bytearray(xxhash.xxh64(data, seed=1).digest()) 38 | storage_key2.reverse() 39 | 40 | return storage_key1 + storage_key2 41 | 42 | 43 | def two_x64_concat(data): 44 | """ 45 | Helper function to calculate a xxh64 hash with concatenated data for provided data, 46 | used as key for several Substrate 47 | """ 48 | storage_key = bytearray(xxhash.xxh64(data, seed=0).digest()) 49 | storage_key.reverse() 50 | 51 | return storage_key + data 52 | 53 | 54 | def xxh64(data): 55 | storage_key = bytearray(xxhash.xxh64(data, seed=0).digest()) 56 | storage_key.reverse() 57 | 58 | return storage_key 59 | 60 | 61 | def identity(data): 62 | return data 63 | -------------------------------------------------------------------------------- /contrib/CODE_REVIEW_DOCS.md: -------------------------------------------------------------------------------- 1 | # Code Review 2 | ### Conceptual Review 3 | 4 | A review can be a conceptual review, where the reviewer leaves a comment 5 | * `Concept (N)ACK`, meaning "I do (not) agree with the general goal of this pull 6 | request", 7 | * `Approach (N)ACK`, meaning `Concept ACK`, but "I do (not) agree with the 8 | approach of this change". 9 | 10 | A `NACK` needs to include a rationale why the change is not worthwhile. 11 | NACKs without accompanying reasoning may be disregarded. 12 | After conceptual agreement on the change, code review can be provided. 
A review 13 | begins with `ACK BRANCH_COMMIT`, where `BRANCH_COMMIT` is the top of the PR 14 | branch, followed by a description of how the reviewer did the review. The 15 | following language is used within pull request comments: 16 | 17 | - "I have tested the code", involving change-specific manual testing in 18 | addition to running the unit, functional, or fuzz tests, and in case it is 19 | not obvious how the manual testing was done, it should be described; 20 | - "I have not tested the code, but I have reviewed it and it looks 21 | OK, I agree it can be merged"; 22 | - A "nit" refers to a trivial, often non-blocking issue. 23 | 24 | ### Code Review 25 | Project maintainers reserve the right to weigh the opinions of peer reviewers 26 | using common sense judgement and may also weigh based on merit. Reviewers that 27 | have demonstrated a deeper commitment and understanding of the project over time 28 | or who have clear domain expertise may naturally have more weight, as one would 29 | expect in all walks of life. 30 | 31 | Where a patch set affects consensus-critical code, the bar will be much 32 | higher in terms of discussion and peer review requirements, keeping in mind that 33 | mistakes could be very costly to the wider community. This includes refactoring 34 | of consensus-critical code. 35 | 36 | Where a patch set proposes to change the Bittensor consensus, it must have been 37 | discussed extensively on the discord server and other channels, be accompanied by a widely 38 | discussed BIP and have a generally widely perceived technical consensus of being 39 | a worthwhile change based on the judgement of the maintainers. 40 | 41 | ### Finding Reviewers 42 | 43 | As most reviewers are themselves developers with their own projects, the review 44 | process can be quite lengthy, and some amount of patience is required. If you find 45 | that you've been waiting for a pull request to be given attention for several 46 | months, there may be a number of reasons for this, some of which you can do something 47 | about: 48 | 49 | - It may be because of a feature freeze due to an upcoming release. During this time, 50 | only bug fixes are taken into consideration. If your pull request is a new feature, 51 | it will not be prioritized until after the release. Wait for the release. 52 | - It may be because the changes you are suggesting do not appeal to people. Rather than 53 | nits and critique, which require effort and means they care enough to spend time on your 54 | contribution, thundering silence is a good sign of widespread (mild) dislike of a given change 55 | (because people don't assume *others* won't actually like the proposal). Don't take 56 | that personally, though! Instead, take another critical look at what you are suggesting 57 | and see if it: changes too much, is too broad, doesn't adhere to the 58 | [developer notes](DEVELOPMENT_WORKFLOW.md), is dangerous or insecure, is messily written, etc. 59 | Identify and address any of the issues you find. Then ask e.g. on IRC if someone could give 60 | their opinion on the concept itself. 61 | - It may be because your code is too complex for all but a few people, and those people 62 | may not have realized your pull request even exists. A great way to find people who 63 | are qualified and care about the code you are touching is the 64 | [Git Blame feature](https://docs.github.com/en/github/managing-files-in-a-repository/managing-files-on-github/tracking-changes-in-a-file). 
Simply 65 | look up who last modified the code you are changing and see if you can find 66 | them and give them a nudge. Don't be incessant about the nudging, though. 67 | - Finally, if all else fails, ask on IRC or elsewhere for someone to give your pull request 68 | a look. If you think you've been waiting for an unreasonably long time (say, 69 | more than a month) for no particular reason (a few lines changed, etc.), 70 | this is totally fine. Try to return the favor when someone else is asking 71 | for feedback on their code, and the universe balances out. 72 | - Remember that the best thing you can do while waiting is give review to others! -------------------------------------------------------------------------------- /contrib/RELEASE_GUIDELINES.md: -------------------------------------------------------------------------------- 1 | # Release Guidelines 2 | 3 | The release manager in charge can release a Bittensor version using two scripts: 4 | - [../scripts/release/versioning.sh](../scripts/release/versioning.sh) 5 | - [../scripts/release/release.sh](../scripts/release/release.sh) 6 | 7 | The release manager will need the right permissions for: 8 | - github.com 9 | - pypi.org 10 | - hub.docker.com 11 | 12 | If you are new in this role, ask for the proper setup you need to run this process manually. 13 | 14 | ## Process of release 15 | 16 | 1. Create a branch called `release/VERSION`, having VERSION with the version to release. 17 | 1. Make sure twine is installed: `pip install twine` 18 | 1. Within the release branch: 19 | 1. Update the version executing:`./scripts/release/versioning.sh --update UPDATE_TYPE` 20 | 1. **UPDATE_TYPE** could be *major*, *minor* or *patch*. 21 | 1. Add release notes to CHANGELOG executing: `./scripts/release/add_notes_changelog.sh -A -V NEW_VERSION -P PREVIOUS_TAG -T GH_ACCESS_TOKEN` 22 | 1. **NEW_VERSION**: e.g.: 3.6.4 23 | 1. **PREVIOUS_TAG**: e.g.: v3.6.3 24 | 1. **GH_ACCESS_TOKEN**: A github [personal access token](https://docs.github.com/en/enterprise-server@3.4/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token) you need. 25 | 26 | 1. Test the release branch and verify that it meets the requirements. 27 | 1. After merging the release branch; Run the release script 28 | 29 | ## Versioning script usage 30 | 31 | Options: 32 | - -U, --update: type of update. It could be major, minor, patch or rc (release candidate). 33 | - -A, --apply: This specifies to apply the release. Without this the versioning will just show a dry run with no changes. 34 | 35 | ## Release script usage 36 | 37 | Options: 38 | - -A, --apply: This specifies to apply the release. Without this the release will just show a dry run with no changes. 39 | - -T,--github-token: A github personal access token to interact with the Github API. 40 | 41 | ### Github token 42 | 43 | Since you need to use a secret when releasing bittensor (github personal access token), I encourage you to use [pass](https://www.passwordstore.org/) or a similar tool that allows you to store the secret safely and not expose it in the history of the machine you use. 
44 | 45 | So you can have: 46 | ``` 47 | GITHUB_ACCESS_TOKEN=$(pass github/your_personal_token_with_permissions) 48 | ``` 49 | 50 | or 51 | ``` 52 | GITHUB_ACCESS_TOKEN=$(whatever you need to get the token safely) 53 | ``` 54 | 55 | ### Executions 56 | 57 | So, executing the release script will look like this: 58 | 59 | ``` 60 | # For a dry run 61 | ./scripts/release/release.sh 62 | ``` 63 | 64 | ``` 65 | # Applying changes 66 | ./scripts/release/release.sh --apply --github-token $GITHUB_ACCESS_TOKEN 67 | ``` 68 | 69 | ## Checking release 70 | 71 | After executing the release script, the following will have been generated: 72 | - A new git tag in [github.com](https://github.com/opentensor/bittensor/tags) 73 | - A new github release in [github.com](https://github.com/opentensor/bittensor/releases) 74 | - A new pip package in [pypi.org](https://pypi.org/project/bittensor/#history) 75 | - A new docker image in [hub.docker.com](https://hub.docker.com/r/opentensorfdn/bittensor/tags) 76 | 77 | ## After release 78 | 79 | After a Bittensor release we have to: 80 | - Update [cubit](https://github.com/opentensor/cubit). 81 | 82 | ### Updating cubit 83 | 84 | 1. Update the [Dockerfile](https://github.com/opentensor/cubit/blob/master/docker/Dockerfile) 85 | 1. Build its docker image (follow its README instructions) 86 | 1. Push it to hub.docker.com 87 | 1. The generated name will be the same, but with `-cubit` in its name 88 | -------------------------------------------------------------------------------- /contrib/TESTING.md: -------------------------------------------------------------------------------- 1 | # Testing Guide for Bittensor 2 | 3 | Testing is an essential part of software development that ensures the correctness and performance of your code. Bittensor uses a combination of unit tests and integration tests to verify the functionality of its components. This guide will walk you through how to run and write tests for Bittensor. 4 | 5 | ## Running Tests 6 | 7 | Bittensor uses `pytest` for running its tests. To run all tests, navigate to the root directory of the Bittensor repository and run: 8 | 9 | ```bash 10 | pytest 11 | ``` 12 | 13 | This will automatically discover all test files (those that start with `test_`) and run them. 14 | 15 | If you want to run a specific test file, you can specify it directly. For example, to run the tests in `test_wallet.py`, you would use: 16 | 17 | ```bash 18 | pytest tests/test_wallet.py 19 | ``` 20 | 21 | Similarly, you can run a specific test within a file by appending `::` and the test name. For example: 22 | 23 | ```bash 24 | pytest tests/test_wallet.py::test_create_new_coldkey 25 | ``` 26 | 27 | ## Writing Tests 28 | 29 | When writing tests for Bittensor, you should aim to cover both the "happy path" (where everything works as expected) and any potential error conditions. Here's a basic structure for a test file: 30 | 31 | ```python 32 | import pytest 33 | import bittensor 34 | 35 | def test_some_functionality(): 36 | # Setup any necessary objects or state. 37 | wallet = bittensor.wallet() 38 | 39 | # Call the function you're testing. 40 | result = wallet.create_new_coldkey() 41 | 42 | # Assert that the function behaved as expected. 43 | assert result is not None 44 | ``` 45 | 46 | In this example, we're testing the `create_new_coldkey` function of the `wallet` object. We assert that the result is not `None`, which is the expected behavior.
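The example above only exercises the happy path. To cover an error condition as well, `pytest.raises` can be used to assert that invalid input is rejected. The sketch below is illustrative only: the `regenerate_coldkey` call and the broad `Exception` type are assumptions used to show the pattern, not a statement about a specific Bittensor API guarantee.

```python
import pytest
import bittensor


def test_regenerate_coldkey_rejects_bad_mnemonic():
    # Setup: create a wallet object, as in the example above.
    wallet = bittensor.wallet()

    # The call and exception type below are hypothetical; adapt them to the
    # function you are actually testing. pytest.raises fails the test if the
    # block completes without raising.
    with pytest.raises(Exception):
        wallet.regenerate_coldkey(mnemonic="definitely not a valid mnemonic")
```

Covering both outcomes this way keeps a passing happy-path test from hiding silent failures in input validation.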
47 | 48 | ## Mocking 49 | 50 | In some cases, you may need to mock certain functions or objects to isolate the functionality you're testing. Bittensor uses the `unittest.mock` library for this. Here's a simple example from the axon unit tests: 51 | 52 | ```python 53 | def test_axon_start(self): 54 | mock_wallet = MagicMock( 55 | spec=bittensor.Wallet, 56 | coldkey=MagicMock(), 57 | coldkeypub=MagicMock( 58 | # mock ss58 address 59 | ss58_address="5DD26kC2kxajmwfbbZmVmxhrY9VeeyR1Gpzy9i8wxLUg6zxm" 60 | ), 61 | hotkey=MagicMock( 62 | ss58_address="5CtstubuSoVLJGCXkiWRNKrrGg2DVBZ9qMs2qYTLsZR4q1Wg" 63 | ), 64 | ) 65 | axon = bittensor.axon(wallet=mock_wallet, metagraph=None) 66 | axon.start() 67 | assert axon.server._state.stage == grpc._server._ServerStage.STARTED 68 | ``` 69 | 70 | In this example, we're mocking the `coldkey`, `coldkeypub`, and `hotkey` for a wallet. This allows us to test how the axon code behaves when `bittensor.Wallet()` would normally be called, without actually calling the constructor. 71 | ## Test Coverage 72 | 73 | It's important to ensure that your tests cover as much of your code as possible. You can use the `pytest-cov` plugin to measure your test coverage. To use it, first install it with pip: 74 | 75 | ```bash 76 | pip install pytest-cov 77 | ``` 78 | 79 | Then, you can run your tests with coverage like this: 80 | 81 | ```bash 82 | pytest --cov=bittensor 83 | ``` 84 | 85 | This will output a coverage report showing the percentage of your code that's covered by tests. 86 | 87 | Remember, while high test coverage is a good goal, it's also important to write meaningful tests. A test isn't very useful if it doesn't accurately represent the conditions under which your code will run. 88 | 89 | ## Continuous Integration 90 | 91 | Bittensor uses CircleCI for continuous integration. This means that every time you push changes to the repository, all tests are automatically run. If any tests fail, you'll be notified so you can fix the issue before merging your changes. 92 | 93 | 94 | Remember, tests are an important part of maintaining the health of a codebase. They help catch issues early and make it easier to add new features or refactor existing code. Happy testing! -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.2" 2 | 3 | services: 4 | dev: 5 | container_name: node-bittensor 6 | image: "bittensor/bittensor:latest" 7 | ports: 8 | - "8091:8091" 9 | volumes: 10 | - ~/.bittensor:/root/.bittensor -------------------------------------------------------------------------------- /example.env: -------------------------------------------------------------------------------- 1 | # To use the legacy Torch-based interface of bittensor, you must set USE_TORCH=1 2 | USE_TORCH=0 3 | # If set to 0 (or anything other than 1), the current, numpy-based bittensor interface will be used. 4 | # This is generally what you want unless you need legacy interoperability. 5 | # Please note that the legacy interface is deprecated, and is not tested nearly as much.
6 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | ignore_missing_imports = True 3 | ignore_errors = True 4 | 5 | [mypy-*.axon.*] 6 | ignore_errors = False 7 | 8 | [mypy-*.dendrite.*] 9 | ignore_errors = False 10 | 11 | [mypy-bittensor.metagraph.*] 12 | ignore_errors = False 13 | 14 | [mypy-*.subtensor.*] 15 | ignore_errors = False 16 | 17 | [mypy-*.synapse.*] 18 | ignore_errors = False 19 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=70.0.0", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "bittensor" 7 | version = "9.7.1" 8 | description = "Bittensor" 9 | readme = "README.md" 10 | authors = [ 11 | {name = "bittensor.com"} 12 | ] 13 | license = { file = "LICENSE" } 14 | requires-python = ">=3.9,<3.14" 15 | dependencies = [ 16 | 17 | "wheel", 18 | "setuptools~=70.0.0", 19 | "aiohttp~=3.9", 20 | "asyncstdlib~=3.13.0", 21 | "colorama~=0.4.6", 22 | "fastapi~=0.110.1", 23 | "munch~=2.5.0", 24 | "numpy~=2.0.1", 25 | "msgpack-numpy-opentensor~=0.5.0", 26 | "nest_asyncio==1.6.0", 27 | "netaddr==1.3.0", 28 | "packaging", 29 | "python-statemachine~=2.1", 30 | "pycryptodome>=3.18.0,<4.0.0", 31 | "pyyaml>=6.0", 32 | "retry==0.9.2", 33 | "requests>=2.0.0,<3.0", 34 | "pydantic>=2.3, <3", 35 | "scalecodec==1.2.11", 36 | "uvicorn", 37 | "bittensor-drand>=0.5.0", 38 | "bittensor-wallet>=3.0.8", 39 | "async-substrate-interface>=1.2.0" 40 | ] 41 | 42 | [project.optional-dependencies] 43 | dev = [ 44 | "pytest==8.3.5", 45 | "pytest-asyncio==0.26.0", 46 | "pytest-mock==3.14.0", 47 | "pytest-split==0.10.0", 48 | "pytest-xdist==3.6.1", 49 | "pytest-rerunfailures==10.2", 50 | "coveralls==3.3.1", 51 | "pytest-cov==4.0.0", 52 | "ddt==1.6.0", 53 | "hypothesis==6.81.1", 54 | "flake8==7.0.0", 55 | "mypy==1.8.0", 56 | "types-retry==0.9.9.4", 57 | "freezegun==1.5.0", 58 | "httpx==0.27.0", 59 | "ruff==0.11.5", 60 | "aioresponses==0.7.6", 61 | "factory-boy==3.3.0", 62 | "types-requests", 63 | "torch>=1.13.1,<3.0" 64 | ] 65 | torch = [ 66 | "torch>=1.13.1,<3.0" 67 | ] 68 | cli = [ 69 | "bittensor-cli>=9.0.2" 70 | ] 71 | 72 | 73 | [project.urls] 74 | # more details can be found here 75 | homepage = "https://github.com/opentensor/bittensor" 76 | Repository = "https://github.com/opentensor/bittensor" 77 | 78 | [tool.flit.metadata] 79 | classifiers = [ 80 | "Development Status :: 5 - Production/Stable", 81 | "Intended Audience :: Developers", 82 | "License :: OSI Approved :: MIT License", 83 | "Programming Language :: Python :: 3 :: Only", 84 | "Programming Language :: Python :: 3.9", 85 | "Programming Language :: Python :: 3.10", 86 | "Programming Language :: Python :: 3.11", 87 | "Programming Language :: Python :: 3.12", 88 | "Topic :: Scientific/Engineering", 89 | "Topic :: Scientific/Engineering :: Mathematics", 90 | "Topic :: Scientific/Engineering :: Artificial Intelligence", 91 | "Topic :: Software Development", 92 | "Topic :: Software Development :: Libraries", 93 | "Topic :: Software Development :: Libraries :: Python Modules" 94 | ] 95 | 96 | [tool.setuptools] 97 | package-dir = {"bittensor" = "bittensor"} 98 | script-files = ["bittensor/utils/certifi.sh"] -------------------------------------------------------------------------------- /scripts/check_pre_submit.sh: 
-------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # ruff checks formatting 4 | echo ">>> Run the pre-submit format check with \`ruff format .\`." 5 | ruff format . 6 | 7 | echo ">>> Run the pre-submit format check with \`mypy\`." 8 | 9 | # mypy checks python versions compatibility 10 | versions=("3.9" "3.10" "3.11") 11 | for version in "${versions[@]}"; do 12 | echo "Running mypy for Python $version..." 13 | mypy --ignore-missing-imports bittensor/ --python-version="$version" 14 | done 15 | 16 | # flake8 checks errors count in bittensor folder 17 | error_count=$(flake8 bittensor/ --count) 18 | echo ">>> Flake8 found ${error_count} errors." 19 | -------------------------------------------------------------------------------- /scripts/check_requirements_changes.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Check if requirements files have changed in the last commit 4 | if git diff --name-only HEAD | grep -E 'pyproject.toml'; then 5 | echo "Requirements files may have changed. Running compatibility checks..." 6 | echo 'export REQUIREMENTS_CHANGED="true"' >> $BASH_ENV 7 | else 8 | echo "Requirements files have not changed. Skipping compatibility checks..." 9 | echo 'export REQUIREMENTS_CHANGED="false"' >> $BASH_ENV 10 | fi 11 | -------------------------------------------------------------------------------- /scripts/create_wallet.sh: -------------------------------------------------------------------------------- 1 | mkdir -p ~/.bittensor/wallets/default/hotkeys 2 | rm ~/.bittensor/wallets/default/coldkeypub.txt 3 | rm ~/.bittensor/wallets/default/hotkeys/default 4 | touch ~/.bittensor/wallets/default/coldkeypub.txt 5 | touch ~/.bittensor/wallets/default/hotkeys/default 6 | echo "0x74acaa8d7829336dfff7569f19225818cc593335b9aafcde3f69db23c3538561" >> ~/.bittensor/wallets/default/coldkeypub.txt 7 | echo '{"accountId": "0x9cf7085aa3304c21dc0f571c0134abb12f2e8e1bc9dbfc82440b8d6ba7908655", "publicKey": "0x9cf7085aa3304c21dc0f571c0134abb12f2e8e1bc9dbfc82440b8d6ba7908655", "secretPhrase": "document usage siren cross across crater shrug jump marine distance absurd caught", "secretSeed": "0x2465ae0757117bea271ad622e1cd0c4b319c96896a3c7d9469a68e63cf7f9646", "ss58Address": "5FcWiCiFoSspGGocSxzatNL5kT6cjxjXQ9LuAuYbvFNUqcfX"}' >> ~/.bittensor/wallets/default/hotkeys/default 8 | chmod 0600 ~/.bittensor/wallets/default/coldkeypub.txt 9 | chmod 0600 ~/.bittensor/wallets/default/hotkeys/default 10 | echo "~/.bittensor/wallets/default/coldkeypub.txt" 11 | cat ~/.bittensor/wallets/default/coldkeypub.txt 12 | echo "~/.bittensor/wallets/default/hotkeys/default" 13 | cat ~/.bittensor/wallets/default/hotkeys/default -------------------------------------------------------------------------------- /scripts/post_install_cli.py: -------------------------------------------------------------------------------- 1 | import os 2 | import subprocess 3 | import sys 4 | 5 | 6 | def post_install(): 7 | # Determine the shell type (bash, zsh, etc.) 
8 | shell = os.environ.get("SHELL", "")  # default to an empty string so the checks below don't fail if SHELL is unset 9 | if "bash" in shell: 10 | shell_config = "~/.bashrc" 11 | elif "zsh" in shell: 12 | shell_config = "~/.zshrc" 13 | else: 14 | print("Unsupported shell for autocompletion.") 15 | return 16 | 17 | # Generate the completion script 18 | completion_script = subprocess.check_output( 19 | [sys.executable, "-m", "bittensor.cli", "--print-completion", shell] 20 | ).decode() 21 | 22 | # Append the completion script to the shell configuration file 23 | with open(os.path.expanduser(shell_config), "a") as file: 24 | file.write("\n# Bittensor CLI Autocompletion\n") 25 | file.write(completion_script) 26 | 27 | 28 | if __name__ == "__main__": 29 | post_install() 30 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/tests/__init__.py -------------------------------------------------------------------------------- /tests/e2e_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/tests/e2e_tests/__init__.py -------------------------------------------------------------------------------- /tests/e2e_tests/test_axon.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | import pytest 4 | 5 | from bittensor.utils import networking 6 | 7 | 8 | @pytest.mark.asyncio 9 | async def test_axon(subtensor, templates, alice_wallet): 10 | """ 11 | Test the Axon mechanism and successful registration on the network. 12 | 13 | Steps: 14 | 1. Register a subnet and register Alice 15 | 2. Check if metagraph.axon is updated and check axon attributes 16 | 3. Run Alice as a miner on subnet 17 | 4.
Check the metagraph again after running the miner and verify all attributes 18 | Raises: 19 | AssertionError: If any of the checks or verifications fail 20 | """ 21 | 22 | print("Testing test_axon") 23 | 24 | netuid = 2 25 | 26 | # Register a subnet, netuid 2 27 | assert subtensor.register_subnet(alice_wallet), "Subnet wasn't created" 28 | 29 | # Verify subnet created successfully 30 | assert subtensor.subnet_exists(netuid), "Subnet wasn't created successfully" 31 | 32 | metagraph = subtensor.metagraph(netuid) 33 | 34 | # Validate current metagraph stats 35 | old_axon = metagraph.axons[0] 36 | assert len(metagraph.axons) == 1, f"Expected 1 axon, but got {len(metagraph.axons)}" 37 | assert old_axon.hotkey == alice_wallet.hotkey.ss58_address, ( 38 | "Hotkey mismatch for the axon" 39 | ) 40 | assert old_axon.coldkey == alice_wallet.coldkey.ss58_address, ( 41 | "Coldkey mismatch for the axon" 42 | ) 43 | assert old_axon.ip == "0.0.0.0", f"Expected IP 0.0.0.0, but got {old_axon.ip}" 44 | assert old_axon.port == 0, f"Expected port 0, but got {old_axon.port}" 45 | assert old_axon.ip_type == 0, f"Expected IP type 0, but got {old_axon.ip_type}" 46 | 47 | async with templates.miner(alice_wallet, netuid): 48 | # Waiting for 5 seconds for metagraph to be updated 49 | await asyncio.sleep(5) 50 | 51 | # Refresh the metagraph 52 | metagraph = subtensor.metagraph(netuid) 53 | updated_axon = metagraph.axons[0] 54 | external_ip = networking.get_external_ip() 55 | 56 | # Assert updated attributes 57 | assert len(metagraph.axons) == 1, ( 58 | f"Expected 1 axon, but got {len(metagraph.axons)} after mining" 59 | ) 60 | 61 | assert len(metagraph.neurons) == 1, ( 62 | f"Expected 1 neuron, but got {len(metagraph.neurons)}" 63 | ) 64 | 65 | assert updated_axon.ip == external_ip, ( 66 | f"Expected IP {external_ip}, but got {updated_axon.ip}" 67 | ) 68 | 69 | assert updated_axon.ip_type == networking.ip_version(external_ip), ( 70 | f"Expected IP type {networking.ip_version(external_ip)}, but got {updated_axon.ip_type}" 71 | ) 72 | 73 | assert updated_axon.port == 8091, f"Expected port 8091, but got {updated_axon.port}" 74 | 75 | assert updated_axon.hotkey == alice_wallet.hotkey.ss58_address, ( 76 | "Hotkey mismatch after mining" 77 | ) 78 | 79 | assert updated_axon.coldkey == alice_wallet.coldkey.ss58_address, ( 80 | "Coldkey mismatch after mining" 81 | ) 82 | 83 | print("✅ Passed test_axon") 84 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_cross_subtensor_compatibility.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | import pytest 3 | 4 | 5 | @pytest.mark.asyncio 6 | async def test_get_timestamp(subtensor, async_subtensor, local_chain): 7 | with subtensor: 8 | block_number = subtensor.get_current_block() 9 | assert isinstance( 10 | subtensor.get_timestamp(), datetime 11 | ) # verify it works with no block number specified 12 | sync_result = subtensor.get_timestamp( 13 | block=block_number 14 | ) # verify it works with block number specified 15 | async with async_subtensor: 16 | assert isinstance(await async_subtensor.get_timestamp(), datetime) 17 | async_result = await async_subtensor.get_timestamp(block=block_number) 18 | assert sync_result == async_result 19 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_dendrite.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | 
import pytest 4 | 5 | from bittensor.utils.balance import Balance 6 | from bittensor.utils.btlogging import logging 7 | from tests.e2e_tests.utils.chain_interactions import ( 8 | sudo_set_admin_utils, 9 | wait_epoch, 10 | ) 11 | from tests.e2e_tests.utils.e2e_test_utils import wait_to_start_call 12 | 13 | 14 | @pytest.mark.asyncio 15 | async def test_dendrite(local_chain, subtensor, templates, alice_wallet, bob_wallet): 16 | """ 17 | Test the Dendrite mechanism 18 | 19 | Steps: 20 | 1. Register a subnet through Alice 21 | 2. Register Bob as a validator 22 | 3. Add stake to Bob and ensure neuron is not a validator yet 23 | 4. Run Bob as a validator and wait epoch 24 | 5. Ensure Bob's neuron has all correct attributes of a validator 25 | Raises: 26 | AssertionError: If any of the checks or verifications fail 27 | """ 28 | 29 | alice_subnet_netuid = subtensor.get_total_subnets() # 2 30 | logging.console.info("Testing test_dendrite") 31 | 32 | # Register a subnet, netuid 2 33 | assert subtensor.register_subnet(alice_wallet, True, True), "Subnet wasn't created" 34 | 35 | # Verify subnet created successfully 36 | assert subtensor.subnet_exists(alice_subnet_netuid), ( 37 | "Subnet wasn't created successfully" 38 | ) 39 | 40 | assert wait_to_start_call(subtensor, alice_wallet, alice_subnet_netuid) 41 | 42 | # Make sure Alice is Top Validator 43 | assert subtensor.add_stake( 44 | alice_wallet, 45 | netuid=alice_subnet_netuid, 46 | amount=Balance.from_tao(1), 47 | ) 48 | 49 | # update max_allowed_validators so only one neuron can get validator_permit 50 | assert sudo_set_admin_utils( 51 | local_chain, 52 | alice_wallet, 53 | call_function="sudo_set_max_allowed_validators", 54 | call_params={ 55 | "netuid": alice_subnet_netuid, 56 | "max_allowed_validators": 1, 57 | }, 58 | ) 59 | 60 | # update weights_set_rate_limit for fast-blocks 61 | status, error = sudo_set_admin_utils( 62 | local_chain, 63 | alice_wallet, 64 | call_function="sudo_set_weights_set_rate_limit", 65 | call_params={ 66 | "netuid": alice_subnet_netuid, 67 | "weights_set_rate_limit": 10, 68 | }, 69 | ) 70 | 71 | assert error is None 72 | assert status is True 73 | 74 | # Register Bob to the network 75 | assert subtensor.burned_register(bob_wallet, alice_subnet_netuid), ( 76 | "Unable to register Bob as a neuron" 77 | ) 78 | 79 | metagraph = subtensor.metagraph(alice_subnet_netuid) 80 | 81 | # Assert neurons are Alice and Bob 82 | assert len(metagraph.neurons) == 2 83 | 84 | alice_neuron = metagraph.neurons[0] 85 | assert alice_neuron.hotkey == alice_wallet.hotkey.ss58_address 86 | assert alice_neuron.coldkey == alice_wallet.coldkey.ss58_address 87 | 88 | bob_neuron = metagraph.neurons[1] 89 | assert bob_neuron.hotkey == bob_wallet.hotkey.ss58_address 90 | assert bob_neuron.coldkey == bob_wallet.coldkey.ss58_address 91 | 92 | # Assert stake is 0 93 | assert bob_neuron.stake.tao == 0 94 | 95 | # Stake to become to top neuron after the first epoch 96 | tao = Balance.from_tao(10_000) 97 | alpha, _ = subtensor.subnet(alice_subnet_netuid).tao_to_alpha_with_slippage(tao) 98 | 99 | assert subtensor.add_stake( 100 | bob_wallet, 101 | netuid=alice_subnet_netuid, 102 | amount=tao, 103 | ) 104 | 105 | # Refresh metagraph 106 | metagraph = subtensor.metagraph(alice_subnet_netuid) 107 | bob_neuron = metagraph.neurons[1] 108 | 109 | # Assert alpha is close to stake equivalent 110 | assert 0.95 < bob_neuron.stake.rao / alpha.rao < 1.05 111 | 112 | # Assert neuron is not a validator yet 113 | assert bob_neuron.active is True 114 | assert 
bob_neuron.validator_permit is False 115 | assert bob_neuron.validator_trust == 0.0 116 | assert bob_neuron.pruning_score == 0 117 | 118 | async with templates.validator(bob_wallet, alice_subnet_netuid): 119 | await asyncio.sleep(5) # wait for 5 seconds for the Validator to process 120 | 121 | await wait_epoch(subtensor, netuid=alice_subnet_netuid) 122 | 123 | # Refresh metagraph 124 | metagraph = subtensor.metagraph(alice_subnet_netuid) 125 | 126 | # Refresh validator neuron 127 | updated_neuron = metagraph.neurons[1] 128 | 129 | assert len(metagraph.neurons) == 2 130 | assert updated_neuron.active is True 131 | assert updated_neuron.validator_permit is True 132 | assert updated_neuron.hotkey == bob_wallet.hotkey.ss58_address 133 | assert updated_neuron.coldkey == bob_wallet.coldkey.ss58_address 134 | assert updated_neuron.pruning_score != 0 135 | 136 | logging.console.info("✅ Passed test_dendrite") 137 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_neuron_certificate.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from bittensor.core.axon import Axon 3 | from bittensor.utils.btlogging import logging 4 | 5 | 6 | @pytest.mark.asyncio 7 | async def test_neuron_certificate(subtensor, alice_wallet): 8 | """ 9 | Tests the metagraph 10 | 11 | Steps: 12 | 1. Register a subnet through Alice 13 | 2. Serve Alice axon with neuron certificate 14 | 3. Verify neuron certificate can be retrieved 15 | Raises: 16 | AssertionError: If any of the checks or verifications fail 17 | """ 18 | logging.info("Testing neuron_certificate") 19 | netuid = 2 20 | 21 | # Register root as Alice - the subnet owner and validator 22 | assert subtensor.register_subnet(alice_wallet) 23 | 24 | # Verify subnet created successfully 25 | assert subtensor.subnet_exists(netuid), "Subnet wasn't created successfully" 26 | 27 | # Register Alice as a neuron on the subnet 28 | assert subtensor.burned_register(alice_wallet, netuid), ( 29 | "Unable to register Alice as a neuron" 30 | ) 31 | 32 | # Serve Alice's axon with a certificate 33 | axon = Axon(wallet=alice_wallet) 34 | encoded_certificate = "?FAKE_ALICE_CERT" 35 | subtensor.serve_axon( 36 | netuid, 37 | axon, 38 | certificate=encoded_certificate, 39 | wait_for_inclusion=True, 40 | wait_for_finalization=True, 41 | ) 42 | 43 | # Verify we are getting the correct certificate 44 | assert ( 45 | subtensor.get_neuron_certificate( 46 | netuid=netuid, 47 | hotkey=alice_wallet.hotkey.ss58_address, 48 | ) 49 | == encoded_certificate 50 | ) 51 | all_certs_query = subtensor.get_all_neuron_certificates(netuid=netuid) 52 | assert alice_wallet.hotkey.ss58_address in all_certs_query.keys() 53 | assert all_certs_query[alice_wallet.hotkey.ss58_address] == encoded_certificate 54 | 55 | logging.info("✅ Passed test_neuron_certificate") 56 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_reveal_commitments.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import pytest 4 | 5 | from bittensor.utils.btlogging import logging 6 | from tests.e2e_tests.utils.e2e_test_utils import wait_to_start_call 7 | 8 | 9 | @pytest.mark.parametrize("local_chain", [True], indirect=True) 10 | @pytest.mark.asyncio 11 | async def test_set_reveal_commitment(local_chain, subtensor, alice_wallet, bob_wallet): 12 | """ 13 | Tests the set/reveal commitments with TLE (time-locked encrypted commitments) 
mechanism. 14 | 15 | Steps: 16 | 1. Register a subnet through Alice 17 | 2. Register Bob's neuron and add stake 18 | 3. Set commitment from Alice hotkey 19 | 4. Set commitment from Bob hotkey 20 | 5. Wait until commitment is revealed. 21 | 6. Verify commitment is revealed by Alice and Bob and available via a single combined call. 22 | 7. Verify commitment is revealed by Alice and Bob and available via separate calls. 23 | Raises: 24 | AssertionError: If any of the checks or verifications fail 25 | 26 | Note: This test can also be run in fast-block mode. For this we need to set `BLOCK_TIME` to 0.25 and replace 27 | `False` with `True` in the `pytest.mark.parametrize` decorator. 28 | """ 29 | BLOCK_TIME = ( 30 | 0.25 if subtensor.is_fast_blocks() else 12.0 31 | ) # 12 for non-fast-block, 0.25 for fast block 32 | BLOCKS_UNTIL_REVEAL = 10 if subtensor.is_fast_blocks() else 5 33 | 34 | alice_subnet_netuid = subtensor.get_total_subnets() # 2 35 | 36 | logging.console.info("Testing Drand encrypted commitments.") 37 | 38 | # Register subnet as Alice 39 | assert subtensor.register_subnet(alice_wallet, True, True), ( 40 | "Unable to register the subnet" 41 | ) 42 | 43 | assert wait_to_start_call(subtensor, alice_wallet, alice_subnet_netuid) 44 | 45 | # Register Bob's neuron 46 | assert subtensor.burned_register(bob_wallet, alice_subnet_netuid, True, True), ( 47 | "Bob's neuron was not registered." 48 | ) 49 | 50 | # Verify subnet 2 created successfully 51 | assert subtensor.subnet_exists(alice_subnet_netuid), ( 52 | "Subnet wasn't created successfully" 53 | ) 54 | 55 | # Set commitment from Alice hotkey 56 | message_alice = f"This is a test message with time {time.time()} from Alice." 57 | 58 | response = subtensor.set_reveal_commitment( 59 | alice_wallet, 60 | alice_subnet_netuid, 61 | message_alice, 62 | BLOCKS_UNTIL_REVEAL, 63 | BLOCK_TIME, 64 | ) 65 | assert response[0] is True 66 | 67 | # Set commitment from Bob's hotkey 68 | message_bob = f"This is a test message with time {time.time()} from Bob." 69 | 70 | response = subtensor.set_reveal_commitment( 71 | bob_wallet, 72 | alice_subnet_netuid, 73 | message_bob, 74 | BLOCKS_UNTIL_REVEAL, 75 | block_time=BLOCK_TIME, 76 | ) 77 | assert response[0] is True 78 | 79 | target_reveal_round = response[1] 80 | 81 | # Sometimes the chain doesn't update its storage right away and the commit doesn't appear in the expected 82 | # `last_drand_round`. In this case we need to wait a bit. 83 | print(f"Waiting for reveal round {target_reveal_round}") 84 | chain_offset = 1 if subtensor.is_fast_blocks() else 24 85 | while subtensor.last_drand_round() <= target_reveal_round + chain_offset: 86 | # wait one drand period (3 sec) 87 | print(f"Current last revealed drand round {subtensor.last_drand_round()}") 88 | time.sleep(3) 89 | 90 | actual_all = subtensor.get_all_revealed_commitments(alice_subnet_netuid) 91 | 92 | alice_result = actual_all.get(alice_wallet.hotkey.ss58_address) 93 | assert alice_result is not None, "Alice's commitment was not received." 94 | 95 | bob_result = actual_all.get(bob_wallet.hotkey.ss58_address) 96 | assert bob_result is not None, "Bob's commitment was not received." 97 | 98 | alice_actual_block, alice_actual_message = alice_result[0] 99 | bob_actual_block, bob_actual_message = bob_result[0] 100 | 101 | # We do not check the reveal block because it is a dynamic number. It depends on the load of the chain, the number 102 | # of commits in the chain and the computing power.
103 | assert message_alice == alice_actual_message 104 | assert message_bob == bob_actual_message 105 | 106 | # Assertions for get_revealed_commitment (based of hotkey) 107 | actual_alice_block, actual_alice_message = subtensor.get_revealed_commitment( 108 | alice_subnet_netuid, 0 109 | )[0] 110 | actual_bob_block, actual_bob_message = subtensor.get_revealed_commitment( 111 | alice_subnet_netuid, 1 112 | )[0] 113 | 114 | assert message_alice == actual_alice_message 115 | assert message_bob == actual_bob_message 116 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_set_subnet_identity_extrinsic.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from bittensor.core.chain_data import SubnetIdentity 4 | from bittensor.utils.btlogging import logging 5 | 6 | 7 | @pytest.mark.asyncio 8 | async def test_set_subnet_identity_extrinsic_happy_pass(subtensor, alice_wallet): 9 | logging.console.info( 10 | "[magenta]Testing `set_subnet_identity_extrinsic` with success result.[/magenta]" 11 | ) 12 | 13 | netuid = subtensor.get_total_subnets() # 2 14 | 15 | # Register a subnet, netuid 2 16 | assert subtensor.register_subnet(alice_wallet), "Subnet wasn't created" 17 | 18 | # Verify subnet created successfully 19 | assert subtensor.subnet_exists(netuid), "Subnet wasn't created successfully" 20 | 21 | # make sure subnet_identity is empty 22 | assert subtensor.subnet(netuid).subnet_identity is None, ( 23 | "Subnet identity should be None before set" 24 | ) 25 | 26 | # prepare SubnetIdentity for subnet 27 | subnet_identity = SubnetIdentity( 28 | subnet_name="e2e test subnet", 29 | github_repo="e2e test repo", 30 | subnet_contact="e2e test contact", 31 | subnet_url="e2e test url", 32 | discord="e2e test discord", 33 | description="e2e test description", 34 | additional="e2e test additional", 35 | ) 36 | 37 | # set SubnetIdentity to subnet 38 | assert ( 39 | subtensor.set_subnet_identity( 40 | wallet=alice_wallet, 41 | netuid=netuid, 42 | subnet_identity=subnet_identity, 43 | )[0] 44 | is True 45 | ), "Set subnet identity failed" 46 | 47 | # check SubnetIdentity of the subnet 48 | assert subtensor.subnet(netuid).subnet_identity == subnet_identity 49 | 50 | 51 | @pytest.mark.asyncio 52 | async def test_set_subnet_identity_extrinsic_failed( 53 | subtensor, alice_wallet, bob_wallet 54 | ): 55 | """ 56 | Test case for verifying the behavior of the `set_subnet_identity_extrinsic` function in the 57 | scenario where the result of the function is expected to fail. It ensures proper handling 58 | and validation when attempting to set the subnet identity under specific conditions. 59 | 60 | Args: 61 | subtensor: The instance of the subtensor class under test. 62 | alice_wallet: A mock or test wallet associated with Alice, used for creating a subnet. 63 | bob_wallet: A mock or test wallet associated with Bob, used for setting the subnet identity. 64 | 65 | Decorators: 66 | @pytest.mark.asyncio: Marks this test as an asynchronous test. 
67 | """ 68 | logging.console.info( 69 | "[magenta]Testing `set_subnet_identity_extrinsic` with failed result.[/magenta]" 70 | ) 71 | 72 | netuid = 2 73 | 74 | # Register a subnet, netuid 2 75 | assert subtensor.register_subnet(alice_wallet), "Subnet wasn't created" 76 | 77 | # Verify subnet created successfully 78 | assert subtensor.subnet_exists(netuid), "Subnet wasn't created successfully" 79 | 80 | # make sure subnet_identity is empty 81 | assert subtensor.subnet(netuid).subnet_identity is None, ( 82 | "Subnet identity should be None before set" 83 | ) 84 | 85 | # prepare SubnetIdentity for subnet 86 | subnet_identity = SubnetIdentity( 87 | subnet_name="e2e test subnet", 88 | github_repo="e2e test repo", 89 | subnet_contact="e2e test contact", 90 | subnet_url="e2e test url", 91 | discord="e2e test discord", 92 | description="e2e test description", 93 | additional="e2e test additional", 94 | ) 95 | 96 | # set SubnetIdentity to subnet 97 | assert ( 98 | subtensor.set_subnet_identity( 99 | wallet=bob_wallet, 100 | netuid=netuid, 101 | subnet_identity=subnet_identity, 102 | )[0] 103 | is False 104 | ), "Set subnet identity failed" 105 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_stake_fee.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from bittensor import Balance 3 | 4 | 5 | @pytest.mark.parametrize("local_chain", [False], indirect=True) 6 | @pytest.mark.asyncio 7 | async def test_stake_fee_api(local_chain, subtensor, alice_wallet, bob_wallet): 8 | """ 9 | Tests the stake fee calculation mechanism for various staking operations 10 | 11 | Steps: 12 | 1. Register a subnet through Alice 13 | 2. Test stake fees for: 14 | - Adding new stake 15 | - Removing stake 16 | - Moving stake between hotkeys/subnets/coldkeys 17 | """ 18 | 19 | netuid = 2 20 | root_netuid = 0 21 | stake_amount = Balance.from_tao(100) # 100 TAO 22 | min_stake_fee = Balance.from_rao(50_000) 23 | 24 | # Register subnet as Alice 25 | assert subtensor.register_subnet(alice_wallet), "Unable to register the subnet" 26 | assert subtensor.subnet_exists(netuid), "Subnet wasn't created successfully" 27 | 28 | # Test add_stake fee 29 | stake_fee_0 = subtensor.get_stake_add_fee( 30 | amount=stake_amount, 31 | netuid=netuid, 32 | coldkey_ss58=alice_wallet.coldkeypub.ss58_address, 33 | hotkey_ss58=alice_wallet.hotkey.ss58_address, 34 | ) 35 | assert isinstance(stake_fee_0, Balance), "Stake fee should be a Balance object" 36 | assert stake_fee_0 >= min_stake_fee, ( 37 | "Stake fee should be greater than the minimum stake fee" 38 | ) 39 | 40 | # Test unstake fee 41 | stake_fee_1 = subtensor.get_unstake_fee( 42 | amount=stake_amount, 43 | netuid=root_netuid, 44 | coldkey_ss58=alice_wallet.coldkeypub.ss58_address, 45 | hotkey_ss58=bob_wallet.hotkey.ss58_address, 46 | ) 47 | assert isinstance(stake_fee_1, Balance), "Stake fee should be a Balance object" 48 | assert stake_fee_1 >= min_stake_fee, ( 49 | "Stake fee should be greater than the minimum stake fee" 50 | ) 51 | 52 | # Test various stake movement scenarios 53 | movement_scenarios = [ 54 | # Move from root to non-root 55 | { 56 | "origin_netuid": root_netuid, 57 | "origin_hotkey": alice_wallet.hotkey.ss58_address, 58 | "origin_coldkey": alice_wallet.coldkeypub.ss58_address, 59 | "dest_netuid": netuid, 60 | "dest_hotkey": alice_wallet.hotkey.ss58_address, 61 | "dest_coldkey": alice_wallet.coldkeypub.ss58_address, 62 | }, 63 | # Move between hotkeys on root 64 | { 65 | 
"origin_netuid": root_netuid, 66 | "origin_hotkey": alice_wallet.hotkey.ss58_address, 67 | "origin_coldkey": alice_wallet.coldkeypub.ss58_address, 68 | "dest_netuid": root_netuid, 69 | "dest_hotkey": bob_wallet.hotkey.ss58_address, 70 | "dest_coldkey": alice_wallet.coldkeypub.ss58_address, 71 | }, 72 | # Move between coldkeys 73 | { 74 | "origin_netuid": root_netuid, 75 | "origin_hotkey": bob_wallet.hotkey.ss58_address, 76 | "origin_coldkey": alice_wallet.coldkeypub.ss58_address, 77 | "dest_netuid": root_netuid, 78 | "dest_hotkey": bob_wallet.hotkey.ss58_address, 79 | "dest_coldkey": bob_wallet.coldkeypub.ss58_address, 80 | }, 81 | ] 82 | 83 | for scenario in movement_scenarios: 84 | stake_fee = subtensor.get_stake_movement_fee( 85 | amount=stake_amount, 86 | origin_netuid=scenario["origin_netuid"], 87 | origin_hotkey_ss58=scenario["origin_hotkey"], 88 | origin_coldkey_ss58=scenario["origin_coldkey"], 89 | destination_netuid=scenario["dest_netuid"], 90 | destination_hotkey_ss58=scenario["dest_hotkey"], 91 | destination_coldkey_ss58=scenario["dest_coldkey"], 92 | ) 93 | assert isinstance(stake_fee, Balance), "Stake fee should be a Balance object" 94 | assert stake_fee >= min_stake_fee, ( 95 | "Stake fee should be greater than the minimum stake fee" 96 | ) 97 | 98 | # Test cross-subnet movement 99 | netuid2 = 3 100 | assert subtensor.register_subnet(alice_wallet), ( 101 | "Unable to register the second subnet" 102 | ) 103 | assert subtensor.subnet_exists(netuid2), "Second subnet wasn't created successfully" 104 | 105 | stake_fee = subtensor.get_stake_movement_fee( 106 | amount=stake_amount, 107 | origin_netuid=netuid, 108 | origin_hotkey_ss58=bob_wallet.hotkey.ss58_address, 109 | origin_coldkey_ss58=alice_wallet.coldkeypub.ss58_address, 110 | destination_netuid=netuid2, 111 | destination_hotkey_ss58=bob_wallet.hotkey.ss58_address, 112 | destination_coldkey_ss58=alice_wallet.coldkeypub.ss58_address, 113 | ) 114 | assert isinstance(stake_fee, Balance), "Stake fee should be a Balance object" 115 | assert stake_fee >= min_stake_fee, ( 116 | "Stake fee should be greater than the minimum stake fee" 117 | ) 118 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_subnets.py: -------------------------------------------------------------------------------- 1 | def test_subnets(subtensor, alice_wallet): 2 | """ 3 | Tests: 4 | - Querying subnets 5 | - Filtering subnets 6 | - Checks default TxRateLimit 7 | """ 8 | 9 | subnets = subtensor.all_subnets() 10 | 11 | assert len(subnets) == 2 12 | 13 | subtensor.register_subnet( 14 | alice_wallet, 15 | wait_for_inclusion=True, 16 | wait_for_finalization=True, 17 | ) 18 | 19 | subnets = subtensor.all_subnets() 20 | 21 | assert len(subnets) == 3 22 | 23 | netuids = subtensor.filter_netuids_by_registered_hotkeys( 24 | all_netuids=[0, 1, 2], 25 | filter_for_netuids=[2], 26 | all_hotkeys=[alice_wallet], 27 | block=subtensor.block, 28 | ) 29 | 30 | assert netuids == [2] 31 | 32 | tx_rate_limit = subtensor.tx_rate_limit() 33 | 34 | assert tx_rate_limit == 1000 35 | -------------------------------------------------------------------------------- /tests/e2e_tests/test_transfer.py: -------------------------------------------------------------------------------- 1 | from bittensor.utils.balance import Balance 2 | from bittensor import logging 3 | 4 | logging.set_trace() 5 | 6 | 7 | def test_transfer(subtensor, alice_wallet): 8 | """ 9 | Test the transfer mechanism on the chain 10 | 11 | Steps: 12 | 1. 
Calculate existing balance and transfer 2 Tao 13 | 2. Calculate balance after transfer call and verify calculations 14 | Raises: 15 | AssertionError: If any of the checks or verifications fail 16 | """ 17 | 18 | print("Testing test_transfer") 19 | 20 | transfer_value = Balance.from_tao(2) 21 | dest_coldkey = "5GpzQgpiAKHMWNSH3RN4GLf96GVTDct9QxYEFAY7LWcVzTbx" 22 | 23 | # Fetch transfer fee 24 | transfer_fee = subtensor.get_transfer_fee( 25 | wallet=alice_wallet, 26 | dest=dest_coldkey, 27 | value=transfer_value, 28 | ) 29 | 30 | # Account details before transfer 31 | balance_before = subtensor.get_balance(alice_wallet.coldkeypub.ss58_address) 32 | 33 | # Transfer Tao 34 | assert subtensor.transfer( 35 | wallet=alice_wallet, 36 | dest=dest_coldkey, 37 | amount=transfer_value, 38 | wait_for_finalization=True, 39 | wait_for_inclusion=True, 40 | ) 41 | # Account details after transfer 42 | balance_after = subtensor.get_balance(alice_wallet.coldkeypub.ss58_address) 43 | 44 | # Assert correct transfer calculations 45 | assert balance_before - transfer_fee - transfer_value == balance_after, ( 46 | f"Expected {balance_before - transfer_value - transfer_fee}, got {balance_after}" 47 | ) 48 | 49 | print("✅ Passed test_transfer") 50 | -------------------------------------------------------------------------------- /tests/helpers/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | from .helpers import ( # noqa: F401 3 | CLOSE_IN_VALUE, 4 | __mock_wallet_factory__, 5 | ) 6 | from bittensor_wallet.mock.wallet_mock import ( # noqa: F401 7 | get_mock_coldkey, 8 | get_mock_hotkey, 9 | get_mock_keypair, 10 | get_mock_wallet, 11 | ) 12 | 13 | 14 | def is_running_in_circleci(): 15 | """Checks that tests are running in the app.circleci.com environment.""" 16 | return os.getenv("CIRCLECI") == "true" 17 | -------------------------------------------------------------------------------- /tests/integration_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opentensor/bittensor/be9808dc030b443de1948988629b99461a689f4a/tests/integration_tests/__init__.py -------------------------------------------------------------------------------- /tests/integration_tests/test_metagraph_integration.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest import mock 3 | 4 | import torch 5 | 6 | import bittensor 7 | from bittensor.core.metagraph import METAGRAPH_STATE_DICT_NDARRAY_KEYS, get_save_dir 8 | from bittensor.utils.mock import MockSubtensor 9 | 10 | _subtensor_mock: MockSubtensor = MockSubtensor() 11 | 12 | 13 | def setUpModule(): 14 | _subtensor_mock.reset() 15 | _subtensor_mock.create_subnet(netuid=3) 16 | _subtensor_mock.set_difficulty(netuid=3, difficulty=0) # Set diff 0 17 | 18 | 19 | class TestMetagraph: 20 | def setup_method(self): 21 | self.sub = MockSubtensor() 22 | self.metagraph = bittensor.Metagraph(netuid=3, network="mock", sync=False) 23 | 24 | def test_print_empty(self): 25 | print(self.metagraph) 26 | 27 | def test_lite_sync(self): 28 | with mock.patch.object( 29 | self.sub, "get_metagraph_info", return_value=mock.MagicMock() 30 | ): 31 | self.metagraph.sync(lite=True, subtensor=self.sub) 32 | 33 | def test_full_sync(self): 34 | with mock.patch.object( 35 | self.sub, "get_metagraph_info", return_value=mock.MagicMock() 36 | ): 37 | self.metagraph.sync(lite=False, subtensor=self.sub) 38 | 39 | def 
test_sync_block_0(self): 40 | with mock.patch.object( 41 | self.sub, "get_metagraph_info", return_value=mock.MagicMock() 42 | ): 43 | self.metagraph.sync(lite=True, block=0, subtensor=self.sub) 44 | 45 | def test_load_sync_save(self): 46 | with ( 47 | mock.patch.object(self.sub, "neurons_lite", return_value=[]), 48 | mock.patch.object( 49 | self.sub, "get_metagraph_info", return_value=mock.MagicMock() 50 | ), 51 | ): 52 | self.metagraph.sync(lite=True, subtensor=self.sub) 53 | self.metagraph.save() 54 | self.metagraph.load() 55 | self.metagraph.save() 56 | 57 | def test_load_sync_save_from_torch(self): 58 | with ( 59 | mock.patch.object(self.sub, "neurons_lite", return_value=[]), 60 | mock.patch.object( 61 | self.sub, "get_metagraph_info", return_value=mock.MagicMock() 62 | ), 63 | ): 64 | self.metagraph.sync(lite=True, subtensor=self.sub) 65 | 66 | def deprecated_save_torch(metagraph): 67 | save_directory = get_save_dir(metagraph.network, metagraph.netuid) 68 | os.makedirs(save_directory, exist_ok=True) 69 | graph_filename = save_directory + f"/block-{metagraph.block.item()}.pt" 70 | state_dict = metagraph.state_dict() 71 | for key in METAGRAPH_STATE_DICT_NDARRAY_KEYS: 72 | state_dict[key] = torch.nn.Parameter( 73 | torch.tensor(state_dict[key]), requires_grad=False 74 | ) 75 | torch.save(state_dict, graph_filename) 76 | 77 | deprecated_save_torch(self.metagraph) 78 | self.metagraph.load() 79 | 80 | def test_state_dict(self): 81 | self.metagraph.load() 82 | state = self.metagraph.state_dict() 83 | assert "version" in state 84 | assert "n" in state 85 | assert "block" in state 86 | assert "stake" in state 87 | assert "ranks" in state 88 | assert "trust" in state 89 | assert "consensus" in state 90 | assert "validator_trust" in state 91 | assert "incentive" in state 92 | assert "emission" in state 93 | assert "dividends" in state 94 | assert "active" in state 95 | assert "last_update" in state 96 | assert "validator_permit" in state 97 | assert "weights" in state 98 | assert "bonds" in state 99 | assert "uids" in state 100 | 101 | def test_properties(self): 102 | metagraph = self.metagraph 103 | metagraph.hotkeys 104 | metagraph.coldkeys 105 | metagraph.addresses 106 | metagraph.validator_trust 107 | metagraph.S 108 | metagraph.R 109 | metagraph.I 110 | metagraph.E 111 | metagraph.C 112 | metagraph.T 113 | metagraph.Tv 114 | metagraph.D 115 | metagraph.B 116 | metagraph.W 117 | -------------------------------------------------------------------------------- /tests/integration_tests/test_timelock.py: -------------------------------------------------------------------------------- 1 | import struct 2 | import time 3 | 4 | import pytest 5 | 6 | from bittensor.core import timelock 7 | 8 | 9 | def test_encrypt_returns_valid_tuple(): 10 | """Test that encrypt() returns a (bytes, int) tuple.""" 11 | encrypted, reveal_round = timelock.encrypt("Bittensor", n_blocks=1) 12 | assert isinstance(encrypted, bytes) 13 | assert isinstance(reveal_round, int) 14 | assert reveal_round > 0 15 | 16 | 17 | def test_encrypt_with_fast_block_time(): 18 | """Test encrypt() with fast-blocks mode (block_time = 0.25s).""" 19 | encrypted, reveal_round = timelock.encrypt("Fast mode", 5, block_time=0.25) 20 | assert isinstance(encrypted, bytes) 21 | assert isinstance(reveal_round, int) 22 | 23 | 24 | def test_decrypt_returns_bytes_or_none(): 25 | """Test that decrypt() returns bytes after reveal round, or None before.""" 26 | data = b"Decode me" 27 | encrypted, reveal_round = timelock.encrypt(data, 1) 28 | 29 | 
current_round = timelock.get_latest_round() 30 | if current_round < reveal_round: 31 | decrypted = timelock.decrypt(encrypted) 32 | assert decrypted is None 33 | else: 34 | decrypted = timelock.decrypt(encrypted) 35 | assert decrypted == data 36 | 37 | 38 | def test_decrypt_raises_if_no_errors_false_and_invalid_data(): 39 | """Test that decrypt() raises an error on invalid data when no_errors=False.""" 40 | with pytest.raises(Exception): 41 | timelock.decrypt(b"corrupt data", no_errors=False) 42 | 43 | 44 | def test_decrypt_with_return_str(): 45 | """Test decrypt() with return_str=True returns a string.""" 46 | plaintext = "Stringified!" 47 | encrypted, _ = timelock.encrypt(plaintext, 1, block_time=0.25) 48 | result = timelock.decrypt(encrypted, no_errors=True, return_str=True) 49 | if result is not None: 50 | assert isinstance(result, str) 51 | 52 | 53 | def test_get_latest_round_is_monotonic(): 54 | """Test that get_latest_round() is monotonic over time.""" 55 | r1 = timelock.get_latest_round() 56 | time.sleep(3) 57 | r2 = timelock.get_latest_round() 58 | assert r2 >= r1 59 | 60 | 61 | def test_wait_reveal_and_decrypt_auto_round(): 62 | """Test wait_reveal_and_decrypt() without explicit reveal_round.""" 63 | msg = "Reveal and decrypt test" 64 | encrypted, _ = timelock.encrypt(msg, 1) 65 | result = timelock.wait_reveal_and_decrypt(encrypted, return_str=True) 66 | assert result == msg 67 | 68 | 69 | def test_wait_reveal_and_decrypt_manual_round(): 70 | """Test wait_reveal_and_decrypt() with explicit reveal_round.""" 71 | msg = "Manual round decryption" 72 | encrypted, reveal_round = timelock.encrypt(msg, 1) 73 | result = timelock.wait_reveal_and_decrypt(encrypted, reveal_round, return_str=True) 74 | assert result == msg 75 | 76 | 77 | def test_unpack_reveal_round_struct(): 78 | """Test that reveal_round can be extracted from encrypted data.""" 79 | encrypted, reveal_round = timelock.encrypt("parse test", 1) 80 | parsed = struct.unpack( 81 | "