class TTError(Exception):
    """Root exception type for tt-flash errors.

    Every error raised by this package derives from this class, so
    callers can handle any tt-flash failure with a single
    ``except TTError`` clause.
    """
2 | *.egg-info 3 | my-env/ 4 | .env/ 5 | .ignored/ 6 | __pycache__/ 7 | .vscode/ 8 | build/ 9 | -------------------------------------------------------------------------------- /debian/rules: -------------------------------------------------------------------------------- 1 | #!/usr/bin/make -f 2 | 3 | %: 4 | dh $@ \ 5 | --buildsystem=pybuild \ 6 | --with=python3 7 | echo "Done Stage 1" 8 | 9 | override_dh_auto_test: 10 | # Skip tests during package build 11 | 12 | -------------------------------------------------------------------------------- /tt_flash/error.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | 5 | class TTError(Exception): 6 | """Base class for exceptions in this module.""" 7 | 8 | pass 9 | -------------------------------------------------------------------------------- /LICENSE_understanding.txt: -------------------------------------------------------------------------------- 1 | For the avoidance of doubt, this software assists in programming Tenstorrent products. 2 | 3 | However, making, using, or selling hardware, models, or IP may require the license of rights (such as patent rights) from Tenstorrent or others. 
4 | -------------------------------------------------------------------------------- /tt_flash/data/blackhole/fw_defines.yaml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | MSG_TYPE_ARC_STATE0: 0xa0 5 | MSG_TYPE_ARC_STATE3: 0xa3 6 | MSG_CONFIRM_FLASHED_SPI: 0xc4 7 | -------------------------------------------------------------------------------- /tt_flash/data/grayskull/fw_defines.yaml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | MSG_TYPE_ARC_STATE0: 0xa0 5 | MSG_TYPE_ARC_STATE3: 0xa3 6 | MSG_TYPE_FW_VERSION: 0xb9 7 | -------------------------------------------------------------------------------- /tt_flash/data/wormhole/fw_defines.yaml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | MSG_TYPE_ARC_STATE0: 0xa0 5 | MSG_TYPE_ARC_STATE3: 0xa3 6 | MSG_TYPE_FW_VERSION: 0xb9 7 | MSG_TRIGGER_SPI_COPY_LtoR: 0x50 8 | -------------------------------------------------------------------------------- /.github/workflows/community-issue-tagging.yml: -------------------------------------------------------------------------------- 1 | name: "Auto-label Community Issues" 2 | 3 | on: 4 | issues: 5 | types: [opened] 6 | pull_request: 7 | types: [opened] 8 | 9 | permissions: 10 | contents: read 11 | issues: write 12 | pull-requests: write 13 | 14 | jobs: 15 | community-labeling: 16 | uses: tenstorrent/tt-github-actions/.github/workflows/issues-community-autotag.yml@main 17 | secrets: 18 | AUTOLABEL_COMMUNITY_ISSUES: ${{ secrets.AUTOLABEL_COMMUNITY_ISSUES }} 19 | -------------------------------------------------------------------------------- /Makefile: 
#!/usr/bin/env python3
"""
Compatibility setup.py for Ubuntu 22.04 packaging tools.

Reads the static project version out of pyproject.toml so the built
package always matches the canonical project metadata.
"""
from setuptools import setup, find_packages

try:
    # tomllib is in the standard library from Python 3.11 onwards.
    import tomllib as toml_reader
except ModuleNotFoundError:
    # Older interpreters use the tomli backport, which pyproject.toml
    # declares as a dependency for python_version < '3.11'.
    import tomli as toml_reader

if __name__ == "__main__":
    with open("pyproject.toml", "rb") as f:
        toml_data = toml_reader.load(f)

    setup(
        # Fallback metadata for older setuptools versions that cannot
        # read pyproject.toml themselves.
        name="tt-flash",
        version=toml_data["project"]["version"],
        packages=find_packages(),
        python_requires=">=3.10",
        entry_points={
            "console_scripts": [
                # Must match [project.scripts] in pyproject.toml: the CLI
                # entry point is tt_flash.main:main — tt_flash/__init__.py
                # only exposes __version__ and has no main().
                "tt-flash = tt_flash.main:main",
            ]
        },
    )
13 | python3-pip 14 | Standards-Version: 4.5.0 15 | Homepage: https://github.com/tenstorrent/tt-flash 16 | Vcs-Git: https://github.com/tenstorrent/tt-flash.git 17 | Vcs-Browser: https://github.com/tenstorrent/tt-flash 18 | 19 | Package: tt-flash 20 | Architecture: all 21 | Essential: no 22 | Depends: 23 | ${misc:Depends}, 24 | ${python3:Depends}, 25 | python3-pyluwen, 26 | Description: Tenstorrent Firmware Flash tool 27 | # tt-flash 28 | . 29 | This is a utility to flash firmware blobs to tenstorrent devices. 30 | . 31 | ## Official Repository 32 | [https://github.com/tenstorrent/tt-flash](https://github.com/tenstorrent/tt-flash) 33 | 34 | -------------------------------------------------------------------------------- /debian/copyright: -------------------------------------------------------------------------------- 1 | Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ 2 | Upstream-Name: tt-flash 3 | Source: https://github.com/tenstorrent/tt-flash 4 | 5 | Files: * 6 | Copyright: 2025 Tenstorrent Inc. 7 | License: Apache-2.0 8 | 9 | License: Apache-2.0 10 | Licensed under the Apache License, Version 2.0 (the "License"); 11 | you may not use this file except in compliance with the License. 12 | You may obtain a copy of the License at 13 | . 14 | https://www.apache.org/licenses/LICENSE-2.0 15 | . 16 | Unless required by applicable law or agreed to in writing, software 17 | distributed under the License is distributed on an "AS IS" BASIS, 18 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 19 | See the License for the specific language governing permissions and 20 | limitations under the License. 21 | . 22 | On Debian systems, the complete text of the Apache version 2.0 license 23 | can be found in "/usr/share/common-licenses/Apache-2.0". 
24 | -------------------------------------------------------------------------------- /.github/workflows/build-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Build Python 🐍 distribution 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | inputs: 7 | ref: 8 | description: 'Git ref to build from' 9 | required: true 10 | type: string 11 | 12 | jobs: 13 | # Build wheel for Python 14 | build-wheel: 15 | name: Build wheel for Python 16 | runs-on: ubuntu-latest 17 | steps: 18 | - name: Checkout 19 | uses: actions/checkout@v4 20 | with: 21 | ref: ${{ inputs.ref }} 22 | fetch-tags: true 23 | fetch-depth: 0 24 | - name: Set up Python 3.10 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: "3.10" 28 | - name: Build release distributions 29 | run: | 30 | python -m pip install build 31 | python -m build 32 | 33 | - name: Upload distributions 34 | uses: actions/upload-artifact@v4 35 | with: 36 | name: release-dists 37 | path: dist/ 38 | 39 | -------------------------------------------------------------------------------- /.github/workflows/build-all.yml: -------------------------------------------------------------------------------- 1 | name: Build All 2 | run-name: Build All 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | inputs: 7 | ref: 8 | default: ${{ github.ref }} 9 | required: false 10 | type: string 11 | MAJOR: 12 | required: false 13 | type: string 14 | MINOR: 15 | required: false 16 | type: string 17 | PATCH: 18 | required: false 19 | type: string 20 | NUMBER_OF_COMMITS_SINCE_TAG: 21 | required: false 22 | type: string 23 | 24 | jobs: 25 | build_all_depends: 26 | name: Build All the packages! 27 | runs-on: ubuntu-latest 28 | steps: 29 | - run: echo "Ok, so here we go for building *ALL* the packages. I said *ALL* of them now!" 30 | - run: | 31 | echo "What started this ref: ${{ github.ref }}" 32 | echo "What started this sha: ${{ github.sha }}" 33 | echo "Did we get a ref pass in? 
${{ inputs.ref }}" 34 | builddebian: 35 | needs: 36 | - build_all_depends 37 | name: Build Ubuntu / Debian 38 | uses: ./.github/workflows/build-debian.yml 39 | with: 40 | ref: ${{ inputs.ref }} 41 | MAJOR: ${{ inputs.MAJOR }} 42 | MINOR: ${{ inputs.MINOR }} 43 | PATCH: ${{ inputs.PATCH }} 44 | NUMBER_OF_COMMITS_SINCE_TAG: ${{ inputs.NUMBER_OF_COMMITS_SINCE_TAG }} 45 | secrets: inherit 46 | buildpypi: 47 | needs: 48 | - build_all_depends 49 | uses: ./.github/workflows/build-pypi.yml 50 | with: 51 | ref: ${{ inputs.ref }} 52 | secrets: inherit 53 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
7 | 8 | ## 3.4.0 - 30/07/25 9 | 10 | - Bump pyyaml 6.0.1 -> 6.0.2 11 | - Improve error message formatting 12 | - No longer have to use --force for flashing BH cards 13 | 14 | ## 3.3.5 - 03/07/25 15 | 16 | - Bump luwen 0.7.3 -> 0.7.5 17 | 18 | ## 3.3.4 - 02/07/25 19 | 20 | - Bump tt-tools-common 1.4.16 -> 1.4.17 21 | - Bump luwen 0.6.4 -> 0.7.3 22 | 23 | ## 3.3.3 - 05/06/2025 24 | 25 | - Bumped tt-tools-common version to fix driver version check for compatability with tt-kmd 2.0.0 26 | 27 | ## 3.3.2 - 14/05/2025 28 | 29 | - Bump tt-tools-common version to latest 30 | 31 | ## 3.2.0 - 12/03/2025 32 | 33 | ### Updated 34 | 35 | - luwen version bump to bring inline with tt-smi; provides stability fixes 36 | 37 | ## 3.1.3 - 06/03/2025 38 | 39 | ### Added 40 | 41 | - luwen version bump to include bh arc init checks 42 | 43 | ## 3.1.2 - 28/02/2025 44 | 45 | ### Added 46 | 47 | - Support for more BH cards: p100a, p150, and p150c 48 | 49 | ## 3.1.1 - 06/01/2025 50 | 51 | ### Updated 52 | 53 | - Bumped luwen version to accomodate Maturin updates 54 | 55 | ## 3.1.0 - 29/10/2024 56 | 57 | ### Added 58 | 59 | - Support for flashing the BH tt-boot-fs file format 60 | - Bumped luwen version to 0.4.6 to allow resets when chip is inaccessible 61 | 62 | ## 3.0.2 - 17/10/2024 63 | 64 | ### Fixed 65 | - Unbound variable when exception is thrown when getting current fw-version 66 | 67 | ## 3.0.1 - 16/10/2024 68 | 69 | ### Changed 70 | - Bumped luwen version to 0.4.5 to resolve false positives on bad chip detection 71 | 72 | ## 3.0.0 - 23/08/2024 73 | 74 | - NO BREAKING CHANGES! Major version bump to signify new generation of product. 
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
# SPDX-License-Identifier: Apache-2.0

# Adapted from https://github.com/python-poetry/poetry/issues/273#issuecomment-1877789967
# This will get the semantic version from the current pyproject package definition.

from typing import Any

try:
    import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
    # Backport for interpreters that predate importlib.metadata (< 3.8).
    import importlib_metadata
from pathlib import Path

# Cached version string; "unknown" means "not resolved yet".
__package_version = "unknown"


def __get_package_version() -> str:
    """Find the version of this package."""
    global __package_version

    if __package_version != "unknown":
        # We already set it at some point in the past,
        # so return that previous value without any
        # extra work.
        return __package_version

    try:
        # Try to get the version of the current package if
        # it is running from a distribution.
        __package_version = importlib_metadata.version("tt-flash")
    except importlib_metadata.PackageNotFoundError:
        # NOTE: must use the importlib_metadata alias here — the
        # ``import importlib.metadata as importlib_metadata`` above binds
        # only the alias, so referencing ``importlib.metadata`` directly
        # raised NameError whenever the package was not installed.
        #
        # Fall back on getting it from a local pyproject.toml.
        # This works in a development environment where the
        # package has not been installed from a distribution.
        try:
            # This gets added to the standard library as tomllib in Python3.11
            # therefore we expect to hit a ModuleNotFoundError.
            import tomli as toml
        except ModuleNotFoundError:
            import tomllib as toml

        pyproject_toml_file = Path(__file__).parent.parent / "pyproject.toml"
        if pyproject_toml_file.exists() and pyproject_toml_file.is_file():
            __package_version = toml.loads(pyproject_toml_file.read_text())["project"][
                "version"
            ]
            # Indicate it might be locally modified or unreleased.
            __package_version = __package_version + "+"

    return __package_version


def __getattr__(name: str) -> Any:
    """Get package attributes."""
    if name == "__version__":
        return __get_package_version()
    else:
        raise AttributeError(f"No attribute {name} in module {__name__}.")
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
# SPDX-License-Identifier: Apache-2.0

import ctypes
from dataclasses import dataclass

from tt_flash.boot_fs import tt_boot_fs_fd
from tt_flash.error import TTError
from . import boot_fs

from tt_flash.chip import BhChip


@dataclass
class FlashWrite:
    """A pending SPI flash write: image bytes and their destination offset."""

    # Destination offset within the SPI flash.
    offset: int
    # Mutable image data to write at ``offset``.
    write: bytearray


def writeback_boardcfg(chip: BhChip, writes: list[FlashWrite]) -> list[FlashWrite]:
    """Copy the boardcfg currently on the chip into the outgoing image.

    Preserves board-specific configuration across a firmware flash: the
    'boardcfg' section read back from SPI replaces the 'boardcfg'
    section of the new image before it is flashed.

    Raises:
        TTError: if 'boardcfg' cannot be found on the chip or in the
            flash package.
    """
    # Locate boardcfg in the SPI flash currently on the chip.
    fd_in_spi = boot_fs.read_tag(
        lambda addr, size: chip.spi_read(addr, size), "boardcfg"
    )
    if fd_in_spi is None:
        raise TTError("Couldn't find boardcfg on chip")

    # Locate boardcfg within the pending writes.
    fd_to_flash = None
    boardcfg_write = None
    for write in writes:
        fd_to_flash = boot_fs.read_tag(
            lambda addr, size: write.write[addr : addr + size], "boardcfg"
        )
        if fd_to_flash is not None:
            boardcfg_write = write
            break
    if fd_to_flash is None:
        raise TTError("Couldn't find boardcfg in flash package")

    # Overwrite the new image's boardcfg descriptor with the on-chip one.
    fd_as_data = bytes(fd_in_spi[1])
    boardcfg_write.write[fd_to_flash[0] : fd_to_flash[0] + len(fd_as_data)] = fd_as_data

    # Re-read the patched image to verify the copy round-trips exactly.
    flashed_fd = boot_fs.read_tag(
        lambda addr, size: boardcfg_write.write[addr : addr + size], "boardcfg"
    )
    assert flashed_fd is not None, "boardcfg disappeared after write-back"
    assert flashed_fd[1] == fd_in_spi[1], f"{flashed_fd[1]} != {fd_in_spi[1]}"

    return writes


# Maps mask tags from the flash manifest to their handler functions.
TAG_HANDLERS = {"write-boardcfg": writeback_boardcfg}


def boot_fs_write(
    chip: BhChip, boardname_to_display: str, mask: list, writes: list[FlashWrite]
) -> list[FlashWrite]:
    """Apply every mask-selected tag handler to the pending flash writes.

    Args:
        chip: the chip being flashed.
        boardname_to_display: board name used only in error messages.
        mask: list of dicts, each carrying a 'tag' key naming a handler.
        writes: pending flash writes; handlers may modify or replace them.

    Returns:
        The (possibly modified) list of pending writes.

    Raises:
        TTError: if a mask entry is malformed or names an unknown tag.
    """
    param_handlers = []
    for v in mask:
        tag = v.get("tag", None)

        if tag is None or not isinstance(tag, str):
            raise TTError(
                f"Invalid mask format for {boardname_to_display}; expected to see a list of dicts with keys 'tag'"
            )

        if tag in TAG_HANDLERS:
            param_handlers.append(TAG_HANDLERS[tag])
        else:
            if len(TAG_HANDLERS) > 0:
                # Build a human-readable "'a', 'b', or 'c'" list of valid tags.
                pretty_tags = [f"'{x}'" for x in TAG_HANDLERS.keys()]
                pretty_tags[-1] = f"or {pretty_tags[-1]}"
                raise TTError(
                    f"Invalid tag {tag} for {boardname_to_display}; expected to see one of {pretty_tags}"
                )
            else:
                raise TTError(
                    f"Invalid tag {tag} for {boardname_to_display}; there aren't any tags defined!"
                )

    for handler in param_handlers:
        writes = handler(chip, writes)

    return writes
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
# SPDX-License-Identifier: Apache-2.0

from typing import Callable, Optional, Tuple
import ctypes

# Well-known addresses within the tt-boot-fs SPI layout.
TT_BOOT_FS_FD_HEAD_ADDR = 0x0
TT_BOOT_FS_SECURITY_BINARY_FD_ADDR = 0x3FE0
TT_BOOT_FS_FAILOVER_HEAD_ADDR = 0x4000
# Fixed size (bytes) of the image-tag field in a file descriptor.
IMAGE_TAG_SIZE = 8


class ExtendedStructure(ctypes.Structure):
    """ctypes.Structure with field-wise equality and a readable repr."""

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        for field in self._fields_:
            field_name = field[0]

            self_value = getattr(self, field_name)
            other_value = getattr(other, field_name)

            # Handle comparison for ctypes.Array fields
            if isinstance(self_value, ctypes.Array):
                if len(self_value) != len(other_value):
                    return False
                for i in range(len(self_value)):
                    if self_value[i] != other_value[i]:
                        return False
            else:
                if self_value != other_value:
                    return False
        return True

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        field_strings = []
        for field in self._fields_:
            field_name = field[0]

            field_value = getattr(self, field_name)

            # Handle string representation for ctypes.Array fields
            if isinstance(field_value, ctypes.Array):
                array_str = ", ".join(str(x) for x in field_value)
                field_strings.append(f"{field_name}=[{array_str}]")
            else:
                field_strings.append(f"{field_name}={field_value}")

        fields_repr = ", ".join(field_strings)
        return f"{self.__class__.__name__}({fields_repr})"


class ExtendedUnion(ctypes.Union):
    """ctypes.Union with field-wise equality and a readable repr."""

    def __eq__(self, other):
        for fld in self._fields_:
            if getattr(self, fld[0]) != getattr(other, fld[0]):
                return False
        return True

    def __ne__(self, other):
        for fld in self._fields_:
            if getattr(self, fld[0]) != getattr(other, fld[0]):
                return True
        return False

    def __repr__(self):
        field_strings = []
        for field in self._fields_:
            field_name = field[0]

            field_value = getattr(self, field_name)
            field_strings.append(f"{field_name}={field_value}")
        fields_repr = ", ".join(field_strings)
        return f"{self.__class__.__name__}({fields_repr})"


# Define fd_flags structure
class fd_flags(ExtendedStructure):
    _fields_ = [
        ("image_size", ctypes.c_uint32, 24),
        ("invalid", ctypes.c_uint32, 1),
        ("executable", ctypes.c_uint32, 1),
        ("fd_flags_rsvd", ctypes.c_uint32, 6),
    ]


# Define fd_flags union
class fd_flags_u(ExtendedUnion):
    _fields_ = [("val", ctypes.c_uint32), ("f", fd_flags)]


# Define security_fd_flags structure
class security_fd_flags(ExtendedStructure):
    _fields_ = [
        ("signature_size", ctypes.c_uint32, 12),
        ("sb_phase", ctypes.c_uint32, 8),  # 0 - Phase0A, 1 - Phase0B
    ]


# Define security_fd_flags union
class security_fd_flags_u(ExtendedUnion):
    _fields_ = [("val", ctypes.c_uint32), ("f", security_fd_flags)]


# Define tt_boot_fs_fd structure (File descriptor)
class tt_boot_fs_fd(ExtendedStructure):
    _fields_ = [
        ("spi_addr", ctypes.c_uint32),
        ("copy_dest", ctypes.c_uint32),
        ("flags", fd_flags_u),
        ("data_crc", ctypes.c_uint32),
        ("security_flags", security_fd_flags_u),
        ("image_tag", ctypes.c_uint8 * IMAGE_TAG_SIZE),
        ("fd_crc", ctypes.c_uint32),
    ]

    def image_tag_str(self) -> str:
        """Decode the image-tag bytes as a string, stopping at NUL.

        Each element of ``image_tag`` is an int (ctypes.c_uint8), so the
        terminator check must compare against the integer 0 — the old
        comparison against the string "\\0" was never true, which made
        every tag shorter than IMAGE_TAG_SIZE include trailing NULs and
        fail exact-match lookups in read_tag().
        """
        output = ""
        for c in self.image_tag:
            if c == 0:
                break
            output += chr(c)
        return output


def read_fd(reader: Callable[[int, int], bytes], addr: int) -> tt_boot_fs_fd:
    """Read one file descriptor from ``addr`` using ``reader(addr, size)``."""
    fd = reader(addr, ctypes.sizeof(tt_boot_fs_fd))
    return tt_boot_fs_fd.from_buffer_copy(fd)


def read_tag(
    reader: Callable[[int, int], bytes], tag: str
) -> Optional[Tuple[int, tt_boot_fs_fd]]:
    """Walk the descriptor table from address 0 looking for ``tag``.

    Returns:
        (address, descriptor) of the first descriptor whose image tag
        equals ``tag``, or None if an invalid descriptor (end of table)
        is reached first.
    """
    curr_addr = 0
    while True:
        fd = read_fd(reader, curr_addr)

        # An invalid descriptor marks the end of the table.
        if fd.flags.f.invalid != 0:
            return None

        if fd.image_tag_str() == tag:
            return curr_addr, fd

        curr_addr += ctypes.sizeof(tt_boot_fs_fd)
urgency=medium 13 | 14 | [ Petra Alexson ] 15 | * Bump tt-tools-common to 1.4.33 16 | 17 | [ Tenstorrent Releases ] 18 | * pyproject.toml- updating version to 3.4.12 19 | 20 | -- Tenstorrent Releases Mon, 08 Dec 2025 16:40:39 +0000 21 | 22 | tt-flash (3.4.11) noble; urgency=medium 23 | 24 | [ Samridhi Bansal ] 25 | * Chore: bump pyluwen to include bh arc msg fix 26 | 27 | [ Tenstorrent Releases ] 28 | * pyproject.toml- updating version to 3.4.11 29 | 30 | -- Tenstorrent Releases Fri, 28 Nov 2025 16:34:55 +0000 31 | 32 | tt-flash (3.4.10) noble; urgency=medium 33 | 34 | [ Petra Alexson ] 35 | * Bump pyluwen version to 0.7.15 36 | 37 | [ Tenstorrent Releases ] 38 | * pyproject.toml- updating version to 3.4.10 39 | 40 | -- Tenstorrent Releases Thu, 27 Nov 2025 18:45:35 +0000 41 | 42 | tt-flash (3.4.9) noble; urgency=medium 43 | 44 | [ Petra Alexson ] 45 | * Bump tt-tools-common version to 1.4.32 46 | 47 | [ Tenstorrent Releases ] 48 | * pyproject.toml- updating version to 3.4.9 49 | 50 | -- Tenstorrent Releases Wed, 26 Nov 2025 20:32:24 +0000 51 | 52 | tt-flash (3.4.8) noble; urgency=medium 53 | 54 | [ Petra Alexson ] 55 | * Remove legacy code needed for flashing Galaxy 4U 56 | * if FW version is 80.X.Y.Z, consider X the major version 57 | * fix: enable reset after flash for WH UBB systems 58 | 59 | [ Zakhary Kaplan ] 60 | * build(deps): bump pyluwen from 0.7.12 to 0.7.14 61 | 62 | [ Petra Alexson ] 63 | * Remove all references to Grayskull 64 | 65 | [ Tenstorrent Releases ] 66 | * pyproject.toml- updating version to 3.4.8 67 | 68 | -- Tenstorrent Releases Wed, 26 Nov 2025 18:20:26 +0000 69 | 70 | tt-flash (3.4.7) noble; urgency=medium 71 | 72 | [ Daniel DeGrasse ] 73 | * support partial writes when address offsets are provided in image binary 74 | 75 | [ Petra Alexson ] 76 | * Add more ways of providing fwbundle file as an arg 77 | * Warn if deprecated --fw-tar arg is used 78 | 79 | [ Jonathan Baker ] 80 | * Updated to automatic community labeling v2 81 | 82 | [ 
Petra Alexson ] 83 | * Update README.md 84 | 85 | [ Daniel DeGrasse ] 86 | * support manifest versions up to 2.0.0, add validation code for 2.0.0 87 | * fallback to GPIO strap detection for left/right chip on blackhole 88 | * delay additional time during resets if updating across major versions 89 | * prevent downgrades across major version 90 | * confirm blackhole flash is valid using ARC message 91 | * updated pyproject requirements to pull in new tools 92 | 93 | [ Tenstorrent Releases ] 94 | * pyproject.toml- updating version to 3.4.7 95 | 96 | -- Tenstorrent Releases Fri, 17 Oct 2025 20:22:29 +0000 97 | 98 | tt-flash (3.4.6) noble; urgency=medium 99 | 100 | [ Petra Alexson ] 101 | * Add board type for Blackhole Galaxy UBB 102 | * Add Galaxy UBB reset 103 | * Bump tt-tools-common version to 1.4.29 104 | 105 | [ Tenstorrent Releases ] 106 | * pyproject.toml- updating version to 3.4.6 107 | 108 | -- Tenstorrent Releases Tue, 23 Sep 2025 20:09:52 +0000 109 | 110 | tt-flash (3.4.5) noble; urgency=medium 111 | 112 | [ John 'Warthog9' Hawley ] 113 | * Fixing older setuptools again 114 | * Workflows - Debian Builds: include changes files in artifacts 115 | * Setting minimal python version to 3.10 or newer 116 | 117 | [ Tenstorrent Releases ] 118 | * pyproject.toml- updating version to 3.4.5 119 | 120 | -- Tenstorrent Releases Mon, 15 Sep 2025 19:56:08 +0000 121 | 122 | tt-flash (3.4.4) noble; urgency=medium 123 | 124 | [ Petra Alexson ] 125 | * Strictly require latest version of tt-tools-common and pyluwen 126 | 127 | [ Tenstorrent Releases ] 128 | * pyproject.toml- updating version to 3.4.4 129 | 130 | -- Tenstorrent Releases Wed, 10 Sep 2025 16:14:35 +0000 131 | 132 | tt-flash (3.4.3) noble; urgency=medium 133 | 134 | [ John 'Warthog9' Hawley ] 135 | * Fixing build process 136 | * Chasing down that signing problem 137 | * build-debian: Fixing number of commits counter 138 | 139 | [ Tenstorrent Releases ] 140 | * pyproject.toml- updating version to 3.4.3 141 | 142 | 
-- Tenstorrent Releases Mon, 18 Aug 2025 23:07:30 +0000 143 | 144 | tt-flash (3.4.2) noble; urgency=medium 145 | 146 | [ Samridhi Bansal ] 147 | * feat: Added p300 support to flash - Identify L and R chip before flashing 148 | 149 | [ Tenstorrent Releases ] 150 | * pyproject.toml- updating version to 3.4.2 151 | 152 | -- Tenstorrent Releases Fri, 08 Aug 2025 15:34:26 +0000 153 | 154 | tt-flash (3.4.1) noble; urgency=medium 155 | 156 | [ John 'Warthog9' Hawley ] 157 | * Adjust dependencies to reflect off pypi now 158 | * Lets get the release pipelines and builds all going! 159 | 160 | [ Tenstorrent Releases ] 161 | * pyproject.toml- updating version to 3.4.1 162 | 163 | -- Tenstorrent Releases Tue, 05 Aug 2025 18:53:08 +0000 164 | 165 | tt-flash (3.1.1-1) stable; urgency=medium 166 | 167 | [ John 'Warthog9' Hawley ] 168 | * Start of Packaging changelog 169 | 170 | -- Tenstorrent Releases Tue, 05 Aug 2025 23:20:43 +0000 171 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # tt-flash 2 | 3 | This is a utility to flash firmware blobs to tenstorrent devices. 4 | 5 | Flash firmware on all devices on a system using one command: 6 | 7 | ``` 8 | tt-flash 9 | ``` 10 | 11 | ## Official Repository 12 | 13 | [https://github.com/tenstorrent/tt-flash](https://github.com/tenstorrent/tt-flash) 14 | 15 | ## Getting started 16 | ### Install Rust (if you don't already have it) 17 | If Rust isn't already installed on your system, you can install it through either of the following methods: 18 | 19 | #### Using Distribution packages (preferred) 20 | * **Fedora / EL9:**
`sudo dnf install cargo` 21 | * **Ubuntu / Debian:**
`sudo apt install cargo` 22 | #### Using Rustup 23 | ``` 24 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 25 | source "$HOME/.cargo/env" 26 | ``` 27 | 28 | ### User installation 29 | tt-flash is available on pypi and can be installed using pip. 30 | 31 | ``` 32 | pip install tt-flash 33 | ``` 34 | 35 | #### (Optional) Virtual environment 36 | 37 | If you aren't doing 38 | this as a system-level install, a virtual environment is recommended. 39 | 40 | ``` 41 | python -m venv .venv 42 | source .venv/bin/activate 43 | ``` 44 | 45 | 46 | ### Developer installation 47 | #### Clone the repository 48 | ``` 49 | git clone https://github.com/tenstorrent/tt-flash.git 50 | cd tt-flash 51 | ``` 52 | 53 | #### Building the repository 54 | ``` 55 | pip install . 56 | ``` 57 | 58 | or for users who would like to edit the code without re-building 59 | 60 | ``` 61 | pip install --editable . 62 | ``` 63 | 64 | ### Help text 65 | Use the `-h` argument to print the help text. 66 | 67 | ``` 68 | $ tt-flash -h 69 | 70 | usage: tt-flash [-h] [-v] [--sys-config SYS_CONFIG] [--no-color] [--no-tty] {flash,verify} ... 71 | 72 | options: 73 | -h, --help show this help message and exit 74 | -v, --version show program's version number and exit 75 | --sys-config SYS_CONFIG 76 | Path to the pre generated sys-config json 77 | --no-color Disable the colorful output 78 | --no-tty Force disable the tty command output 79 | 80 | command: 81 | {flash,verify} 82 | flash Flash firmware to Tenstorrent devices on the system. Run tt-flash flash -h for further command-specific help. 83 | verify Verify the contents of the SPI. Will display the currently running and flashed bundle version of the fw and checksum the fw against either what was flashed previously according 84 | the the file system state, or a given fw bundle. 
In the case where a fw bundle or flash record are not provided the program will search known locations that the flash record 85 | may have been written to and exit with an error if it cannot be found or read. Run tt-flash verify -h for further command-specific help. 86 | ``` 87 | 88 | ``` 89 | $ tt-flash flash -h 90 | 91 | usage: tt-flash flash [-h] [--sys-config SYS_CONFIG] [--fw-tar FW_TAR] [--skip-missing-fw] [--force] [--no-reset] [fwbundle] 92 | 93 | positional arguments: 94 | fwbundle Path to the firmware bundle 95 | 96 | options: 97 | -h, --help show this help message and exit 98 | --sys-config SYS_CONFIG 99 | Path to the pre generated sys-config json 100 | --fw-tar FW_TAR Path to the firmware tarball (deprecated) 101 | --skip-missing-fw If the fw packages doesn't contain the fw for a detected board, continue flashing 102 | --force Force update the ROM 103 | --no-reset Do not reset devices at the end of flash 104 | ``` 105 | 106 | ## Typical usage 107 | ``` 108 | tt-flash 109 | ``` 110 | 111 | ### Firmware files 112 | Firmware files are licensed and distributed independently, as tt-flash solely acts as a utility to update devices with provided firmware images. You can find firmware bundles in a seperate repo at [https://github.com/tenstorrent/tt-firmware](https://github.com/tenstorrent/tt-firmware). 113 | 114 | ### Example output 115 | 116 | This is an example of what you can expect to see when you flash a device. 
117 | 118 | ``` 119 | $ tt-flash ~/tt-firmware/latest.fwbundle 120 | 121 | Stage: SETUP 122 | Searching for default sys-config path 123 | Checking /etc/tenstorrent/config.json: not found 124 | Checking ~/.config/tenstorrent/config.json: not found 125 | 126 | Could not find config in default search locations, if you need it, either pass it in explicitly or generate one 127 | Warning: continuing without sys-config, galaxy systems will not be reset 128 | Stage: DETECT 129 | Stage: FLASH 130 | Sub Stage: VERIFY 131 | Verifying fw-package can be flashed: complete 132 | Verifying Blackhole[0] can be flashed 133 | Stage: FLASH 134 | Sub Stage FLASH Step 1: Blackhole[0] 135 | ROM version is: (18, 10, 0, 0). tt-flash version is: (18, 12, 0, 0) 136 | FW bundle version > ROM version. ROM will now be updated. 137 | Sub Stage FLASH Step 2: Blackhole[0] {p150a} 138 | Writing new firmware... (this may take up to 1 minute) 139 | Writing new firmware... SUCCESS 140 | Verifying flashed firmware... (this may also take up to 1 minute) 141 | Firmware verification... SUCCESS 142 | Stage: RESET 143 | Starting PCI link reset on BH devices at PCI indices: 0 144 | Waiting for up to 60 seconds for asic to come back after reset 145 | Config space reset completed for device 0 146 | Finishing PCI link reset on BH devices at PCI indices: 0 147 | FLASH SUCCESS 148 | ``` 149 | 150 | ## Supported products 151 | 152 | tt-flash can be used to flash Wormhole and Blackhole products. The last version that supported flashing Grayskull products was [v3.4.7](https://github.com/tenstorrent/tt-flash/releases/tag/v3.4.7). 
153 | 154 | ## License 155 | 156 | Apache 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt 157 | -------------------------------------------------------------------------------- /tt_flash/utility.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | import os 5 | 6 | from typing import Callable, Type, TYPE_CHECKING 7 | 8 | from base64 import b16decode 9 | 10 | if TYPE_CHECKING: 11 | from tt_flash.chip import TTChip 12 | 13 | try: 14 | from importlib.resources import files, as_file 15 | except (ModuleNotFoundError, ImportError): 16 | from importlib_resources import files, as_file 17 | import sys 18 | from typing import Optional 19 | 20 | from tt_tools_common.ui_common.themes import CMD_LINE_COLOR 21 | 22 | 23 | # Returns the root path of the package, so we can access data files and such 24 | def package_root_path(): 25 | return as_file(files("tt_flash")) 26 | 27 | 28 | # Get path of this script. 'frozen' means packaged with pyinstaller. 29 | def application_path(): 30 | if getattr(sys, "frozen", False): 31 | application_path = os.path.dirname(sys.executable) 32 | elif __file__: 33 | application_path = os.path.dirname(__file__) 34 | else: 35 | application_path = None 36 | return application_path 37 | 38 | 39 | def get_board_type(board_id: int, from_type: bool = False) -> Optional[str]: 40 | """ 41 | Get board type from board ID string. 
def change_to_public_name(codename: str) -> str:
    """Map an internal board codename to its public product name.

    Returns the codename unchanged when no public name is known for it.
    """
    name_map = {
        "E300_105": "e150",
        "E300_X2": "e300",
        "E75": "e75",
        "NEBULA_X1": "n150",
        "NEBULA_X2": "n300",
        "WH_UBB": "Galaxy Wormhole",
        "P100-1": "p100",
        "P150A-1": "p150a",
        "P150B-1": "p150b",
        "P150C-1": "p150c",
        # get_board_type() reports the p300 chip variants as "P300A-1",
        # "P300B-1" and "P300C-1".  The previous keys ("P300", "P300A",
        # "P300C") never matched any codename, so p300 boards fell through
        # and displayed their internal codename instead of "p300".
        "P300A-1": "p300",
        "P300B-1": "p300",
        "P300C-1": "p300",
        "GALAXY-1": "Galaxy Blackhole",
    }

    return name_map.get(codename, codename)


def semver_to_hex(semver: str) -> str:
    """Convert a semantic version string like "10.15.1" to hex "000a0f01".

    Layout is 0x00MMmmpp (major in bits 16-23), matching hex_to_semver
    below.  The previous docstring claimed 0x0A0F0100, which did not match
    what the code actually packs.
    """
    major, minor, patch = semver.split(".")
    byte_array = bytearray([0, int(major), int(minor), int(patch)])
    return f"{int.from_bytes(byte_array, byteorder='big'):08x}"


def date_to_hex(date: str) -> str:
    """Convert a date string "YYYYMMDDHHMM" to hex "YMDDHHMM".

    The year nibble is stored as an offset from 2020.  Note: *date* is a
    string (it is sliced below); the previous ``int`` annotation was wrong.
    """
    year = int(date[0:4]) - 2020
    month = int(date[4:6])
    day = int(date[6:8])
    hour = int(date[8:10])
    minute = int(date[10:12])
    byte_array = bytearray([year * 16 + month, day, hour, minute])
    return f"{int.from_bytes(byte_array, byteorder='big'):08x}"


def hex_to_semver(hexsemver: int) -> str:
    """Convert a version packed as 0x00MMmmpp (e.g. 0x000A0F01) to "10.15.1"."""
    major = hexsemver >> 16 & 0xFF
    minor = hexsemver >> 8 & 0xFF
    patch = hexsemver >> 0 & 0xFF
    return f"{major}.{minor}.{patch}"


def hex_to_date(hexdate: int) -> str:
    """Convert a date packed as 0xYMDDHHMM to the string "YYYY-MM-DD HH:MM"."""
    year = (hexdate >> 28 & 0xF) + 2020
    month = hexdate >> 24 & 0xF
    day = hexdate >> 16 & 0xFF
    hour = hexdate >> 8 & 0xFF
    minute = hexdate & 0xFF

    return f"{year:04}-{month:02}-{day:02} {hour:02}:{minute:02}"
self.force_no_tty) and sys.stdout.isatty() 190 | 191 | 192 | CConfig = CmdLineConfig(True, False) 193 | -------------------------------------------------------------------------------- /.github/workflows/build-debian.yml: -------------------------------------------------------------------------------- 1 | name: Build Ubuntu Packages 📦 2 | 3 | on: 4 | workflow_dispatch: 5 | workflow_call: 6 | inputs: 7 | ref: 8 | description: 'Git ref to build from' 9 | default: ${{ github.ref }} 10 | required: true 11 | type: string 12 | MAJOR: 13 | required: false 14 | type: string 15 | MINOR: 16 | required: false 17 | type: string 18 | PATCH: 19 | required: false 20 | type: string 21 | NUMBER_OF_COMMITS_SINCE_TAG: 22 | required: false 23 | type: string 24 | 25 | jobs: 26 | build-deb: 27 | name: Build ${{ matrix.os }} Package 28 | strategy: 29 | fail-fast: false 30 | matrix: 31 | os: 32 | - ubuntu-22.04 33 | - ubuntu-24.04 34 | - ubuntu-latest 35 | runs-on: ${{ matrix.os }} 36 | steps: 37 | - run: | 38 | echo "What started this ref: ${{ github.ref }}" 39 | echo "What started this ref: ${{ github.ref_name }}" 40 | echo "What started this sha: ${{ github.sha }}" 41 | echo "Did we get a ref pass in? 
${{ inputs.ref }}" 42 | - name: Figure out branch name 43 | id: ref_name 44 | run: | 45 | echo "ref_name=${{ github.ref_name }}" 46 | echo "ref_name=${{ github.ref_name }}" >> $GITHUB_ENV 47 | if [[ -n "${{ inputs.ref }}" ]] 48 | then 49 | echo "ref_name=${{ inputs.ref }}" 50 | echo "ref_name=${{ inputs.ref }}" >> $GITHUB_ENV 51 | fi 52 | - run: sudo apt update 53 | - name: Install build dependencies 54 | run: | 55 | sudo apt install -y \ 56 | build-essential \ 57 | debhelper \ 58 | dh-python \ 59 | dh-sequence-python3 \ 60 | git-buildpackage \ 61 | gnupg \ 62 | libpython3-all-dev \ 63 | pybuild-plugin-pyproject \ 64 | python3-all \ 65 | python3-pip \ 66 | python3-tomli 67 | - name: Import GPG key 68 | id: gpg_key_import 69 | uses: crazy-max/ghaction-import-gpg@v6 70 | with: 71 | gpg_private_key: ${{ secrets.PKG_SIGNING_KEY_DEB }} 72 | - run: gpg --list-keys 73 | - run: gpg --list-secret-keys 74 | - uses: actions/checkout@v4 75 | name: Checkout 76 | with: 77 | ref: ${{ inputs.ref }} 78 | fetch-tags: true 79 | fetch-depth: 0 80 | - name: Extract version from tag 81 | uses: damienaicheh/extract-version-from-tag-action@v1.3.0 82 | - run: git tag 83 | - name: Ok Figure out version 84 | run: | 85 | if [[ -n "${{ inputs.MAJOR }}" ]] 86 | then 87 | echo "MAJOR=${{ inputs.MAJOR }}" >> $GITHUB_ENV 88 | echo "MAJOR=${{ inputs.MAJOR }}" 89 | fi 90 | 91 | if [[ -n "${{ inputs.MINOR }}" ]] 92 | then 93 | echo "MINOR=${{ inputs.MINOR }}" >> $GITHUB_ENV 94 | echo "MINOR=${{ inputs.MINOR }}" 95 | fi 96 | 97 | if [[ -n "${{ inputs.PATCH }}" ]] 98 | then 99 | echo "PATCH=${{ inputs.PATCH }}" >> $GITHUB_ENV 100 | echo "PATCH=${{ inputs.PATCH }}" 101 | fi 102 | 103 | if [[ -n "${{ inputs.NUMBER_OF_COMMITS_SINCE_TAG }}" ]] 104 | then 105 | echo "NUMBER_OF_COMMITS_SINCE_TAG=${{ inputs.NUMBER_OF_COMMITS_SINCE_TAG }}" >> $GITHUB_ENV 106 | echo "NUMBER_OF_COMMITS_SINCE_TAG=${{ inputs.NUMBER_OF_COMMITS_SINCE_TAG }}" 107 | fi 108 | 109 | if [[ -n "${{ inputs.ref }}" ]] 110 | then 111 | echo 
"REF=${{ inputs.ref }}" >> $GITHUB_ENV 112 | echo "REF=${{ inputs.ref }}" 113 | else 114 | echo "REF=${{ github.ref_name }}" >> $GITHUB_ENV 115 | echo "REF=${{ github.ref_name }}" 116 | fi 117 | # Ok this needs to get up leveled somewhere so that it can be shared across Debian and Ubuntu obviously 118 | - run: ls -lah 119 | - run: | 120 | gbp dch \ 121 | --debian-branch ${ref_name} \ 122 | -R \ 123 | -N ${MAJOR}.${MINOR}.${PATCH} \ 124 | --spawn-editor=never 125 | env: 126 | EMAIL: releases@tenstorrent.com 127 | NAME: Tenstorrent Releases 128 | - run: | 129 | gbp buildpackage \ 130 | --git-ignore-new 131 | #continue-on-error: true 132 | env: 133 | DEBSIGN_KEYID: ${{ steps.gpg_key_import.outputs.fingerprint }} 134 | 135 | - name: Move Debian packages 136 | run: mkdir -p artifacts && mv ../*.deb artifacts/ 137 | - name: Copy Changelog & changes to artifacts 138 | run: | 139 | cp \ 140 | debian/changelog \ 141 | ../*.changes \ 142 | artifacts/ 143 | - name: Set environment variable for distro 144 | id: distro_ident 145 | run: | 146 | source /etc/os-release && \ 147 | #echo "DISTRO_IDENT=$( echo \"${NAME}\" | sed 's/"//g' | tr '[:upper:]' '[:lower:]' )-${VERSION_ID}" \ 148 | # >> $GITHUB_ENV 149 | echo "DISTRO_IDENT=${{ matrix.os }}" >> $GITHUB_ENV 150 | - name: Set environment variable for deb filename 151 | id: deb_filename 152 | run: echo "DEB_FILENAME=$(basename artifacts/*.deb)" >> $GITHUB_ENV 153 | - name: Set environment variable for deb filename 154 | id: deb_filename_distro 155 | run: echo DEB_FILENAME_DISTRO=$(basename artifacts/*.deb | sed "s/_all/_all-${{ env.DISTRO_IDENT }}/" ) >> $GITHUB_ENV 156 | - name: Set environment variable for deb changes file 157 | id: deb_filename_changes_distro 158 | run: echo DEB_FILENAME_CHANGES_DISTRO=$(basename artifacts/*.changes | sed "s/_amd64/_amd64-${{ env.DISTRO_IDENT }}/" ) >> $GITHUB_ENV 159 | - name: Upload Debian package 160 | uses: actions/upload-artifact@v4 161 | with: 162 | name: ${{ env.DEB_FILENAME_DISTRO }} 
163 | path: artifacts/${{ env.DEB_FILENAME }} 164 | - name: Upload Changelog 165 | uses: actions/upload-artifact@v4 166 | with: 167 | name: debian-changelog-${{ env.DISTRO_IDENT }} 168 | path: debian/changelog 169 | - name: Upload Artifacts 170 | uses: actions/upload-artifact@v4 171 | with: 172 | name: artifacts-${{ env.DISTRO_IDENT }}.zip 173 | path: artifacts/ 174 | - name: Upload Changes 175 | uses: actions/upload-artifact@v4 176 | with: 177 | name: ${{ env.DEB_FILENAME_CHANGES_DISTRO }} 178 | path: artifacts/*.changes 179 | -------------------------------------------------------------------------------- /tt_flash/main.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from typing import Optional 7 | 8 | import argparse 9 | import datetime 10 | import os 11 | import json 12 | import sys 13 | import tarfile 14 | from pathlib import Path 15 | 16 | import tt_flash 17 | from tt_flash import utility 18 | from tt_flash.error import TTError 19 | from tt_flash.utility import CConfig 20 | from tt_flash.flash import flash_chips 21 | 22 | from .chip import detect_local_chips 23 | 24 | # Make version available in --help 25 | with utility.package_root_path() as path: 26 | VERSION_FILE = path.joinpath(".ignored/version.txt") 27 | if os.path.isfile(VERSION_FILE): 28 | VERSION_STR = open(VERSION_FILE, "r").read().strip() 29 | VERSION_DATE = datetime.datetime.strptime(VERSION_STR[:10], "%Y-%m-%d").date() 30 | VERSION_HASH = int(VERSION_STR[-16:], 16) 31 | else: 32 | VERSION_STR = tt_flash.__version__ 33 | 34 | if __doc__ is None: 35 | __doc__ = f"Version: {VERSION_STR}" 36 | else: 37 | __doc__ = f"Version: {VERSION_STR}. 
{__doc__}" 38 | 39 | 40 | class ArgumentParseError(Exception): 41 | pass 42 | 43 | 44 | # A custom ArgumentParser which by default will raise an Exception on error 45 | # instead of exiting the program. 46 | EXIT_ON_ERROR = False 47 | 48 | 49 | class NoExitArgumentParser(argparse.ArgumentParser): 50 | def error(self, message): 51 | global EXIT_ON_ERROR 52 | 53 | if EXIT_ON_ERROR: 54 | self.print_help(sys.stderr) 55 | self.exit(2, "%s: error: %s\n" % (self.prog, message)) 56 | else: 57 | raise ArgumentParseError(message) 58 | 59 | 60 | def parse_args(): 61 | # Parse arguments 62 | parser = NoExitArgumentParser(description=__doc__) 63 | parser.add_argument( 64 | "-v", 65 | "--version", 66 | action="version", 67 | version=VERSION_STR, 68 | ) 69 | parser.add_argument( 70 | "--no-color", 71 | help="Disable the colorful output", 72 | default=False, 73 | action="store_true", 74 | ) 75 | parser.add_argument( 76 | "--no-tty", 77 | help="Force disable the tty command output", 78 | default=False, 79 | action="store_true", 80 | ) 81 | 82 | subparsers = parser.add_subparsers(title="command", dest="command", required=True) 83 | 84 | flash = subparsers.add_parser("flash", help="Flash firmware to Tenstorrent devices on the system. 
Run tt-flash flash -h for further command-specific help.") 85 | flash.add_argument( 86 | "fwbundle", 87 | nargs="?", 88 | help="Path to the firmware bundle", 89 | type=Path, 90 | ) 91 | flash.add_argument("--fw-tar", help="Path to the firmware tarball (deprecated)", type=Path) 92 | flash.add_argument( 93 | "--skip-missing-fw", 94 | help="If the fw packages doesn't contain the fw for a detected board, continue flashing", 95 | default=False, 96 | action="store_true", 97 | required=False, 98 | ) 99 | flash.add_argument( 100 | "--force", default=False, action="store_true", help="Force update the ROM" 101 | ) 102 | flash.add_argument( 103 | "--no-reset", 104 | help="Do not reset devices at the end of flash", 105 | default=False, 106 | action="store_true", 107 | ) 108 | 109 | verify = subparsers.add_parser( 110 | "verify", 111 | help="Verify the contents of the SPI.\nWill display the currently running and flashed bundle version of the fw and checksum the fw against either what was flashed previously according the the file system state, or a given fw bundle.\nIn the case where a fw bundle or flash record are not provided the program will search known locations that the flash record may have been written to and exit with an error if it cannot be found or read. 
Run tt-flash verify -h for further command-specific help.", 112 | ) 113 | config_group = verify.add_mutually_exclusive_group() 114 | config_group.add_argument( 115 | "fwbundle", 116 | nargs="?", 117 | help="Path to the firmware bundle", 118 | type=Path, 119 | ) 120 | config_group.add_argument("--fw-tar", help="Path to the firmware tarball (deprecated)", type=Path) 121 | verify.add_argument( 122 | "--skip-missing-fw", 123 | help="If the fw packages doesn't contain the fw for a detected board, continue flashing", 124 | default=False, 125 | action="store_true", 126 | required=False, 127 | ) 128 | flash.add_argument( 129 | "--allow-major-downgrades", default=False, action="store_true", help="Allow major version downgrades" 130 | ) 131 | 132 | cmd_args = sys.argv.copy()[1:] 133 | 134 | # So... I want to swap to having tt-flash respond to explicit subcommands 135 | # but to maintain backwards compatibility I had to make sure that the flash subcommand 136 | # would be assumed if none was given. 137 | 138 | # To start I have set the argument parser to no exit on failure 139 | try: 140 | # First try to parse the initial command line arguments 141 | parser.parse_args(args=cmd_args) 142 | 143 | # If it passes then we can continue as normal 144 | except ArgumentParseError: 145 | # But if it failed then insert flash into the first argument. 146 | # This is fine as long as flash remains a valid first argument. 147 | # This does not break -h or -v because they will have triggered the program 148 | # to exit during the initial parse_args call. 149 | cmd_args.insert(0, "flash") 150 | 151 | try: 152 | # Now try to parse the arguments after inserting the flash subcommand. 153 | parser.parse_args(args=cmd_args) 154 | except ArgumentParseError: 155 | # If we still fail then it's likely that we had a different problem than no 156 | # subcommand specified. So remove the inserted flash to make the error reflect 157 | # what the user entered. 
def load_manifest(path: str):
    """Open a firmware bundle and parse its manifest version.

    Returns a ``(tarfile, version)`` pair, where *version* is a
    ``(major, minor, patch)`` tuple of ints taken from ``manifest.json``
    inside the bundle.  The tarfile is returned open so the caller can
    extract firmware images from it.

    Raises TTError when the manifest is missing or its version string is
    malformed; the tarfile is closed before any exception propagates so the
    file handle does not leak (the previous version left it open on error).
    """
    tar = tarfile.open(path, "r")

    try:
        manifest_data = tar.extractfile("./manifest.json")
        if manifest_data is None:
            raise TTError(f"Could not find manifest in {path}")

        manifest = json.loads(manifest_data.read())
        version = manifest.get("version", None)
        if version is None:
            raise TTError(f"Could not find version in {path}/manifest.json")

        try:
            int_version = tuple(map(int, version.split(".")))
            if len(int_version) != 3:
                int_version = None
        except ValueError:
            int_version = None

        if int_version is None:
            raise TTError(f"Invalid version ({version}) in {path}/manifest.json")
    except Exception:
        # Don't leak the open tarfile if validation fails part-way through.
        tar.close()
        raise

    return tar, int_version


def main() -> Optional[int]:
    """Entry point: parse arguments and dispatch to the requested command.

    Returns the process exit code (propagated via ``sys.exit`` below): the
    result of ``flash_chips`` on success, or 1 on error.  The previous
    version returned ``None`` after printing an error, which made failures
    exit with status 0 and broke scripts/CI that check the exit code.
    """
    parser, args = parse_args()

    CConfig.force_no_tty = args.no_tty
    CConfig.COLOR.use_color = not args.no_color
    fwbundle = args.fwbundle or args.fw_tar

    try:
        if args.command == "flash":
            print(f"{CConfig.COLOR.GREEN}Stage:{CConfig.COLOR.ENDC} SETUP")
            try:
                tar, version = load_manifest(fwbundle)
            except Exception as e:
                print(f"Opening of {fwbundle} failed with - {e}\n\n---\n")
                parser.print_help()
                sys.exit(1)

            print(f"{CConfig.COLOR.GREEN}Stage:{CConfig.COLOR.ENDC} DETECT")
            devices = detect_local_chips(ignore_ethernet=True)

            print(f"{CConfig.COLOR.GREEN}Stage:{CConfig.COLOR.ENDC} FLASH")

            return flash_chips(
                devices,
                tar,
                args.force,
                args.no_reset,
                version,
                args.allow_major_downgrades,
                skip_missing_fw=args.skip_missing_fw,
            )
        else:
            raise TTError(f"No handler for command {args.command}.")
    except Exception as e:
        print(f"{CConfig.COLOR.RED}Error: {e} {CConfig.COLOR.ENDC}")
        # Non-zero exit status so callers can detect the failure.
        return 1


if __name__ == "__main__":
    sys.exit(main())
23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. 
You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. 
# SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

from abc import ABC, abstractmethod
from dataclasses import dataclass
import time
from typing import Union, Optional
import sys
import yaml

from pyluwen import PciChip, Telemetry
from pyluwen import detect_chips as luwen_detect_chips
from pyluwen import detect_chips_fallible as luwen_detect_chips_fallible

from tt_flash import utility
from tt_flash.error import TTError


def _decode_bundle_version(raw: int) -> tuple[int, int, int, int]:
    """Unpack a 32-bit packed firmware version into its four byte fields.

    The packing is most-significant-byte first:
    (component, major, minor, patch).
    """
    return (
        (raw >> 24) & 0xFF,
        (raw >> 16) & 0xFF,
        (raw >> 8) & 0xFF,
        raw & 0xFF,
    )


@dataclass
class FwVersion:
    """Result of querying a chip for its firmware bundle versions."""

    # True when a populated `exception` is a tolerated (non-fatal) outcome.
    allow_exception: bool
    # The exception raised while reading the versions, if any.
    exception: Optional[Exception]
    # Bundle version currently running on the chip, or None if unknown.
    running: Optional[tuple[int, int, int, int]]
    # Bundle version stored in SPI flash, or None if unknown.
    spi: Optional[tuple[int, int, int, int]]


def get_bundle_version_v1(chip: TTChip) -> FwVersion:
    """
    Get the currently running bundle version for wh, using a legacy method

    @param chip

    @return the detected fw bundle version.
    """
    running_bundle_version = None
    spi_bundle_version = None
    exception = None

    try:
        # arg0=0 returns the raw firmware version.
        fw_version = chip.arc_msg(
            chip.fw_defines["MSG_TYPE_FW_VERSION"], wait_for_done=True, arg0=0, arg1=0
        )[0]

        # Pre fw version 5 we don't have bundle support
        # this version of tt-flash only works with bundled fw
        # so it's safe to assume that we need to update
        if fw_version >= chip.min_fw_version():
            # arg0=1 requests the running bundle version.
            temp = chip.arc_msg(
                chip.fw_defines["MSG_TYPE_FW_VERSION"],
                wait_for_done=True,
                arg0=1,
                arg1=0,
            )[0]

            # 0xFFFFFFFF / 0xDEAD are sentinel "unknown/error" replies.
            if temp not in [0xFFFFFFFF, 0xDEAD]:
                running_bundle_version = _decode_bundle_version(temp)

            # There is a version of the firmware that doesn't correctly return an error when setting arg0 to an unknown option.
            # The running_bundle_version and fw_version can never be the same (as mandated by the version formatting) so I can safely check to see if they are the same when checking for this older FW.
            # NOTE(review): running_bundle_version is a tuple (or None) here, so
            # both comparisons below are always true and this guard never skips
            # the SPI read; if the intent was to compare the raw register
            # values, confirm against the affected firmware before changing.
            if (
                running_bundle_version != 0xDEAD
                and fw_version != running_bundle_version
            ):
                # arg0=2 requests the bundle version stored in SPI flash.
                temp = chip.arc_msg(
                    chip.fw_defines["MSG_TYPE_FW_VERSION"],
                    wait_for_done=True,
                    arg0=2,
                    arg1=0,
                )[0]

                if temp not in [0xFFFFFFFF, 0xDEAD]:
                    spi_bundle_version = _decode_bundle_version(temp)
    except Exception as e:
        exception = e

    return FwVersion(
        allow_exception=True,
        exception=exception,
        running=running_bundle_version,
        spi=spi_bundle_version,
    )


def get_chip_data(chip, file, internal: bool):
    """Open a per-chip-family data file bundled with the package.

    @param chip: used only to select the family subdirectory (WH or BH)
    @param file: file name within the family directory
    @param internal: when True read from the internal `.ignored/` tree

    @return an open file object; the caller is responsible for closing it.
    @raise TTError if the chip is neither Wormhole nor Blackhole.
    """
    with utility.package_root_path() as path:
        if isinstance(chip, WhChip):
            prefix = "wormhole"
        elif isinstance(chip, BhChip):
            prefix = "blackhole"
        else:
            raise TTError("Only support flashing WH or BH chips")
        if internal:
            prefix = f".ignored/{prefix}"
        else:
            prefix = f"data/{prefix}"
        return open(str(path.joinpath(f"{prefix}/{file}")))


def init_fw_defines(chip):
    """Load fw_defines.yaml for the given chip family.

    Uses a context manager so the file handle is closed promptly instead
    of being leaked to the garbage collector.
    """
    with get_chip_data(chip, "fw_defines.yaml", False) as defines_file:
        return yaml.safe_load(defines_file)


class TTChip(ABC):
    """Base wrapper around a pyluwen PciChip.

    Provides telemetry caching, SPI/AXI accessors and version helpers;
    family-specific behaviour lives in the WhChip / BhChip subclasses.
    Inherits ABC so the @abstractmethod declarations are actually enforced.
    """

    def __init__(self, chip: PciChip):
        self.luwen_chip = chip
        self.interface_id = chip.pci_interface_id()

        self.fw_defines = init_fw_defines(self)

        # Cached Telemetry object (name kept as-is for compatibility).
        self.telmetry_cache = None

    def reinit(self, callback=None):
        """Re-open the PCI handle and re-run chip init, dropping cached telemetry.

        @param callback: optional progress callback forwarded to pyluwen init;
            when None a tty-aware default progress printer is used.
        """
        self.luwen_chip = PciChip(self.interface_id)
        self.telmetry_cache = None

        chip_count = 0
        block_count = 0
        last_draw = time.time()

        def chip_detect_callback(status):
            nonlocal chip_count, last_draw, block_count

            if status.new_chip():
                chip_count += 1
            elif status.correct_down():
                chip_count -= 1
                chip_count = max(chip_count, 0)

            if sys.stdout.isatty():
                current_time = time.time()
                # Throttle redraws to ~10 Hz to avoid spamming the terminal.
                if current_time - last_draw > 0.1:
                    last_draw = current_time

                    if block_count > 0:
                        # Move the cursor up over the previous status block
                        # and clear to end of screen before redrawing.
                        print(f"\033[{block_count}A", end="", flush=True)
                        print("\033[J", end="", flush=True)

                    print(f"\rDetected Chips: {chip_count}\n", end="", flush=True)
                    block_count = 1

                    status_string = status.status_string()
                    if status_string is not None:
                        for line in status_string.splitlines():
                            block_count += 1
                            print(f"\r{line}", flush=True)
            else:
                # Not a tty: no drawing, just avoid busy-spinning.
                time.sleep(0.01)

        self.luwen_chip.init(
            callback=chip_detect_callback if callback is None else callback
        )

    def get_telemetry(self) -> Telemetry:
        """Fetch fresh telemetry from the chip and update the cache."""
        self.telmetry_cache = self.luwen_chip.get_telemetry()
        return self.telmetry_cache

    def get_telemetry_unchanged(self) -> Telemetry:
        """Return cached telemetry, fetching from the chip only on first use."""
        if self.telmetry_cache is None:
            self.telmetry_cache = self.luwen_chip.get_telemetry()

        return self.telmetry_cache

    def __vnum_to_version(self, version: int) -> tuple[int, int, int, int]:
        # Private name kept for compatibility; decoding is shared with the
        # module-level helper so all call sites agree on the packing.
        return _decode_bundle_version(version)

    def m3_fw_app_version(self):
        """Decoded M3 application firmware version from (cached) telemetry."""
        telem = self.get_telemetry_unchanged()
        return self.__vnum_to_version(telem.m3_app_fw_version)

    def smbus_fw_version(self):
        """Decoded SMBus (ARC1) firmware version from (cached) telemetry."""
        telem = self.get_telemetry_unchanged()
        return self.__vnum_to_version(telem.arc1_fw_version)

    def arc_l2_fw_version(self):
        """Decoded ARC L2 (ARC0) firmware version from (cached) telemetry."""
        telem = self.get_telemetry_unchanged()
        return self.__vnum_to_version(telem.arc0_fw_version)

    def get_asic_location(self) -> int:
        """
        Get the location of the ASIC on the chip for p300
        0 is L
        1 is R
        """
        telem = self.get_telemetry_unchanged()
        return telem.asic_location

    def board_type(self):
        """Board type as reported over PCI."""
        return self.luwen_chip.pci_board_type()

    def axi_write32(self, addr: int, value: int):
        """Write a 32-bit value to an AXI address."""
        self.luwen_chip.axi_write32(addr, value)

    def axi_write(self, addr: int, data: bytes):
        """Write a byte buffer to an AXI address."""
        self.luwen_chip.axi_write(addr, data)

    def axi_read32(self, addr: int) -> int:
        """Read a 32-bit value from an AXI address."""
        return self.luwen_chip.axi_read32(addr)

    def axi_read(self, addr: int, size: int) -> bytes:
        """Read `size` bytes from an AXI address."""
        data = bytearray(size)
        self.luwen_chip.axi_read(addr, data)

        return bytes(data)

    def spi_write(self, addr: int, data: bytes):
        """Write a byte buffer to SPI flash."""
        self.luwen_chip.spi_write(addr, data)

    def spi_read(self, addr: int, size: int) -> bytes:
        """Read `size` bytes from SPI flash."""
        data = bytearray(size)
        self.luwen_chip.spi_read(addr, data)

        return bytes(data)

    def arc_msg(self, *args, **kwargs):
        """Forward an ARC message to the underlying pyluwen chip."""
        return self.luwen_chip.arc_msg(*args, **kwargs)

    @abstractmethod
    def min_fw_version(self):
        """Minimum raw firmware version with bundle support for this family."""

    @abstractmethod
    def get_bundle_version(self) -> FwVersion:
        """Detect the running and SPI firmware bundle versions."""


class BhChip(TTChip):
    """Blackhole-specific chip behaviour."""

    def min_fw_version(self):
        # Blackhole firmware has always been bundled.
        return 0x0

    def __repr__(self):
        return f"Blackhole[{self.interface_id}]"

    def get_bundle_version(self) -> FwVersion:
        """Read the running (telemetry) and SPI (boot fs) bundle versions."""
        running = None
        spi = None
        exception = None
        try:
            # Read running FW bundle version from telemetry
            telem = self.get_telemetry_unchanged()
            running = _decode_bundle_version(telem.fw_bundle_version)

            # Read SPI FW bundle version
            cmfwcfg = self.luwen_chip.decode_boot_fs_table("cmfwcfg")
            spi = _decode_bundle_version(cmfwcfg["fw_bundle_version"])
        except Exception as e:
            exception = e

        return FwVersion(
            allow_exception=True, exception=exception, running=running, spi=spi
        )

    def get_asic_location(self) -> int:
        """
        Get the location of the ASIC on the chip for p300
        0 is L
        1 is R
        """
        # Records state of GPIO inputs [0:31] at boot time
        GPIO_STRAP_REG_L = 0x80030D20
        try:
            location = super().get_asic_location()
        except Exception:
            print(
                "\rWarning: Unable to retrieve telemetry, reading ASIC location "
                "via fallback\n",
                end="",
                flush=True,
            )
            gpio_strap = self.luwen_chip.axi_read32(GPIO_STRAP_REG_L)
            # If GPIO6 is high, we are on the left ASIC
            location = (gpio_strap >> 6) & 0x1

        return location


class WhChip(TTChip):
    """Wormhole-specific chip behaviour."""

    def min_fw_version(self):
        # First Wormhole firmware version with bundle support.
        return 0x2170000

    def __repr__(self):
        return f"Wormhole[{self.interface_id}]"

    def get_bundle_version(self) -> FwVersion:
        return get_bundle_version_v1(self)


def detect_local_chips(
    ignore_ethernet: bool = False,
) -> list[Union[WhChip, BhChip]]:
    """
    This will create a chip which only guarantees that you have communication with the chip.

    @param ignore_ethernet: when True, detect in noc_safe mode (skip ethernet).
    @return one wrapped chip per local PCI device.
    @raise TTError if a device is unreachable; ValueError on unknown boards.
    """

    chip_count = 0
    block_count = 0
    last_draw = time.time()
    did_draw = False

    def chip_detect_callback(status):
        nonlocal chip_count, last_draw, block_count, did_draw

        if status.new_chip():
            chip_count += 1
        elif status.correct_down():
            chip_count -= 1
            chip_count = max(chip_count, 0)

        if sys.stdout.isatty():
            did_draw = True
            current_time = time.time()
            # Throttle redraws to ~10 Hz to avoid spamming the terminal.
            if current_time - last_draw > 0.1:
                last_draw = current_time

                if block_count > 0:
                    # Move the cursor up over the previous status block
                    # and clear to end of screen before redrawing.
                    print(f"\033[{block_count}A", end="", flush=True)
                    print("\033[J", end="", flush=True)

                print(f"\rDetected Chips: {chip_count}\n", end="", flush=True)
                block_count = 1

                status_string = status.status_string()
                if status_string is not None:
                    for line in status_string.splitlines():
                        block_count += 1
                        print(f"\r{line}", flush=True)
        else:
            # Not a tty: no drawing, just avoid busy-spinning.
            time.sleep(0.01)

    output = []
    for device in luwen_detect_chips_fallible(
        local_only=True,
        continue_on_failure=False,
        callback=chip_detect_callback,
        noc_safe=ignore_ethernet,
    ):
        if not device.have_comms():
            # TTError (a subclass of Exception) keeps error handling
            # consistent with the rest of this module.
            raise TTError(
                f"Do not have communication with {device}, you should reset or remove this device from your system before continuing."
            )

        device = device.force_upgrade()

        if device.as_wh() is not None:
            output.append(WhChip(device.as_wh()))
        elif device.as_bh() is not None:
            output.append(BhChip(device.as_bh()))
        else:
            raise ValueError("Did not recognize board")

    if not did_draw:
        print(f"\tDetected Chips: {chip_count}")

    return output


def detect_chips(local_only: bool = False) -> list[Union[WhChip, BhChip]]:
    """Detect chips (optionally including remote ones) and wrap them.

    @raise ValueError on unknown boards.
    """
    output = []
    for device in luwen_detect_chips(local_only=local_only):
        if device.as_wh() is not None:
            output.append(WhChip(device.as_wh()))
        elif device.as_bh() is not None:
            output.append(BhChip(device.as_bh()))
        else:
            raise ValueError("Did not recognize board")

    return output
27 | 28 | ### 29 | # Repo Updates 30 | # So, fun fact, the way actions/checkout@v4 works it pulls / works based on what triggered it 31 | # this leads to a problem because I don't want to progress till various things have completed 32 | # in a roughly specific order, but I also need to know what/where things came from - which ok 33 | # fair enough. 34 | # 35 | # This means that repo changes, so version changes, changelogs, etc I guess all need to happen 36 | # not only at once but somehow need to wait on things down the line outside this triggering to 37 | # come back and allow other things to happen. Or I suppose I snag the sha from versionchange 38 | # and pass it along as "latest" in the series, could work too and bypass some of this? 39 | ### 40 | 41 | create-temp-branch: 42 | runs-on: ubuntu-latest 43 | permissions: 44 | contents: write 45 | outputs: 46 | temp_branch_ref: ${{ steps.temp_branch_ref.outputs.temp_branch_ref }} 47 | steps: 48 | - name: Checkout the repo 49 | uses: actions/checkout@v4 50 | with: 51 | fetch-tags: true 52 | fetch-depth: 0 53 | - name: Figure out branch name 54 | id: temp_branch_ref 55 | run: | 56 | export temp_branch_ref="temp_branch_ref=rc-temp-$( git rev-parse --short HEAD )-$( date +%Y.%m.%d-%H.%M.%S )" 57 | echo "${temp_branch_ref}" 58 | echo "${temp_branch_ref}" >> "$GITHUB_OUTPUT" 59 | - name: Create the branch 60 | run: git checkout -b "${{ steps.temp_branch_ref.outputs.temp_branch_ref }}" 61 | - name: Push the branch to work from 62 | run: git push --branches 63 | 64 | versionchange: 65 | needs: 66 | - create-temp-branch 67 | # Runs on needs to be the oldest we can make it 68 | # just to make sure that the python environment is the oldest 69 | # we likely need to support 70 | runs-on: ubuntu-22.04 71 | permissions: 72 | contents: write 73 | outputs: 74 | git_hash: ${{ steps.new-git-hash.outputs.git_hash }} 75 | package_name: ${{ steps.packagename.outputs.package_name }} 76 | package_version: ${{ 
steps.packagever.outputs.package_version }} 77 | package_version_new: ${{ steps.new-package-version.outputs.package_version_new }} 78 | version_major: ${{ steps.version.outputs.major }} 79 | version_minor: ${{ steps.version.outputs.minor }} 80 | version_patch: ${{ steps.version.outputs.patch }} 81 | version_prerelease: ${{ steps.version.outputs.prerelease }} 82 | version_build: ${{ steps.version.outputs.build }} 83 | version_full: ${{ steps.version.outputs.full }} 84 | number_of_commits_since_tag: ${{ steps.num-commits-since-tag.outputs.num_commits_since_tag }} 85 | steps: 86 | - uses: actions/checkout@v4 87 | with: 88 | fetch-tags: true 89 | fetch-depth: 0 90 | ref: ${{ needs.create-temp-branch.outputs.temp_branch_ref }} 91 | - name: Git e-mail 92 | run: git config --global user.email "${EMAIL}" 93 | - name: Git name 94 | run: git config --global user.name "${NAME}" 95 | - name: Install toml-cli 96 | run: | 97 | python3 -m venv venv 98 | . ./venv/bin/activate 99 | pip install --upgrade pip 100 | pip install toml-cli 101 | - name: What version are we? 102 | id: packagever 103 | run: | 104 | . ./venv/bin/activate 105 | export package_version="$( toml get project.version --toml-path pyproject.toml )" 106 | echo "package_version=${package_version}" 107 | echo "package_version=${package_version}" >> "$GITHUB_OUTPUT" 108 | - name: What package are we? 109 | id: packagename 110 | run: | 111 | . ./venv/bin/activate 112 | export package_name="$( toml get project.name --toml-path pyproject.toml )" 113 | echo "package_name=${package_name}" 114 | echo "package_name=${package_name}" >> "$GITHUB_OUTPUT" 115 | - name: Bump the version 116 | run: | 117 | . ./venv/bin/activate 118 | pip install bump2version 119 | bump2version \ 120 | --current-version ${{ steps.packagever.outputs.package_version }} \ 121 | ${{ github.event.inputs.version_bump_type }} pyproject.toml 122 | - name: What NEW Version? 123 | id: new-package-version 124 | run: | 125 | python3 -m venv venv 126 | . 
./venv/bin/activate 127 | pip install --upgrade pip 128 | pip install toml-cli 129 | export package_version_new="$( toml get project.version --toml-path pyproject.toml )" 130 | echo "package_version_new=${package_version_new}" 131 | echo "package_version_new=${package_version_new}" >> "$GITHUB_OUTPUT" 132 | - name: Parse version from string 133 | id: version 134 | uses: release-kit/semver@v2 135 | with: 136 | string: ${{ steps.new-package-version.outputs.package_version_new }} 137 | - name: Find number of commits since last tag 138 | id: num-commits-since-tag 139 | run: | 140 | export num_commits_since="$( git rev-list $(git describe --tags --abbrev=0)..HEAD --count )" 141 | echo "num_commits_since_tag=${num_commits_since}" 142 | echo "num_commits_since_tag=${num_commits_since}" >> "$GITHUB_OUTPUT" 143 | - run: echo ${{ steps.packagever.outputs.package_version }} 144 | - name: Update Version - git push 145 | run: | 146 | git commit \ 147 | pyproject.toml \ 148 | -m "pyproject.toml- updating version to ${{ steps.new-package-version.outputs.package_version_new }}" 149 | git push 150 | - name: Find New Git Hash 151 | id: new-git-hash 152 | run: | 153 | export git_hash_env="git_hash=$( git log --format="%H" -n 1 )" 154 | echo "${git_hash_env}" 155 | echo "${git_hash_env}" >> "$GITHUB_OUTPUT" 156 | - name: New Git Hash 157 | run: echo "${git_hash_env}" 158 | env: 159 | EMAIL: releases@tenstorrent.com 160 | NAME: Tenstorrent Releases 161 | 162 | ### 163 | # Change Logs Generation 164 | ### 165 | changelogs: 166 | needs: 167 | - create-temp-branch 168 | - versionchange 169 | runs-on: ubuntu-latest 170 | permissions: 171 | contents: write 172 | packages: write 173 | id-token: write 174 | outputs: 175 | git_hash: ${{ steps.new-git-hash.outputs.git_hash }} 176 | env: 177 | MAJOR: ${{ needs.versionchange.outputs.version_major }} 178 | MINOR: ${{ needs.versionchange.outputs.version_minor }} 179 | PATCH: ${{ needs.versionchange.outputs.version_patch }} 180 | 
NUMBER_OF_COMMITS_SINCE_TAG: ${{ needs.versionchange.outputs.number_of_commits_since_tag }} 181 | steps: 182 | - name: New Git Hash 183 | run: echo "${{ needs.versionchange.outputs.git_hash }}" 184 | - run: echo ${{ needs.versionchange.outputs.git_hash }} 185 | - run: sudo apt update 186 | - run: | 187 | sudo apt install -y \ 188 | git-buildpackage 189 | - uses: actions/checkout@v4 190 | with: 191 | fetch-tags: true 192 | fetch-depth: 0 193 | ref: ${{ needs.create-temp-branch.outputs.temp_branch_ref }} 194 | # Ok checkout doesn't do what I want it to do in the way it does it *sigh* 195 | # so we need to snag a pull to catch up from where it started 196 | #- run: git pull 197 | - run: git log --format="%H" -n 1 198 | #- name: Extract version from tag 199 | # uses: damienaicheh/extract-version-from-tag-action@v1.3.0 200 | - run: git tag 201 | # Ok this needs to get up leveled somewhere so that it can be shared across Debian and Ubuntu obviously 202 | - run: ls -lah 203 | - name: Generate Ubuntu / Debian Changelog 204 | run: | 205 | echo "Major: ${MAJOR}" 206 | echo "Minor: ${MINOR}" 207 | echo "Patch: ${PATCH}" 208 | echo "Num Patch: ${NUMBER_OF_COMMITS_SINCE_TAG}" 209 | gbp dch \ 210 | --debian-branch ${{ needs.create-temp-branch.outputs.temp_branch_ref }} \ 211 | -R \ 212 | -N ${MAJOR}.${MINOR}.${PATCH} \ 213 | --spawn-editor=never 214 | env: 215 | EMAIL: releases@tenstorrent.com 216 | NAME: Tenstorrent Releases 217 | - name: ok what happened 218 | run: git diff 219 | - name: Setup commit 220 | run: | 221 | git config --global user.email "${EMAIL}" 222 | git config --global user.name "${NAME}" 223 | git add . 
224 | git commit \ 225 | -m "Generating Ubuntu & Debian Changelog for ${MAJOR}.${MINOR}.${PATCH}-${NUMBER_OF_COMMITS_SINCE_TAG}" 226 | git pull --rebase 227 | git push 228 | env: 229 | EMAIL: releases@tenstorrent.com 230 | NAME: Tenstorrent Releases 231 | - name: Find New Git Hash 232 | id: new-git-hash 233 | run: | 234 | export git_hash_env="git_hash=$( git log --format="%H" -n 1 )" 235 | echo "${git_hash_env}" 236 | echo "${git_hash_env}" >> "$GITHUB_OUTPUT" 237 | 238 | ### 239 | # Trigger Building Everything 240 | ### 241 | build_all_depends: 242 | needs: 243 | - create-temp-branch 244 | - versionchange 245 | - changelogs 246 | uses: ./.github/workflows/build-all.yml 247 | with: 248 | ref: ${{ needs.create-temp-branch.outputs.temp_branch_ref }} 249 | MAJOR: ${{ needs.versionchange.outputs.version_major }} 250 | MINOR: ${{ needs.versionchange.outputs.version_minor }} 251 | PATCH: ${{ needs.versionchange.outputs.version_patch }} 252 | NUMBER_OF_COMMITS_SINCE_TAG: ${{ needs.versionchange.outputs.number_of_commits_since_tag }} 253 | secrets: inherit 254 | 255 | # Tag the Release 256 | ### 257 | tagrelease: 258 | name: Tag the Release 259 | needs: 260 | - versionchange 261 | - changelogs 262 | - build_all_depends 263 | runs-on: ubuntu-latest 264 | permissions: 265 | contents: write 266 | packages: write 267 | id-token: write 268 | steps: 269 | - name: New Git Hash 270 | run: echo "${{ needs.changelogs.outputs.git_hash }}" 271 | - run: echo ${{ needs.versionchange.outputs.git_hash }} 272 | - uses: actions/checkout@v4 273 | with: 274 | ref: ${{ needs.changelogs.outputs.git_hash }} 275 | fetch-tags: true 276 | fetch-depth: 0 277 | - run: git log --format="%H" -n 1 278 | - run: | 279 | echo ${{ needs.versionchange.outputs.package_version_new }} 280 | echo ${{ needs.versionchange.outputs.git_hash }} 281 | git tag v${{ needs.versionchange.outputs.package_version_new }} 282 | git tag 283 | git push --tags 284 | 285 | ### 286 | # Generate Release 287 | ### 288 | 
generate-release: 289 | name: Create GitHub Release 290 | needs: 291 | - create-temp-branch 292 | - versionchange 293 | - changelogs 294 | - build_all_depends 295 | - tagrelease 296 | runs-on: ubuntu-latest 297 | permissions: 298 | contents: write 299 | packages: write 300 | id-token: write 301 | steps: 302 | - name: Download Artifacts 303 | uses: actions/download-artifact@v4 304 | 305 | - name: Build Changelog 306 | id: build_changelog 307 | uses: mikepenz/release-changelog-builder-action@v5 308 | with: 309 | mode: "HYBRID" # Include both PRs and direct commits 310 | configurationJSON: | 311 | { 312 | "template": "#{{CHANGELOG}}\n\n## Contributors\n#{{CONTRIBUTORS}}", 313 | "categories": [ 314 | { 315 | "title": "## 🔄 Changes", 316 | "labels": [] 317 | } 318 | ], 319 | "pr_template": "- #{{TITLE}} (#{{NUMBER}}) by @#{{AUTHOR}}", 320 | "commit_template": "- #{{TITLE}} (#{{MERGE_SHA}}) by @#{{AUTHOR}}" 321 | } 322 | 323 | - name: Find the deb & whl files 324 | run: | 325 | find ${GITHUB_WORKSPACE} -type f -iname \*.deb -o -iname \*.whl -exec ls -lah {} \; 326 | find /home/runner/work/${{ env.project_name }}/${{ env.project_name }}/ -exec ls -lah {} \; 327 | 328 | - name: Deb file renames so they don't overlap 329 | run: | 330 | for x in $( \ 331 | find ${GITHUB_WORKSPACE} -type f -iname \*.deb | \ 332 | grep -v "artifacts-ubuntu" \ 333 | ) 334 | do 335 | want="$( \ 336 | echo "${x}" | \ 337 | xargs dirname | \ 338 | tr "/" "\n" | \ 339 | tail -n 1 \ 340 | )" 341 | mv -v \ 342 | "${x}" \ 343 | "$( echo "${x}" | xargs dirname )/${want}" 344 | done 345 | 346 | - name: Create GitHub Release 347 | id: create_release 348 | uses: softprops/action-gh-release@v1 349 | with: 350 | tag_name: v${{ needs.versionchange.outputs.package_version_new }} 351 | files: | 352 | ${{ github.workspace }}/${{ env.project_name }}*/*.deb 353 | ${{ github.workspace }}/[!artifacts-]*/*.deb 354 | ${{ github.workspace }}/*/*.whl 355 | body: ${{ steps.build_changelog.outputs.changelog }} 356 | 
draft: false 357 | prerelease: false 358 | 359 | ### 360 | # Merge back 361 | ### 362 | mergeback: 363 | needs: 364 | - create-temp-branch 365 | - generate-release 366 | runs-on: ubuntu-latest 367 | permissions: 368 | contents: write 369 | packages: write 370 | id-token: write 371 | steps: 372 | - name: Checkout fun! 373 | uses: actions/checkout@v4 374 | with: 375 | # Note: you want the original ref, we are merging back 376 | #ref: ${{ github.ref }}" 377 | fetch-tags: true 378 | fetch-depth: 0 379 | - name: Merge back 380 | run: | 381 | git log -3 --oneline 382 | git rebase origin/${{ needs.create-temp-branch.outputs.temp_branch_ref }} 383 | git pull --rebase 384 | git log -3 --oneline 385 | git push 386 | git push origin --delete ${{ needs.create-temp-branch.outputs.temp_branch_ref }} 387 | 388 | ### 389 | # Publish TEST PyPi 390 | ### 391 | publish-to-testpypi: 392 | name: >- 393 | Publish Python 🐍 distribution 📦 to TestPyPI 394 | # Disable for now 395 | #if: false 396 | needs: 397 | - build_all_depends 398 | - generate-release 399 | runs-on: ubuntu-latest 400 | environment: 401 | name: testpypi 402 | url: https://test.pypi.org/p/${{ env.project_name }} 403 | 404 | permissions: 405 | id-token: write # IMPORTANT: mandatory for trusted publishing 406 | 407 | steps: 408 | - name: Download Python distributions 409 | uses: actions/download-artifact@v4 410 | with: 411 | pattern: ${{ env.project_name }}*.whl 412 | name: release-dists 413 | path: dist/ 414 | - name: Publish distribution 📦 to TestPyPI 415 | #run: echo "Hi!" 
416 | uses: pypa/gh-action-pypi-publish@release/v1 417 | with: 418 | repository-url: https://test.pypi.org/legacy/ 419 | verbose: true 420 | 421 | ### 422 | # Publish PyPi 423 | ### 424 | publish-to-pypi: 425 | name: >- 426 | Publish Python 🐍 distribution 📦 to PyPI 427 | # Disable for now 428 | #if: false 429 | needs: 430 | - build_all_depends 431 | - generate-release 432 | - publish-to-testpypi 433 | runs-on: ubuntu-latest 434 | environment: 435 | name: pypi 436 | url: https://pypi.org/p/${{ env.project_name }} 437 | 438 | permissions: 439 | id-token: write # IMPORTANT: mandatory for trusted publishing 440 | 441 | steps: 442 | - name: Download Python distributions 443 | uses: actions/download-artifact@v4 444 | with: 445 | pattern: ${{ env.project_name }}*.whl 446 | name: release-dists 447 | path: dist/ 448 | - name: Publish distribution 📦 to PyPI 449 | #run: echo "Hi!" 450 | uses: pypa/gh-action-pypi-publish@release/v1 451 | with: 452 | verbose: true 453 | 454 | -------------------------------------------------------------------------------- /tt_flash/flash.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: © 2024 Tenstorrent AI ULC 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from dataclasses import dataclass 7 | from base64 import b16decode 8 | from datetime import date 9 | from enum import Enum, auto 10 | import json 11 | import requests 12 | import signal 13 | import tarfile 14 | import time 15 | from typing import Callable, Optional, Union 16 | import sys 17 | import random 18 | 19 | import tt_flash 20 | from tt_flash.blackhole import boot_fs_write 21 | from tt_flash.blackhole import FlashWrite 22 | from tt_flash.chip import BhChip, TTChip, WhChip, detect_chips 23 | from tt_flash.error import TTError 24 | from tt_flash.utility import change_to_public_name, get_board_type, CConfig 25 | 26 | from tt_tools_common.reset_common.wh_reset import 
WHChipReset 27 | from tt_tools_common.reset_common.bh_reset import BHChipReset 28 | from tt_tools_common.reset_common.galaxy_reset import GalaxyReset 29 | from tt_tools_common.utils_common.tools_utils import detect_chips_with_callback 30 | from pyluwen import run_wh_ubb_ipmi_reset, run_ubb_wait_for_driver_load 31 | 32 | 33 | def rmw_param( 34 | chip: TTChip, data: bytearray, spi_addr: int, data_addr: int, len: int 35 | ) -> bytearray: 36 | # Read the existing data 37 | existing_data = chip.spi_read(spi_addr, len) 38 | 39 | # Do the RMW 40 | data[data_addr : data_addr + len] = existing_data 41 | 42 | return data 43 | 44 | 45 | def incr_param( 46 | chip: TTChip, data: bytearray, spi_addr: int, data_addr: int, len: int 47 | ) -> bytearray: 48 | # Read the existing data 49 | existing_data = chip.spi_read(spi_addr, len) 50 | 51 | try: 52 | data_bytes = (int.from_bytes(existing_data, "little") + 1).to_bytes( 53 | len, "little" 54 | ) 55 | except OverflowError: 56 | # If we overflow, just set it to 0 57 | data_bytes = (1).to_bytes(len, "little") 58 | 59 | # Do the RMW 60 | data[data_addr : data_addr + len] = data_bytes 61 | 62 | return data 63 | 64 | 65 | def date_param( 66 | chip: TTChip, data: bytearray, spi_addr: int, data_addr: int, len: int 67 | ) -> bytearray: 68 | today = date.today() 69 | int_date = int(f"0x{today.strftime('%Y%m%d')}", 16) # Date in 0xYYYYMMDD 70 | 71 | # Do the RMW 72 | data[data_addr : data_addr + len] = int_date.to_bytes(len, "little") 73 | 74 | return data 75 | 76 | 77 | def flash_version( 78 | chip: TTChip, data: bytearray, spi_addr: int, data_addr: int, len: int 79 | ) -> bytearray: 80 | version = tt_flash.__version__ 81 | 82 | version_parts = version.split(".") 83 | for _ in range(version_parts.__len__(), 4): 84 | version_parts.insert(0, "0") 85 | version_parts = version_parts[:4] 86 | 87 | version = [ 88 | int(version_parts[3]), 89 | int(version_parts[2]), 90 | int(version_parts[1]), 91 | int(version_parts[0]), 92 | ] 93 | 94 | # Do the 
RMW 95 | data[data_addr : data_addr + len] = bytes(version) 96 | 97 | return data 98 | 99 | 100 | # HACK(drosen): I don't want to update the callback function just to implement the bundle version 101 | # but it is only set once so it's not too bad to just set it as a global. 102 | __SEMANTIC_BUNDLE_VERSION = [0xFF, 0xFF, 0xFF, 0xFF] 103 | 104 | 105 | def bundle_version( 106 | chip: TTChip, data: bytearray, spi_addr: int, data_addr: int, len: int 107 | ) -> bytearray: 108 | global __SEMANTIC_BUNDLE_VERSION 109 | 110 | for _ in range(__SEMANTIC_BUNDLE_VERSION.__len__(), 4): 111 | __SEMANTIC_BUNDLE_VERSION.append(0) 112 | version_parts = __SEMANTIC_BUNDLE_VERSION[:4] 113 | 114 | version = [ 115 | int(version_parts[3]), 116 | int(version_parts[2]), 117 | int(version_parts[1]), 118 | int(version_parts[0]), 119 | ] 120 | 121 | # Do the RMW 122 | data[data_addr : data_addr + len] = bytes(version) 123 | 124 | return data 125 | 126 | 127 | def normalize_fw_version(version: Optional[tuple[int, int, int, int]]) -> Optional[tuple[int, int, int, int]]: 128 | """ 129 | Old FW bundles used to start with 80 and the version format was 80.major.minor.patch. 130 | FW version switched over at major version 18 from 80.18.X.X -> 18.X.X. 131 | 132 | If version[0] == 80, return (major, minor, patch, 0). 133 | Otherwise, just return the version. 
134 | """ 135 | if version is None: 136 | return None 137 | if version[0] == 80: 138 | return (version[1], version[2], version[3], 0) 139 | return version 140 | 141 | 142 | TAG_HANDLERS: dict[str, Callable[[TTChip, bytearray, int, int, int], bytearray]] = { 143 | "rmw": rmw_param, 144 | "incr": incr_param, 145 | "date": date_param, 146 | "flash_version": flash_version, 147 | "bundle_version": bundle_version, 148 | } 149 | 150 | 151 | def live_countdown(wait_time: float, name: str, print_initial: bool = True): 152 | if print_initial: 153 | print(f"{name} started, will wait {wait_time} seconds for it to complete") 154 | 155 | # If True then we are running in an interactive environment 156 | if CConfig.is_tty(): 157 | start = time.time() 158 | elapsed = time.time() - start 159 | while elapsed < wait_time: 160 | print( 161 | f"\r\033[K{name} ongoing, waiting {wait_time - elapsed:.1f} more seconds for it to complete", 162 | end="", 163 | flush=True, 164 | ) 165 | 166 | time.sleep(0.1) 167 | elapsed = time.time() - start 168 | print(f"\r\033[K{name} completed", flush=True) 169 | else: 170 | time.sleep(wait_time) 171 | print(f"{name} completed") 172 | 173 | @dataclass 174 | class FlashData: 175 | write: list[FlashWrite] 176 | name: str 177 | idname: str 178 | 179 | 180 | class FlashStageResultState(Enum): 181 | Ok = auto() 182 | NoFlash = auto() 183 | Err = auto() 184 | 185 | 186 | @dataclass 187 | class FlashStageResult: 188 | state: FlashStageResultState 189 | can_reset: bool 190 | msg: str 191 | data: Optional[FlashData] 192 | 193 | 194 | def flash_chip_stage1( 195 | chip: TTChip, 196 | boardname: str, 197 | manifest: Manifest, 198 | fw_package: tarfile.TarFile, 199 | force: bool, 200 | allow_major_downgrades: bool, 201 | skip_missing_fw: bool = False, 202 | ) -> FlashStageResult: 203 | """ 204 | Check the chip and determine if it is a candidate to be flashed. 205 | 206 | The possible outcomes for this function are 207 | 1. 
The chip is running old fw and can be flashed 208 | 2. The chip is running fw too old to get the status from 209 | a. Force was used, so it will get flashed 210 | b. Force was not used, return an error and don't continue the flash process 211 | 3. The chip is running up to date fw, so we don't flash it 212 | 4. Force was used so we flash the fw no matter what 213 | """ 214 | 215 | try: 216 | chip.arc_msg( 217 | chip.fw_defines["MSG_TYPE_ARC_STATE3"], wait_for_done=True, timeout=0.1 218 | ) 219 | except Exception as err: 220 | # Ok to keep going if there's a timeout 221 | pass 222 | 223 | fw_bundle_version = chip.get_bundle_version() 224 | 225 | # If FW version is formatted like (80, major, minor, patch) reformat it to (major, minor, patch, 0) 226 | spi_version = normalize_fw_version(fw_bundle_version.spi) 227 | running_version = normalize_fw_version(fw_bundle_version.running) 228 | manifest_version = normalize_fw_version(manifest.bundle_version) 229 | 230 | if fw_bundle_version.exception is not None: 231 | if fw_bundle_version.allow_exception: 232 | # Very old wh fw doesn't have support for getting the fw version at all 233 | # so it's safe to assume that we need to update 234 | if force: 235 | print( 236 | f"\t\t\tHit error {fw_bundle_version.exception} while trying to determine running firmware. Falling back to assuming that it needs an update" 237 | ) 238 | else: 239 | raise TTError( 240 | f"Hit error {fw_bundle_version.exception} while trying to determine running firmware. If you know what you are doing you may still update by re-rerunning using the --force flag." 241 | ) 242 | else: 243 | # BH must always successfully be able to return a fw_version 244 | raise TTError( 245 | f"Hit error {fw_bundle_version.exception} while trying to determine running firmware." 246 | ) 247 | 248 | 249 | bundle_version = None 250 | if fw_bundle_version.running is None: 251 | # Certain old fw versions won't have the running_bundle_version populated. 
252 | # In that case we can just assume that an upgrade is required. 253 | if force: 254 | print( 255 | "\t\t\tLooks like you are running a very old set of fw, assuming that it needs an update" 256 | ) 257 | else: 258 | raise TTError( 259 | "Looks like you are running a very old set of fw, it's safe to assume that it needs an update but please update it using --force" 260 | ) 261 | print(f"\t\t\tNow flashing tt-flash version: {manifest.bundle_version}") 262 | else: 263 | if running_version[0] > manifest_version[0]: 264 | if allow_major_downgrades: 265 | print( 266 | f"\t\t\tDetected major version downgrade from {fw_bundle_version.running} to {manifest.bundle_version}, " 267 | "but major downgrades are allowed so we are proceeding" 268 | ) 269 | else: 270 | raise TTError( 271 | f"Detected major version downgrade from {fw_bundle_version.running} to {manifest.bundle_version}, this is not supported. " 272 | "If you really want to do this please re-run with --allow-major-downgrades" 273 | ) 274 | if running_version[0] == manifest_version[0] - 1: 275 | # Permit updates across only one major version boundary 276 | print( 277 | f"\t\t\t{CConfig.COLOR.YELLOW}Detected major version upgrade from " 278 | f"{fw_bundle_version.running} to {manifest.bundle_version}{CConfig.COLOR.ENDC}" 279 | ) 280 | elif running_version[0] != manifest_version[0]: 281 | if force: 282 | print( 283 | f"\t\t\tFound unexpected bundle version ('{running_version[0]}'), however you ran with force so we are barreling onwards" 284 | ) 285 | else: 286 | raise TTError( 287 | f"Bundle fwId ({manifest_version[0]}) does not match expected fwId ({running_version[0]}); {manifest.bundle_version} != {fw_bundle_version.running} " 288 | "bypass with --force" 289 | ) 290 | 291 | print( 292 | f"\t\t\tROM version is: {fw_bundle_version.running}. 
tt-flash version is: {manifest.bundle_version}" 293 | ) 294 | 295 | detected_version = True 296 | if force: 297 | detected_version = False 298 | print("\t\t\tForced ROM update requested. ROM will now be updated.") 299 | # Best check is for if we have already flashed the desired fw (or newer fw) to spi 300 | 301 | elif fw_bundle_version.spi is not None: 302 | if spi_version >= manifest_version: 303 | # Now that we know if the SPI is newer we should check to see if the problem is that we have flashed the correct FW, but are running something too old 304 | if fw_bundle_version.running is not None: 305 | if running_version >= manifest_version: 306 | print("\t\t\tROM does not need to be updated.") 307 | if running_version < manifest_version: 308 | print( 309 | "\t\t\tROM does not need to be updated, while the chip is running old FW the SPI is up to date. You can load the new firmware after a reboot, or in the case of WH a reset. Or skip this check with --force." 310 | ) 311 | else: 312 | print( 313 | "\t\t\tROM does not need to be updated, cannot detect the running FW version but the SPI is ahead of the firmware you are attempting to flash. You can load the newer firmware after a reboot, or in the case of WH a reset. Or skip this check with --force." 314 | ) 315 | 316 | return FlashStageResult( 317 | state=FlashStageResultState.NoFlash, data=None, msg="", can_reset=False 318 | ) 319 | # We did not see any spi versions returned... just go by running 320 | elif fw_bundle_version.running is not None: 321 | if running_version >= manifest_version: 322 | print("\t\t\tROM does not need to be updated.") 323 | return FlashStageResult( 324 | state=FlashStageResultState.NoFlash, data=None, msg="", can_reset=False 325 | ) 326 | else: 327 | detected_version = False 328 | print( 329 | "\t\t\tWas not able to fetch current firmware information, assuming that it needs an update" 330 | ) 331 | 332 | if detected_version: 333 | print("\t\t\tFW bundle version > ROM version. 
ROM will now be updated.") 334 | 335 | try: 336 | image = fw_package.extractfile(f"./{boardname}/image.bin") 337 | except KeyError: 338 | # If file is not found then key error is raised 339 | image = None 340 | try: 341 | mask = fw_package.extractfile(f"./{boardname}/mask.json") 342 | except KeyError: 343 | # If file is not found then key error is raised 344 | mask = None 345 | 346 | boardname_to_display = change_to_public_name(boardname) 347 | if image is None and mask is None: 348 | if skip_missing_fw: 349 | print( 350 | f"\t\t\tCould not find flash data for {boardname_to_display} in tarfile" 351 | ) 352 | return FlashStageResult( 353 | state=FlashStageResultState.NoFlash, data=None, msg="", can_reset=False 354 | ) 355 | else: 356 | raise TTError( 357 | f"Could not find flash data for {boardname_to_display} in tarfile" 358 | ) 359 | elif image is None: 360 | raise TTError( 361 | f"Could not find flash image for {boardname_to_display} in tarfile; expected to see {boardname}/image.bin" 362 | ) 363 | elif mask is None: 364 | raise TTError( 365 | f"Could not find param data for {boardname_to_display} in tarfile; expected to see {boardname}/mask.json" 366 | ) 367 | 368 | # First we verify that the format of mask is valid so we don't partially flash before discovering that the mask is invalid 369 | mask = json.loads(mask.read()) 370 | 371 | # Now we load the image and start replacing parameters 372 | image = image.read() 373 | 374 | if isinstance(chip, BhChip): 375 | writes = [] 376 | 377 | curr_addr = 0 378 | for line in image.decode("utf-8").splitlines(): 379 | line = line.strip() 380 | if line.startswith("@"): 381 | curr_addr = int(line.lstrip("@").strip()) 382 | else: 383 | data = b16decode(line) 384 | curr_stop = curr_addr + len(data) 385 | if not isinstance(data, bytearray): 386 | data = bytearray(data) 387 | writes.append(FlashWrite(curr_addr, data)) 388 | 389 | curr_addr = curr_stop 390 | 391 | writes.sort(key=lambda x: x.offset) 392 | 393 | writes = 
boot_fs_write(chip, boardname_to_display, mask, writes) 394 | else: 395 | # I expected to see a list of dicts, with the keys 396 | # "start", "end", "tag" 397 | param_handlers = [] 398 | for v in mask: 399 | start = v.get("start", None) 400 | end = v.get("end", None) 401 | tag = v.get("tag", None) 402 | 403 | if ( 404 | (start is None or not isinstance(start, int)) 405 | or (end is None or not isinstance(end, int)) 406 | or (tag is None or not isinstance(tag, str)) 407 | ): 408 | raise TTError( 409 | f"Invalid mask format for {boardname_to_display}; expected to see a list of dicts with keys 'start', 'end', 'tag'" 410 | ) 411 | 412 | if tag in TAG_HANDLERS: 413 | param_handlers.append(((start, end), TAG_HANDLERS[tag])) 414 | else: 415 | if len(TAG_HANDLERS) > 0: 416 | pretty_tags = [f"'{x}'" for x in TAG_HANDLERS.keys()] 417 | pretty_tags[-1] = f"or {pretty_tags[-1]}" 418 | raise TTError( 419 | f"Invalid tag {tag} for {boardname_to_display}; expected to see one of {pretty_tags}" 420 | ) 421 | else: 422 | raise TTError( 423 | f"Invalid tag {tag} for {boardname_to_display}; there aren't any tags defined!" 424 | ) 425 | writes = [] 426 | 427 | curr_addr = 0 428 | for line in image.decode("utf-8").splitlines(): 429 | line = line.strip() 430 | if line.startswith("@"): 431 | curr_addr = int(line.lstrip("@").strip()) 432 | else: 433 | data = b16decode(line) 434 | 435 | curr_stop = curr_addr + len(data) 436 | 437 | for (start, end), handler in param_handlers: 438 | if start < curr_stop and end > curr_addr: 439 | # chip, data, spi_addr, data_addr, len 440 | if not isinstance(data, bytearray): 441 | data = bytearray(data) 442 | data = handler( 443 | chip, data, start, start - curr_addr, end - start 444 | ) 445 | elif start >= curr_addr and start < curr_stop and end >= curr_stop: 446 | raise TTError( 447 | f"A parameter write ({start}:{end}) splits a writeable region ({curr_addr}:{curr_stop}) in {boardname_to_display}! This is not supported." 
448 | ) 449 | 450 | if not isinstance(data, bytes): 451 | data = bytes(data) 452 | writes.append(FlashWrite(curr_addr, data)) 453 | 454 | curr_addr = curr_stop 455 | 456 | writes.sort(key=lambda x: x.offset) 457 | 458 | 459 | if boardname in ["NEBULA_X1", "NEBULA_X2"]: 460 | print( 461 | "\t\t\tBoard will require reset to complete update, checking if an automatic reset is possible" 462 | ) 463 | can_reset = False 464 | 465 | try: 466 | can_reset = ( 467 | chip.m3_fw_app_version() >= (5, 5, 0, 0) 468 | and chip.arc_l2_fw_version() >= (2, 0xC, 0, 0) 469 | and chip.smbus_fw_version() >= (2, 0xC, 0, 0) 470 | ) 471 | if can_reset: 472 | print( 473 | f"\t\t\t\t{CConfig.COLOR.GREEN}Success:{CConfig.COLOR.ENDC} Board can be auto reset; will be triggered if the flash is successful" 474 | ) 475 | except Exception as e: 476 | print( 477 | f"\t\t\t\t{CConfig.COLOR.YELLOW}Fail:{CConfig.COLOR.ENDC} Board cannot be auto reset: Failed to get the current firmware versions. This won't stop the flash, but will require manual reset" 478 | ) 479 | can_reset = False 480 | elif isinstance(chip, BhChip): 481 | can_reset = True 482 | elif boardname == "WH_UBB": 483 | can_reset = True 484 | else: 485 | can_reset = False 486 | 487 | return FlashStageResult( 488 | state=FlashStageResultState.Ok, 489 | can_reset=can_reset, 490 | msg="", 491 | data=FlashData(write=writes, name=boardname_to_display, idname=boardname), 492 | ) 493 | 494 | 495 | def flash_chip_stage2( 496 | chip: TTChip, 497 | data: FlashData, 498 | ) -> Optional[bool]: 499 | # Install sigint handler 500 | def signal_handler(sig, frame): 501 | print("Ctrl-C Caught: this process should not be interrupted") 502 | 503 | def perform_write(chip, writes: FlashWrite): 504 | original_sigint_handler = signal.getsignal(signal.SIGINT) 505 | signal.signal(signal.SIGINT, signal_handler) 506 | 507 | try: 508 | for write in writes: 509 | chip.spi_write(write.offset, write.write) 510 | finally: 511 | signal.signal(signal.SIGINT, 
original_sigint_handler) 512 | 513 | def perform_verify(chip, writes: FlashWrite) -> Optional[Union[int, int]]: 514 | original_sigint_handler = signal.getsignal(signal.SIGINT) 515 | signal.signal(signal.SIGINT, signal_handler) 516 | 517 | try: 518 | for write in writes: 519 | base_data = chip.spi_read(write.offset, len(write.write)) 520 | 521 | if base_data != write.write: 522 | first_mismatch = None 523 | mismatch_count = 0 524 | for index, (a, b) in enumerate(zip(base_data, write.write)): 525 | if a != b: 526 | mismatch_count += 1 527 | if first_mismatch is None: 528 | first_mismatch = index 529 | return first_mismatch, mismatch_count 530 | finally: 531 | signal.signal(signal.SIGINT, original_sigint_handler) 532 | 533 | return None 534 | 535 | if CConfig.is_tty(): 536 | print( 537 | "\t\t\tWriting new firmware... (this may take up to 1 minute)", 538 | end="", 539 | flush=True, 540 | ) 541 | else: 542 | print("\t\t\tWriting new firmware... (this may take up to 1 minute)") 543 | 544 | perform_write(chip, data.write) 545 | 546 | if CConfig.is_tty(): 547 | print("\r\033[K", end="") 548 | print( 549 | f"\t\t\tWriting new firmware... {CConfig.COLOR.GREEN}SUCCESS{CConfig.COLOR.ENDC}" 550 | ) 551 | 552 | print( 553 | "\t\t\tVerifying flashed firmware... (this may also take up to 1 minute)", 554 | end="", 555 | flush=True, 556 | ) 557 | if not CConfig.is_tty(): 558 | print() 559 | 560 | verify_result = perform_verify(chip, data.write) 561 | if verify_result is not None: 562 | (first_mismatch, mismatch_count) = verify_result 563 | 564 | if CConfig.is_tty(): 565 | print(f"\r\033[K", end="") 566 | print( 567 | f"\t\t\tIntial verification: {CConfig.COLOR.RED}failed{CConfig.COLOR.ENDC}" 568 | ) 569 | print(f"\t\t\t\tFirst Mismatch at: {first_mismatch}") 570 | print(f"\t\t\t\tFound {mismatch_count} mismatches") 571 | 572 | if CConfig.is_tty(): 573 | print( 574 | "\t\t\tAttempted to write firmware one more time... 
(this, again, may also take up to 1 minute)", 575 | end="", 576 | flush=True, 577 | ) 578 | else: 579 | print( 580 | "\t\t\tAttempted to write firmware one more time... (this, again, may also take up to 1 minute)" 581 | ) 582 | 583 | perform_write(chip, data.write) 584 | 585 | if CConfig.is_tty(): 586 | print("\r\033[K", end="") 587 | print( 588 | f"\t\t\tAttempted to write firmware one more time... {CConfig.COLOR.GREEN}SUCCESS{CConfig.COLOR.ENDC}" 589 | ) 590 | 591 | print( 592 | "\t\t\tVerifying second flash attempt... (this may also take up to 1 minute)", 593 | end="", 594 | flush=True, 595 | ) 596 | if not CConfig.is_tty(): 597 | print() 598 | 599 | verify_result = perform_verify(chip, data.write) 600 | if verify_result is not None: 601 | (first_mismatch, mismatch_count) = verify_result 602 | 603 | if CConfig.is_tty(): 604 | print(f"\r\033[K", end="") 605 | print( 606 | f"\t\t\tSecond verification {CConfig.COLOR.RED}failed{CConfig.COLOR.ENDC}, please do not reset or poweroff the board and contact support for further assistance." 607 | ) 608 | 609 | print(f"\t\t\t\tFirst Mismatch at: {first_mismatch}") 610 | print(f"\t\t\t\tFound {mismatch_count} mismatches") 611 | return None 612 | 613 | if CConfig.is_tty(): 614 | print(f"\r\033[K", end="") 615 | print( 616 | f"\t\t\tFirmware verification... {CConfig.COLOR.GREEN}SUCCESS{CConfig.COLOR.ENDC}" 617 | ) 618 | 619 | trigged_copy = False 620 | if data.idname == "NEBULA_X2": 621 | print("\t\t\tInitiating local to remote data copy") 622 | 623 | # There is a bug in m3 app version 5.8.0.1 where we can trigger a boot loop during the left to right copy. 624 | # In this condition we will disable the auto-reset before triggering the left to right copy. 
625 | if chip.m3_fw_app_version() == (5, 8, 0, 1): 626 | print("Mitigating bootloop bug") 627 | triggered_reset_disable = False 628 | try: 629 | chip.arc_msg( 630 | chip.fw_defines["MSG_UPDATE_M3_AUTO_RESET_TIMEOUT"], arg0=0 631 | ) 632 | triggered_reset_disable = True 633 | except Exception as e: 634 | print( 635 | f"\t\t\t{CConfig.COLOR.BLUE}NOTE:{CConfig.COLOR.ENDC} Failed to disable the m3 autoreset; please reboot/reset your system and flash again to initiate the left to right copy." 636 | ) 637 | return None 638 | if triggered_reset_disable: 639 | live_countdown(1.0, "\t\t\tDisable m3 reset") 640 | 641 | try: 642 | chip.arc_msg(chip.fw_defines["MSG_TRIGGER_SPI_COPY_LtoR"]) 643 | trigged_copy = True 644 | except Exception as e: 645 | print( 646 | f"\t\t\t{CConfig.COLOR.BLUE}NOTE:{CConfig.COLOR.ENDC} Failed to initiate left to right copy; please reset the host to reset the board and then rerun the flash with the --force flag to complete flash." 647 | ) 648 | return None 649 | 650 | return trigged_copy 651 | 652 | 653 | @dataclass 654 | class Manifest: 655 | data: dict 656 | bundle_version: tuple[int, int, int, int] 657 | 658 | # Mapping of validation functions for each bundle version 659 | BUNDLE_VALIDATION_FUNCS = { 660 | (2, 0, 0): lambda bundle_version: bundle_version[0] >= 19, # Ensure major release is 19 or newer 661 | } 662 | 663 | def verify_package(fw_package: tarfile.TarFile, version: tuple[int, int, int]): 664 | manifest_data = fw_package.extractfile("./manifest.json") 665 | if manifest_data is None: 666 | if CConfig.is_tty(): 667 | # HACK(drosen): Would not have ended the last line with a '\n' 668 | print("\n") 669 | raise TTError( 670 | "Could not find manifest in fw package, please check that the correct one was used." 
671 | ) 672 | manifest = json.loads(manifest_data.read()) 673 | 674 | manifest_bundle_version = manifest.get("bundle_version", {}) 675 | 676 | new_bundle_version = ( 677 | manifest_bundle_version.get("fwId", 0), 678 | manifest_bundle_version.get("releaseId", 0), 679 | manifest_bundle_version.get("patch", 0), 680 | manifest_bundle_version.get("debug", 0), 681 | ) 682 | 683 | # Note- we only validate versions >= 2.0.0, for backwards compatibility with 1.x.x 684 | if version[0] != 1: 685 | if version not in BUNDLE_VALIDATION_FUNCS: 686 | raise TTError( 687 | f"Unsupported manifest version ({'.'.join(map(str, version))}). Please update tt-flash to the latest version." 688 | ) 689 | elif not BUNDLE_VALIDATION_FUNCS[version](new_bundle_version): 690 | raise TTError( 691 | f"Bundle version {new_bundle_version} does not meet the requirements for version {'.'.join(map(str, version))}" 692 | ) 693 | 694 | global __SEMANTIC_BUNDLE_VERSION 695 | __SEMANTIC_BUNDLE_VERSION = list(new_bundle_version) 696 | 697 | return Manifest(data=manifest, bundle_version=new_bundle_version) 698 | 699 | 700 | def check_galaxy_eth_link_status(devices): 701 | """ 702 | Check the Galaxy Ethernet link status. 703 | Returns True if the link is up, False otherwise. 
704 | """ 705 | noc_id = 0 706 | DEBUG_BUF_ADDR = 0x12c0 # For eth fw 5.0.0 and above 707 | eth_locations_noc_0 = [ (9, 0), (1, 0), (8, 0), (2, 0), (7, 0), (3, 0), (6, 0), (4, 0), 708 | (9, 6), (1, 6), (8, 6), (2, 6), (7, 6), (3, 6), (6, 6), (4, 6) ] 709 | LINK_INACTIVE_FAIL_DUMMY_PACKET = 10 710 | # Check that we have 32 devices 711 | if len(devices) != 32: 712 | raise TTError( 713 | f"Expected 32 devices for Galaxy Ethernet link status check, seeing {len(devices)}, please try reset again or cold boot the system.", 714 | ) 715 | 716 | # Collect all the link errors in a dictionary 717 | link_errors = {} 718 | # Check all 16 eth links for all devices 719 | for i, device in enumerate(devices): 720 | for eth in range(16): 721 | eth_x, eth_y = eth_locations_noc_0[eth] 722 | link_error = device.noc_read32(noc_id, eth_x, eth_y, DEBUG_BUF_ADDR + 0x4*96) 723 | if link_error == LINK_INACTIVE_FAIL_DUMMY_PACKET: 724 | link_errors[i] = eth 725 | 726 | if link_errors: 727 | for board_idx, eth in link_errors.items(): 728 | print( 729 | CConfig.COLOR.RED, 730 | f"\t\tBoard {board_idx} has link error on eth port {eth}", 731 | CConfig.COLOR.ENDC, 732 | ) 733 | raise TTError( 734 | "Galaxy Ethernet link errors detected" 735 | ) 736 | 737 | 738 | def glx_6u_trays_reset(reinit=True, ubb_num="0xF", dev_num="0xFF", op_mode="0x0", reset_time="0xF"): 739 | """ 740 | Reset the WH asics on the galaxy systems with the following steps: 741 | 1. Reset the trays with ipmi command 742 | 2. Wait for 30s 743 | 3. Reinit all chips 744 | 745 | Args: 746 | reinit (bool): Whether to reinitialize the chips after reset. 747 | ubb_num (str): The UBB number to reset. 0x0~0xF (bit map) 748 | dev_num (str): The device number to reset. 0x0~0xFF(bit map) 749 | op_mode (str): The operation mode to use. 750 | 0x0 - Asserted/Deassert reset with a reset period (reset_time) 751 | 0x1 - Asserted reset 752 | 0x2 - Deasserted reset 753 | reset_time (str): The reset time to use. resolution 10ms (ex. 
0xF => 15 => 150ms) 754 | """ 755 | print( 756 | CConfig.COLOR.PURPLE, 757 | f"\t\tResetting Galaxy trays with reset command...", 758 | CConfig.COLOR.ENDC, 759 | ) 760 | run_wh_ubb_ipmi_reset(ubb_num, dev_num, op_mode, reset_time) 761 | live_countdown(30, "Galaxy reset") 762 | run_ubb_wait_for_driver_load() 763 | print( 764 | CConfig.COLOR.PURPLE, 765 | f"\t\tRe-initializing boards after reset....", 766 | CConfig.COLOR.ENDC, 767 | ) 768 | if not reinit: 769 | print( 770 | CConfig.COLOR.GREEN, 771 | f"\t\tExiting after galaxy reset without re-initializing chips.", 772 | CConfig.COLOR.ENDC, 773 | ) 774 | return 775 | # eth status 2 has been reused to denote "connected", leading to false hangs when detecting chips 776 | # discover local only to fix that 777 | chips = detect_chips_with_callback(local_only=True, ignore_ethernet=True) 778 | # Check the eth link status for WH Galaxy 779 | 780 | # after re-init check eth status - only if doing a full galaxy reset. 781 | # If doing a partial reset, eth connections will be broken because eth training will go out of sync 782 | if ubb_num == 0xF: 783 | check_wh_galaxy_eth_link_status(chips) 784 | # All went well 785 | print( 786 | CConfig.COLOR.GREEN, 787 | f"\t\tRe-initialized {len(chips)} boards after reset.", 788 | CConfig.COLOR.ENDC, 789 | ) 790 | 791 | 792 | def flash_chips( 793 | devices: list[TTChip], 794 | fw_package: tarfile.TarFile, 795 | force: bool, 796 | no_reset: bool, 797 | version: tuple[int, int, int], 798 | allow_major_downgrades: bool, 799 | skip_missing_fw: bool = False, 800 | ): 801 | print(f"\t{CConfig.COLOR.GREEN}Sub Stage:{CConfig.COLOR.ENDC} VERIFY") 802 | if CConfig.is_tty(): 803 | print("\t\tVerifying fw-package can be flashed ", end="", flush=True) 804 | else: 805 | print("\t\tVerifying fw-package can be flashed") 806 | manifest = verify_package(fw_package, version) 807 | 808 | if CConfig.is_tty(): 809 | print( 810 | f"\r\t\tVerifying fw-package can be flashed: 
{CConfig.COLOR.GREEN}complete{CConfig.COLOR.ENDC}" 811 | ) 812 | else: 813 | print( 814 | f"\t\tVerifying fw-package can be flashed: {CConfig.COLOR.GREEN}complete{CConfig.COLOR.ENDC}" 815 | ) 816 | 817 | to_flash = [] 818 | for dev in devices: 819 | print( 820 | f"\t\tVerifying {CConfig.COLOR.BLUE}{dev}{CConfig.COLOR.ENDC} can be flashed" 821 | ) 822 | try: 823 | boardname = get_board_type(dev.board_type(), from_type=True) 824 | except: 825 | boardname = None 826 | 827 | if boardname is None: 828 | raise TTError(f"Did not recognize board type for {dev}") 829 | 830 | # For p300 we need to check if its L or R chip 831 | if "P300" in boardname: 832 | # 0 = Right, 1 = Left 833 | if dev.get_asic_location() == 0: 834 | boardname = f"{boardname}_right" 835 | elif dev.get_asic_location() == 1: 836 | boardname = f"{boardname}_left" 837 | 838 | to_flash.append(boardname) 839 | 840 | print(f"\t{CConfig.COLOR.GREEN}Stage:{CConfig.COLOR.ENDC} FLASH") 841 | 842 | flash_data = [] 843 | flash_error = [] 844 | needs_reset_wh = [] 845 | needs_reset_bh = [] 846 | for chip, boardname in zip(devices, to_flash): 847 | print( 848 | f"\t\t{CConfig.COLOR.GREEN}Sub Stage{CConfig.COLOR.ENDC} FLASH Step 1: {CConfig.COLOR.BLUE}{chip}{CConfig.COLOR.ENDC}" 849 | ) 850 | result = flash_chip_stage1( 851 | chip, 852 | boardname, 853 | manifest, 854 | fw_package, 855 | force, 856 | allow_major_downgrades, 857 | skip_missing_fw=skip_missing_fw, 858 | ) 859 | 860 | if result.state == FlashStageResultState.Err: 861 | flash_error.append(f"{chip}: {result.msg}") 862 | elif result.state == FlashStageResultState.Ok: 863 | flash_data.append((chip, result.data)) 864 | if result.can_reset: 865 | if isinstance(chip, WhChip): 866 | needs_reset_wh.append(chip.interface_id) 867 | elif isinstance(chip, BhChip): 868 | needs_reset_bh.append(chip.interface_id) 869 | 870 | rc = 0 871 | 872 | triggered_copy = False 873 | for chip, data in flash_data: 874 | print( 875 | f"\t\t{CConfig.COLOR.GREEN}Sub 
Stage{CConfig.COLOR.ENDC} FLASH Step 2: {CConfig.COLOR.BLUE}{chip} {{{data.name}}}{CConfig.COLOR.ENDC}" 876 | ) 877 | result = flash_chip_stage2(chip, data) 878 | if result is None: 879 | rc += 1 880 | else: 881 | triggered_copy |= result 882 | 883 | # If we flashed an X2 then we will wait for the copy to complete 884 | if triggered_copy: 885 | print( 886 | f"\t\tFlash and verification for all chips completed, will now wait for n300 remote copy to complete..." 887 | ) 888 | live_countdown(15.0, "\t\tRemote copy", print_initial=False) 889 | 890 | if len(needs_reset_wh) > 0 or len(needs_reset_bh) > 0: 891 | print(f"{CConfig.COLOR.GREEN}Stage:{CConfig.COLOR.ENDC} RESET") 892 | 893 | m3_delay = 20 # M3 takes 20 seconds to boot and be ready after a reset 894 | running_version = chip.get_bundle_version().running 895 | if (running_version is None) or (running_version[0] != manifest.bundle_version[0]): 896 | # We crossed a major version boundary, give a longer boot timeout 897 | print( 898 | "\t\tDetected update across major version, will wait 60 seconds for m3 to boot after reset" 899 | ) 900 | m3_delay = 60 901 | 902 | if no_reset: 903 | if rc != 0: 904 | print( 905 | f"\t\tErrors detected during flash, would not have reset even if --no-reset was not given..." 
906 | ) 907 | else: 908 | print( 909 | f"\t\tWould have reset to force m3 recovery, but did not due to --no-reset" 910 | ) 911 | else: 912 | if rc != 0: 913 | print(f"\t\tErrors detected during flash, skipping automatic reset...") 914 | else: 915 | # All chips are on BH Galaxy UBB 916 | if set(to_flash) == {"GALAXY-1"}: 917 | glx_6u_trays_reset() 918 | # All BH chips have now been reset 919 | # Don't reset them conventionally 920 | needs_reset_bh = [] 921 | 922 | # All chips are on WH Galaxy UBB 923 | elif set(to_flash) == {"WH_UBB"}: 924 | glx_6u_trays_reset() 925 | needs_reset_wh = [] # Don't reset WH chips conventionally 926 | 927 | if len(needs_reset_wh) > 0: 928 | WHChipReset().full_lds_reset( 929 | pci_interfaces=needs_reset_wh, reset_m3=True 930 | ) 931 | 932 | if len(needs_reset_bh) > 0: 933 | BHChipReset().full_lds_reset( 934 | pci_interfaces=needs_reset_bh, reset_m3=True, 935 | m3_delay=m3_delay 936 | ) 937 | 938 | if len(needs_reset_wh) > 0 or len(needs_reset_bh) > 0: 939 | devices = detect_chips() 940 | 941 | for idx, chip in enumerate(devices): 942 | if manifest.bundle_version[0] >= 19 and isinstance(chip, BhChip): 943 | # Get a random number to send back as arg0 944 | check_val = random.randint(1, 0xFFFF) 945 | try: 946 | response = chip.arc_msg(chip.fw_defines["MSG_CONFIRM_FLASHED_SPI"], arg0=check_val) 947 | except BaseException: 948 | response = [0] 949 | if (response[0] & 0xFFFF) != check_val: 950 | print(f"{CConfig.COLOR.YELLOW}WARNING:{CConfig.COLOR.ENDC} Post flash check failed for chip {idx}") 951 | print("Try resetting the board to ensure the new firmware is loaded correctly.") 952 | 953 | if rc == 0: 954 | print(f"FLASH {CConfig.COLOR.GREEN}SUCCESS{CConfig.COLOR.ENDC}") 955 | else: 956 | print(f"FLASH {CConfig.COLOR.RED}FAILED{CConfig.COLOR.ENDC}") 957 | 958 | return rc 959 | --------------------------------------------------------------------------------