├── .editorconfig ├── .github ├── dependabot.yml └── workflows │ └── test.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── CHANGELOG.rst ├── LICENSE.txt ├── README.rst ├── changelog.d └── README.txt ├── poetry.lock ├── pyproject.toml ├── requirements ├── README.rst ├── mypy │ ├── poetry.lock │ ├── pyproject.toml │ └── requirements.txt └── test │ ├── poetry.lock │ ├── pyproject.toml │ └── requirements.txt ├── src └── fru │ ├── __init__.py │ ├── __main__.py │ ├── console.py │ ├── exceptions.py │ ├── fru_format.py │ ├── shared.py │ └── toml_format.py ├── tests ├── basic-all.bin ├── basic-all.toml ├── basic-board.bin ├── basic-board.toml ├── basic-chassis.bin ├── basic-chassis.toml ├── basic-empty.bin ├── basic-empty.toml ├── basic-internal-data.bin ├── basic-internal-data.toml ├── basic-internal-file.bin ├── basic-internal-file.toml ├── basic-internal.bin ├── basic-internal.toml ├── basic-product.bin ├── basic-product.toml ├── checksum-zero.bin ├── include_defaults_to_false.toml ├── internal-empty.toml ├── internal-fru-file-not-found.toml ├── internal-fru.bin ├── skip-section.toml ├── test_fru.py └── test_toml.py └── tox.ini /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | indent_size = 4 7 | indent_style = space 8 | insert_final_newline = true 9 | trim_trailing_whitespace = true 10 | 11 | [{*.yaml,*.yml}] 12 | indent_size = 2 13 | 14 | [*.bin] 15 | charset = unset 16 | end_of_line = unset 17 | indent_size = unset 18 | indent_style = unset 19 | insert_final_newline = unset 20 | trim_trailing_whitespace = unset 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "monthly" 7 | groups: 8 | 
github-actions: 9 | patterns: 10 | - "*" 11 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: "🔬 Test" 2 | 3 | on: 4 | pull_request: null 5 | push: 6 | branches: 7 | - "main" 8 | - "releases" 9 | 10 | jobs: 11 | test: 12 | name: "Test (${{ matrix.os.name }})" 13 | 14 | strategy: 15 | matrix: 16 | os: 17 | - name: "Linux" 18 | runner: "ubuntu-latest" 19 | - name: "macOS" 20 | runner: "macos-latest" 21 | - name: "Windows" 22 | runner: "windows-latest" 23 | 24 | # Each operating system should test all Python interpreters simultaneously. 25 | # This nested-list syntax accomplishes that goal 26 | # without creating cross-products of every possible OS and interpreter. 27 | # 28 | # Note: The CPython interpreter versions should be in ascending order 29 | # because the last-listed version will be the default CPython version. 30 | # 31 | cpythons: 32 | - - "3.8" 33 | - "3.9" 34 | - "3.10" 35 | - "3.11" 36 | - "3.12" 37 | cpython-beta: 38 | - "3.13" 39 | fail-fast: false 40 | 41 | runs-on: "${{ matrix.os.runner }}" 42 | steps: 43 | - name: "Checkout the repository" 44 | uses: "actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683" # v4.2.2 45 | 46 | - name: "Setup Pythons" 47 | uses: "actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065" # v5.6.0 48 | env: 49 | # Disable pip upgrade warnings while setting up Python versions. 
50 | PIP_DISABLE_PIP_VERSION_CHECK: "1" 51 | with: 52 | python-version: "${{ 53 | format( 54 | '{0}\n{1}', 55 | matrix.cpython-beta, 56 | join(matrix.cpythons, '\n') 57 | ) 58 | }}" 59 | allow-prereleases: true 60 | 61 | - name: "Detect Pythons" 62 | uses: "kurtmckee/detect-pythons@4a7b361b5ee27eb35c8b5026ac757d02751d6688" # v1.1.1 63 | 64 | - name: "Restore cache" 65 | id: "restore-cache" 66 | uses: "actions/cache@5a3ec84eff668545956fd18022155c47e93e2684" # v4.2.3 67 | with: 68 | path: | 69 | .mypy_cache/ 70 | .tox/ 71 | .venv/ 72 | key: "test-os=${{ runner.os }}-hash=${{ hashFiles('.python-identifiers', 'pyproject.toml', 'tox.ini', 'requirements/**/*.txt') }}" 73 | 74 | - name: "Identify venv path" 75 | shell: "bash" 76 | run: | 77 | echo "venv-path=.venv/${{ runner.os == 'Windows' && 'Scripts' || 'bin' }}" >> "$GITHUB_ENV" 78 | 79 | - name: "Create a virtual environment" 80 | if: "steps.restore-cache.outputs.cache-hit == false" 81 | run: | 82 | python -m venv .venv 83 | ${{ env.venv-path }}/python -m pip install --upgrade pip setuptools wheel 84 | ${{ env.venv-path }}/pip install tox 85 | 86 | - name: "Run the test suite" 87 | run: "${{ env.venv-path }}/tox" 88 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.idea/ 2 | 3 | 4 | # https://github.com/github/gitignore/blob/master/Python.gitignore 5 | # ---------------------------------------------------------------- 6 | 7 | # Byte-compiled / optimized / DLL files 8 | __pycache__/ 9 | *.py[cod] 10 | *$py.class 11 | 12 | # C extensions 13 | *.so 14 | 15 | # Distribution / packaging 16 | .Python 17 | build/ 18 | develop-eggs/ 19 | dist/ 20 | downloads/ 21 | eggs/ 22 | .eggs/ 23 | lib/ 24 | lib64/ 25 | parts/ 26 | sdist/ 27 | var/ 28 | wheels/ 29 | share/python-wheels/ 30 | *.egg-info/ 31 | .installed.cfg 32 | *.egg 33 | MANIFEST 34 | 35 | # PyInstaller 36 | # Usually these files are 
written by a python script from a template 37 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 38 | *.manifest 39 | *.spec 40 | 41 | # Installer logs 42 | pip-log.txt 43 | pip-delete-this-directory.txt 44 | 45 | # Unit test / coverage reports 46 | htmlcov/ 47 | .tox/ 48 | .nox/ 49 | .coverage 50 | .coverage.* 51 | .cache 52 | nosetests.xml 53 | coverage.xml 54 | *.cover 55 | *.py,cover 56 | .hypothesis/ 57 | .pytest_cache/ 58 | cover/ 59 | 60 | # Translations 61 | *.mo 62 | *.pot 63 | 64 | # Django stuff: 65 | *.log 66 | local_settings.py 67 | db.sqlite3 68 | db.sqlite3-journal 69 | 70 | # Flask stuff: 71 | instance/ 72 | .webassets-cache 73 | 74 | # Scrapy stuff: 75 | .scrapy 76 | 77 | # Sphinx documentation 78 | docs/_build/ 79 | 80 | # PyBuilder 81 | .pybuilder/ 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # IPython 88 | profile_default/ 89 | ipython_config.py 90 | 91 | # pyenv 92 | # For a library or package, you might want to ignore these files since the code is 93 | # intended to run in multiple environments; otherwise, check them in: 94 | # .python-version 95 | 96 | # pipenv 97 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 98 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 99 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 100 | # install all needed dependencies. 101 | #Pipfile.lock 102 | 103 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 104 | __pypackages__/ 105 | 106 | # Celery stuff 107 | celerybeat-schedule 108 | celerybeat.pid 109 | 110 | # SageMath parsed files 111 | *.sage.py 112 | 113 | # Environments 114 | .env 115 | .venv 116 | env/ 117 | venv/ 118 | ENV/ 119 | env.bak/ 120 | venv.bak/ 121 | 122 | # Spyder project settings 123 | .spyderproject 124 | .spyproject 125 | 126 | # Rope project settings 127 | .ropeproject 128 | 129 | # mkdocs documentation 130 | /site 131 | 132 | # mypy 133 | .mypy_cache/ 134 | .dmypy.json 135 | dmypy.json 136 | 137 | # Pyre type checker 138 | .pyre/ 139 | 140 | # pytype static type analyzer 141 | .pytype/ 142 | 143 | # Cython debug symbols 144 | cython_debug/ 145 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | ci: 2 | autoupdate_schedule: "quarterly" 3 | 4 | default_language_version: 5 | python: "python3.12" 6 | 7 | # Binary files in the test suite must be ignored. 
8 | exclude: "\\.bin$" 9 | 10 | repos: 11 | - repo: "meta" 12 | hooks: 13 | - id: "check-hooks-apply" 14 | - id: "check-useless-excludes" 15 | 16 | - repo: "https://github.com/pre-commit/pre-commit-hooks" 17 | rev: "v5.0.0" 18 | hooks: 19 | - id: "check-added-large-files" 20 | - id: "check-merge-conflict" 21 | - id: "check-yaml" 22 | - id: "end-of-file-fixer" 23 | - id: "mixed-line-ending" 24 | args: 25 | - "--fix=lf" 26 | - id: "trailing-whitespace" 27 | 28 | - repo: "https://github.com/asottile/pyupgrade" 29 | rev: "v3.19.1" 30 | hooks: 31 | - id: "pyupgrade" 32 | name: "Enforce Python 3.8+ idioms" 33 | args: 34 | - "--py38-plus" 35 | 36 | - repo: "https://github.com/psf/black-pre-commit-mirror" 37 | rev: "25.1.0" 38 | hooks: 39 | - id: "black" 40 | 41 | - repo: "https://github.com/pycqa/isort" 42 | rev: "6.0.1" 43 | hooks: 44 | - id: "isort" 45 | 46 | - repo: "https://github.com/pycqa/flake8" 47 | rev: "7.2.0" 48 | hooks: 49 | - id: "flake8" 50 | additional_dependencies: 51 | - "flake8-bugbear==24.8.19" 52 | 53 | - repo: "https://github.com/editorconfig-checker/editorconfig-checker.python" 54 | rev: "3.2.1" 55 | hooks: 56 | - id: "editorconfig-checker" 57 | 58 | - repo: "https://github.com/python-jsonschema/check-jsonschema" 59 | rev: "0.32.1" 60 | hooks: 61 | - id: "check-dependabot" 62 | - id: "check-github-workflows" 63 | 64 | - repo: "https://github.com/rhysd/actionlint" 65 | rev: "v1.7.7" 66 | hooks: 67 | - id: "actionlint" 68 | 69 | - repo: "https://github.com/kurtmckee/pre-commit-hooks" 70 | rev: "v1.0.0" 71 | hooks: 72 | - id: "verify-consistent-pyproject-toml-python-requirements" 73 | -------------------------------------------------------------------------------- /CHANGELOG.rst: -------------------------------------------------------------------------------- 1 | .. 2 | This is the FRU Tool changelog. 3 | 4 | It is managed and updated by scriv during development. 5 | Please do not edit this file directly. 
6 | Instead, run "scriv create" to create a new changelog fragment. 7 | 8 | 9 | Changelog 10 | ********* 11 | 12 | 13 | Unreleased changes 14 | ================== 15 | 16 | Please see the fragment files in the `changelog.d directory`_. 17 | 18 | .. _changelog.d directory: https://github.com/genotrance/fru-tool/tree/main/changelog.d 19 | 20 | 21 | .. scriv-insert-here 22 | 23 | .. _changelog-4.1.0: 24 | 25 | 4.1.0 - 2024-09-26 26 | ================== 27 | 28 | Added 29 | ----- 30 | 31 | - Decode FRU fields encoded using 6-bit ASCII. 32 | 33 | Note that it is currently not possible to encode fields back to 6-bit ASCII. 34 | 35 | .. _changelog-4.0.2: 36 | 37 | 4.0.2 - 2024-08-04 38 | ================== 39 | 40 | Changed 41 | ------- 42 | 43 | - Exclude FRU sections when the associated ``include_*`` key 44 | has been removed from the ``[common]`` section. (#21) 45 | 46 | The previous behavior was to assume a section should be included 47 | unless the associated ``include_*`` key was explicitly set to false. 48 | 49 | .. _changelog-4.0.1: 50 | 51 | 4.0.1 - 2024-05-30 52 | ================== 53 | 54 | Fixed 55 | ----- 56 | 57 | - Always specify UTF-8 encoding when reading TOML files. 58 | 59 | .. _changelog-4.0.0: 60 | 61 | 4.0.0 - 2024-04-13 62 | ================== 63 | 64 | Python support 65 | -------------- 66 | 67 | * Support Python 3.8 and higher. 68 | 69 | Documentation 70 | ------------- 71 | 72 | * Overhaul the README. 73 | * Add a CHANGELOG. 74 | 75 | Development 76 | ----------- 77 | 78 | * Add configurations for common tools: 79 | 80 | * Dependabot 81 | * EditorConfig 82 | * pre-commit 83 | 84 | * Add a GitHub workflow to test the project. 85 | * Allow project dependencies to auto-update by running ``tox run -m update``. 86 | * Prepare to test the project using mypy. 
87 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Dell Technologies 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | FRU Tool 2 | ######## 3 | 4 | FRU Tool is a command-line utility for generating and converting IPMI FRU binary data files. 5 | 6 | 7 | Description 8 | =========== 9 | 10 | Every modern component of a computer or electronic equipment, 11 | commonly referred to as a Field Replaceable Unit or FRU, 12 | contains a memory block that stores the inventory information of that component. 
13 | This includes the manufacturer's name, product name, manufacture date, serial numbers 14 | and other details that help identify the component. 15 | 16 | The Intel FRU Information Storage for `IPMI specification`_ defines the standard format 17 | that devices are expected to conform to within their FRU areas. 18 | Each component vendor populates the FRU area during their manufacturing process 19 | and all FRU areas are easily accessible via IPMI. 20 | 21 | The OEM FRU storage feature of Dell EMC PowerEdge servers is an additional FRU area that allows OEM customers, 22 | who use Dell EMC servers as a component of their solution, 23 | to include their own tracking information in the FRU storage area. 24 | This can be loaded into the server during factory deployment 25 | and can be accessed when the information is required during troubleshooting or support. 26 | This allows the OEM customers to store their own part numbers and inventory information within the server, 27 | enabling them to track their solutions in their internal management systems. 28 | This is similar to the way Dell EMC servers use the standard FRU areas to store tracking information 29 | such as service tags and manufacture date 30 | and use that information when having to identify and support those systems once in the field. 31 | 32 | Considering that the FRU area is a binary payload, 33 | it is not trivial to build the content structure by hand. 34 | To simplify the effort for OEM customers, 35 | this Python tool is provided to speed up the process of creating the payload. 36 | 37 | While FRU Tool was specifically authored to support this OEM use case, 38 | it conforms to Intel's specification and can be used to build the FRU structure for any device. 39 | 40 | 41 | Prerequisites 42 | ============= 43 | 44 | FRU Tool is tested with Python 3.8 and higher. 
45 | 46 | In order to write, read, or edit the OEM FRU storage area on the target server, 47 | the open source `IPMItool`_ utility or equivalent is required. 48 | This utility can be installed on Linux distributions by using the built-in package manager such as yum or apt-get. 49 | Dell EMC provides a Windows version which can be found in the *Driver and Downloads* section for any PowerEdge server 50 | on `Dell EMC Support`_ under the *Systems Management* section. 51 | It is contained in the package named *Dell OpenManage BMC Utility* which can also be found on Google by searching for the package by name. 52 | For documentation on IPMItool, search for 'man ipmitool' on Google. 53 | 54 | 55 | Installation 56 | ============ 57 | 58 | Installation is as simple as running ``python -m pip install fru``. 59 | 60 | 61 | Usage Instructions 62 | ================== 63 | 64 | FRU Tool includes a CLI named ``frutool``. 65 | It can be run using either of these methods, depending on how your paths are configured: 66 | 67 | .. code-block:: 68 | 69 | frutool 70 | 71 | python -m fru 72 | 73 | These are equivalent commands. 74 | For convenience, the commands below run as ``frutool``. 75 | 76 | 77 | Generate a sample text file 78 | --------------------------- 79 | 80 | To create a complete -- but empty -- text file, run the ``frutool sample`` command: 81 | 82 | .. code-block:: 83 | 84 | frutool sample EDITABLE.txt 85 | 86 | 87 | (Change ``EDITABLE.txt`` to whatever filename matches your needs.) 88 | 89 | You can then open the text file in any editor and edit its contents. 90 | Note that the file format is TOML; 91 | basic format instructions are included as comments at the top of the sample file. 92 | 93 | 94 | Convert a binary FRU file to text 95 | --------------------------------- 96 | 97 | To convert a binary FRU file to an editable text file, run the ``frutool dump`` command: 98 | 99 | .. 
code-block:: 100 | 101 | frutool dump FRU.bin EDITABLE.txt 102 | 103 | 104 | (Change ``FRU.bin`` and ``EDITABLE.txt`` to whatever filenames match your needs.) 105 | 106 | You can then review and edit the text file. 107 | 108 | 109 | Convert a text file to a binary FRU file 110 | ---------------------------------------- 111 | 112 | To convert a text file to binary FRU file, run the ``frutool generate`` command: 113 | 114 | .. code-block:: 115 | 116 | frutool generate EDITABLE.txt FRU.bin 117 | 118 | 119 | (Change ``EDITABLE.txt`` and ``FRU.bin`` to whatever filenames match your needs.) 120 | 121 | You can then write the binary FRU file to the hardware system using ``ipmitool``: 122 | 123 | .. code-block:: 124 | 125 | ipmitool -I lanplus -H $IP_ADDRESS -U root -P password fru write FRU.bin 126 | 127 | 128 | Detailed usage information and use cases for the OEM FRU feature 129 | can be found in the `Dell OEM FRU Whitepaper`_. 130 | 131 | 132 | Contribution 133 | ============ 134 | 135 | In order to contribute, feel free to fork the project and submit a pull request with all your changes and a description on what was added or removed and why. 136 | If approved, the project owners will merge it. 137 | 138 | 139 | Licensing 140 | ========= 141 | 142 | FRU Tool is freely distributed under the MIT License. 143 | 144 | 145 | Support 146 | ======= 147 | 148 | Please file bugs and issues on the GitHub issues page for this project. 149 | The code and documentation are released with no warranties or SLAs 150 | and are intended to be supported through a community driven process. 151 | 152 | 153 | .. Links 154 | .. ----- 155 | .. 156 | .. _IPMI specification: https://www.intel.com/content/dam/www/public/us/en/documents/specification-updates/ipmi-platform-mgt-fru-info-storage-def-v1-0-rev-1-3-spec-update.pdf 157 | .. _IPMItool: https://codeberg.org/IPMITool/ipmitool 158 | .. _Dell EMC Support: https://support.dell.com 159 | .. 
_Dell OEM FRU Whitepaper: https://downloads.dell.com/solutions/general-solution-resources/White%20Papers/OEM%20FRU%20Technical%20Whitepaper.pdf 160 | -------------------------------------------------------------------------------- /changelog.d/README.txt: -------------------------------------------------------------------------------- 1 | Changelog fragments in this directory are generated by scriv by running: 2 | 3 | scriv create 4 | 5 | The files are collected during the release process by running: 6 | 7 | scriv collect 8 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "click" 5 | version = "8.1.7" 6 | description = "Composable command line interface toolkit" 7 | optional = false 8 | python-versions = ">=3.7" 9 | files = [ 10 | {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, 11 | {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, 12 | ] 13 | 14 | [package.dependencies] 15 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 16 | 17 | [[package]] 18 | name = "colorama" 19 | version = "0.4.6" 20 | description = "Cross-platform colored terminal text." 
21 | optional = false 22 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 23 | files = [ 24 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 25 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 26 | ] 27 | 28 | [[package]] 29 | name = "tomli" 30 | version = "2.0.1" 31 | description = "A lil' TOML parser" 32 | optional = false 33 | python-versions = ">=3.7" 34 | files = [ 35 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 36 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 37 | ] 38 | 39 | [metadata] 40 | lock-version = "2.0" 41 | python-versions = ">=3.8" 42 | content-hash = "73714f32fce09f387942d0c1b0c4b4f13c8b48b300a457335caa6ca2c948e178" 43 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "fru" 3 | version = "4.1.0" 4 | description = "Read and write binary FRU files" 5 | authors = [ 6 | "Kurt McKee ", 7 | "Ganesh Viswanathan ", 8 | ] 9 | license = "MIT" 10 | readme = "README.rst" 11 | include = [ 12 | "CHANGELOG.rst", 13 | ] 14 | repository = "https://github.com/genotrance/fru-tool/" 15 | keywords = ["fru", "ipmi"] 16 | classifiers = [ 17 | "Development Status :: 5 - Production/Stable", 18 | "Intended Audience :: Customer Service", 19 | "Intended Audience :: Developers", 20 | "Intended Audience :: Manufacturing", 21 | "Natural Language :: English", 22 | "Operating System :: OS Independent", 23 | "Topic :: System :: Hardware", 24 | "Topic :: Utilities", 25 | ] 26 | 27 | 28 | [tool.poetry.dependencies] 29 | python = ">=3.8" 30 | click = "^8.0.0" 31 | tomli = { version = "^2.0.1", 
python = "<3.11" } 32 | 33 | 34 | [tool.poetry.scripts] 35 | frutool = 'fru.console:run' 36 | 37 | 38 | [build-system] 39 | requires = ["poetry-core>=1.0.0"] 40 | build-backend = "poetry.core.masonry.api" 41 | 42 | 43 | # coverage 44 | # -------- 45 | 46 | [tool.coverage.run] 47 | branch = true 48 | parallel = true 49 | source = [ 50 | "fru", 51 | "tests", 52 | ] 53 | 54 | [tool.coverage.paths] 55 | source = [ 56 | "src", 57 | "*/site-packages", 58 | ] 59 | 60 | [tool.coverage.report] 61 | fail_under = 82 62 | 63 | 64 | # mypy 65 | # ---- 66 | # 67 | #[tool.mypy] 68 | #packages = "src.fru" 69 | #strict = true 70 | #sqlite_cache = true 71 | 72 | 73 | # isort 74 | # ----- 75 | 76 | [tool.isort] 77 | profile = "black" 78 | 79 | 80 | # pytest 81 | # ------ 82 | [tool.pytest.ini_options] 83 | addopts = "--color=yes" 84 | filterwarnings = [ 85 | "error", 86 | ] 87 | 88 | 89 | # scriv 90 | # ----- 91 | 92 | [tool.scriv] 93 | version = "literal: pyproject.toml: tool.poetry.version" 94 | categories = [ 95 | "Python support", 96 | "Added", 97 | "Fixed", 98 | "Removed", 99 | "Changed", 100 | "Deprecated", 101 | "Security", 102 | "Documentation", 103 | ] 104 | entry_title_template = "{{ version }} - {{ date.strftime('%Y-%m-%d') }}" 105 | format = "rst" 106 | fragment_directory = "changelog.d" 107 | insert_marker = "scriv-insert-here" 108 | main_branches = ["main", "releases"] 109 | -------------------------------------------------------------------------------- /requirements/README.rst: -------------------------------------------------------------------------------- 1 | ``requirements/`` 2 | ################# 3 | 4 | This directory contains the files that manage dependencies for the project. 5 | 6 | At the time of writing, Poetry supports discrete dependency groups 7 | but always resolves dependencies coherently across all groups. 
8 | However, in some cases, dependencies do not need to be coherently resolved; 9 | for example, mypy's dependencies do not need to be resolved 10 | together with Sphinx's dependencies. 11 | 12 | Each subdirectory in this directory contains a ``pyproject.toml`` file 13 | with purpose-specific dependencies listed. 14 | 15 | 16 | How it's used 17 | ============= 18 | 19 | Tox is configured to use the exported ``requirements.txt`` files as needed. 20 | In addition, Read the Docs is configured to use ``docs/requirements.txt``. 21 | This helps ensure reproducible testing, linting, and documentation builds. 22 | 23 | 24 | How it's updated 25 | ================ 26 | 27 | A tox label, ``update``, ensures that dependencies can be easily updated, 28 | and that ``requirements.txt`` files are consistently re-exported. 29 | 30 | This can be invoked by running: 31 | 32 | .. code-block:: 33 | 34 | tox run -m update 35 | 36 | 37 | How to add dependencies 38 | ======================= 39 | 40 | New dependencies can be added to a given subdirectory's ``pyproject.toml`` 41 | by either manually modifying the file, or by running a command like: 42 | 43 | .. code-block:: 44 | 45 | poetry add --lock --directory "requirements/$DIR" $DEPENDENCY_NAME 46 | 47 | Either way, the dependencies must be re-exported: 48 | 49 | .. code-block:: 50 | 51 | tox run -m update 52 | -------------------------------------------------------------------------------- /requirements/mypy/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
2 | 3 | [[package]] 4 | name = "mypy" 5 | version = "1.11.2" 6 | description = "Optional static typing for Python" 7 | optional = false 8 | python-versions = ">=3.8" 9 | files = [ 10 | {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, 11 | {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, 12 | {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, 13 | {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, 14 | {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, 15 | {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, 16 | {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, 17 | {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, 18 | {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, 19 | {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, 20 | {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, 21 | {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, 22 | {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, 23 | {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, 24 | {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, 25 | {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, 26 | {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, 27 | {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, 28 | {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, 29 | {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, 30 | {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, 31 | {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, 32 | {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, 33 | {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, 34 | {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, 35 | {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, 36 | {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, 37 | ] 38 | 39 | [package.dependencies] 40 | mypy-extensions = ">=1.0.0" 41 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 42 | typing-extensions = ">=4.6.0" 43 | 44 | [package.extras] 45 | dmypy = ["psutil (>=4.0)"] 46 | install-types = ["pip"] 47 | mypyc = ["setuptools (>=50)"] 48 | reports = ["lxml"] 49 | 50 | [[package]] 51 | name = "mypy-extensions" 52 | version = "1.0.0" 53 | description = "Type system extensions for programs checked with the mypy type checker." 54 | optional = false 55 | python-versions = ">=3.5" 56 | files = [ 57 | {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, 58 | {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, 59 | ] 60 | 61 | [[package]] 62 | name = "tomli" 63 | version = "2.0.1" 64 | description = "A lil' TOML parser" 65 | optional = false 66 | python-versions = ">=3.7" 67 | files = [ 68 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 69 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 70 | ] 71 | 72 | [[package]] 73 | name = "typing-extensions" 74 | version = "4.12.2" 75 | description = "Backported and Experimental Type Hints for Python 3.8+" 76 | optional = false 77 | python-versions = ">=3.8" 78 | files = [ 79 | {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, 80 | {file = "typing_extensions-4.12.2.tar.gz", 
hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, 81 | ] 82 | 83 | [metadata] 84 | lock-version = "2.0" 85 | python-versions = ">=3.8" 86 | content-hash = "b833d176fca002e4447495cfff24838c65260770cb6aac80fe562d8e705ca316" 87 | -------------------------------------------------------------------------------- /requirements/mypy/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | package-mode = false 3 | 4 | [tool.poetry.dependencies] 5 | python = ">=3.8" 6 | mypy = "*" 7 | -------------------------------------------------------------------------------- /requirements/mypy/requirements.txt: -------------------------------------------------------------------------------- 1 | mypy-extensions==1.0.0 ; python_version >= "3.8" 2 | mypy==1.11.2 ; python_version >= "3.8" 3 | tomli==2.0.1 ; python_version < "3.11" and python_version >= "3.8" 4 | typing-extensions==4.12.2 ; python_version >= "3.8" 5 | -------------------------------------------------------------------------------- /requirements/test/poetry.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 2 | 3 | [[package]] 4 | name = "colorama" 5 | version = "0.4.6" 6 | description = "Cross-platform colored terminal text." 
7 | optional = false 8 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 9 | files = [ 10 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 11 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 12 | ] 13 | 14 | [[package]] 15 | name = "coverage" 16 | version = "7.6.1" 17 | description = "Code coverage measurement for Python" 18 | optional = false 19 | python-versions = ">=3.8" 20 | files = [ 21 | {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, 22 | {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, 23 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, 24 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, 25 | {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, 26 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, 27 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, 28 | {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, 29 | {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = 
"sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, 30 | {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, 31 | {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, 32 | {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, 33 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, 34 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, 35 | {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, 36 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, 37 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, 38 | {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, 39 | {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, 40 | {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, 41 | {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, 42 | {file = 
"coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, 43 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, 44 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, 45 | {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, 46 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, 47 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, 48 | {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, 49 | {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, 50 | {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, 51 | {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, 52 | {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, 53 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, 54 | {file = 
"coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, 55 | {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, 56 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, 57 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, 58 | {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, 59 | {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, 60 | {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, 61 | {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, 62 | {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, 63 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, 64 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, 65 | {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, 66 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, 67 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, 68 | {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, 69 | {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, 70 | {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, 71 | {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, 72 | {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, 73 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, 74 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, 75 | {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, 76 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, 77 | {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, 78 | {file = 
"coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, 79 | {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, 80 | {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, 81 | {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, 82 | {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, 83 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, 84 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, 85 | {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, 86 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, 87 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, 88 | {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, 89 | {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, 90 | {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, 91 | {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, 92 | {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, 93 | ] 94 | 95 | [package.dependencies] 96 | tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} 97 | 98 | [package.extras] 99 | toml = ["tomli"] 100 | 101 | [[package]] 102 | name = "exceptiongroup" 103 | version = "1.2.2" 104 | description = "Backport of PEP 654 (exception groups)" 105 | optional = false 106 | python-versions = ">=3.7" 107 | files = [ 108 | {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, 109 | {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, 110 | ] 111 | 112 | [package.extras] 113 | test = ["pytest (>=6)"] 114 | 115 | [[package]] 116 | name = "importlib-metadata" 117 | version = "8.5.0" 118 | description = "Read metadata from Python packages" 119 | optional = false 120 | python-versions = ">=3.8" 121 | files = [ 122 | {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, 123 | {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, 124 | ] 125 | 126 | [package.dependencies] 127 | zipp = ">=3.20" 128 | 129 | [package.extras] 130 | check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] 131 | cover = ["pytest-cov"] 132 | doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 133 | enabler = ["pytest-enabler (>=2.2)"] 134 | perf = ["ipython"] 135 | test = ["flufl.flake8", 
"importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] 136 | type = ["pytest-mypy"] 137 | 138 | [[package]] 139 | name = "iniconfig" 140 | version = "2.0.0" 141 | description = "brain-dead simple config-ini parsing" 142 | optional = false 143 | python-versions = ">=3.7" 144 | files = [ 145 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 146 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 147 | ] 148 | 149 | [[package]] 150 | name = "packaging" 151 | version = "24.1" 152 | description = "Core utilities for Python packages" 153 | optional = false 154 | python-versions = ">=3.8" 155 | files = [ 156 | {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, 157 | {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, 158 | ] 159 | 160 | [[package]] 161 | name = "pluggy" 162 | version = "1.5.0" 163 | description = "plugin and hook calling mechanisms for python" 164 | optional = false 165 | python-versions = ">=3.8" 166 | files = [ 167 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, 168 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, 169 | ] 170 | 171 | [package.extras] 172 | dev = ["pre-commit", "tox"] 173 | testing = ["pytest", "pytest-benchmark"] 174 | 175 | [[package]] 176 | name = "pytest" 177 | version = "8.3.3" 178 | description = "pytest: simple powerful testing with Python" 179 | optional = false 180 | python-versions = ">=3.8" 181 | files = [ 182 | {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, 
183 | {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, 184 | ] 185 | 186 | [package.dependencies] 187 | colorama = {version = "*", markers = "sys_platform == \"win32\""} 188 | exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} 189 | iniconfig = "*" 190 | packaging = "*" 191 | pluggy = ">=1.5,<2" 192 | tomli = {version = ">=1", markers = "python_version < \"3.11\""} 193 | 194 | [package.extras] 195 | dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] 196 | 197 | [[package]] 198 | name = "pytest-randomly" 199 | version = "3.15.0" 200 | description = "Pytest plugin to randomly order tests and control random.seed." 201 | optional = false 202 | python-versions = ">=3.8" 203 | files = [ 204 | {file = "pytest_randomly-3.15.0-py3-none-any.whl", hash = "sha256:0516f4344b29f4e9cdae8bce31c4aeebf59d0b9ef05927c33354ff3859eeeca6"}, 205 | {file = "pytest_randomly-3.15.0.tar.gz", hash = "sha256:b908529648667ba5e54723088edd6f82252f540cc340d748d1fa985539687047"}, 206 | ] 207 | 208 | [package.dependencies] 209 | importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} 210 | pytest = "*" 211 | 212 | [[package]] 213 | name = "tomli" 214 | version = "2.0.1" 215 | description = "A lil' TOML parser" 216 | optional = false 217 | python-versions = ">=3.7" 218 | files = [ 219 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 220 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 221 | ] 222 | 223 | [[package]] 224 | name = "zipp" 225 | version = "3.20.2" 226 | description = "Backport of pathlib-compatible object wrapper for zip files" 227 | optional = false 228 | python-versions = ">=3.8" 229 | files = [ 230 | {file = "zipp-3.20.2-py3-none-any.whl", hash = 
"sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, 231 | {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, 232 | ] 233 | 234 | [package.extras] 235 | check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] 236 | cover = ["pytest-cov"] 237 | doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] 238 | enabler = ["pytest-enabler (>=2.2)"] 239 | test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] 240 | type = ["pytest-mypy"] 241 | 242 | [metadata] 243 | lock-version = "2.0" 244 | python-versions = ">=3.8" 245 | content-hash = "ebfffd680b7ea84d2bdbedf40a6aff77b3e645155f439d3016b84f2d6ff32992" 246 | -------------------------------------------------------------------------------- /requirements/test/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | package-mode = false 3 | 4 | [tool.poetry.dependencies] 5 | python = ">=3.8" 6 | coverage = {extras = ["toml"], version = "*"} 7 | pytest = "*" 8 | pytest-randomly = "*" 9 | -------------------------------------------------------------------------------- /requirements/test/requirements.txt: -------------------------------------------------------------------------------- 1 | colorama==0.4.6 ; python_version >= "3.8" and sys_platform == "win32" 2 | coverage[toml]==7.6.1 ; python_version >= "3.8" 3 | exceptiongroup==1.2.2 ; python_version < "3.11" and python_version >= "3.8" 4 | importlib-metadata==8.5.0 ; python_version < "3.10" and python_version >= "3.8" 5 | iniconfig==2.0.0 ; python_version >= "3.8" 6 | packaging==24.1 ; python_version >= "3.8" 7 | pluggy==1.5.0 ; python_version >= "3.8" 8 | pytest-randomly==3.15.0 ; python_version >= "3.8" 9 | pytest==8.3.3 ; python_version >= "3.8" 10 | 
tomli==2.0.1 ; python_full_version <= "3.11.0a6" and python_version >= "3.8" 11 | zipp==3.20.2 ; python_version < "3.10" and python_version >= "3.8" 12 | -------------------------------------------------------------------------------- /src/fru/__init__.py: -------------------------------------------------------------------------------- 1 | # fru - Read and write binary IPMI FRU files 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | 11 | from .fru_format import dump, load 12 | 13 | __all__ = ("dump", "load") 14 | -------------------------------------------------------------------------------- /src/fru/__main__.py: -------------------------------------------------------------------------------- 1 | # fru - Read and write binary IPMI FRU files 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | from . import console 11 | 12 | console.run() 13 | -------------------------------------------------------------------------------- /src/fru/console.py: -------------------------------------------------------------------------------- 1 | # fru - Read and write binary IPMI FRU files 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | 11 | import pathlib 12 | import sys 13 | 14 | import click 15 | 16 | from . 
@click.group()
@click.version_option()
def run():
    pass


def _emit(output: str, toml_file) -> None:
    """Write *output* to *toml_file*, or echo it to stdout when no path was given."""

    if toml_file:
        with toml_file.open("wt", encoding="utf8") as file:
            file.write(output)
    else:
        click.echo(output)


@click.command("generate", no_args_is_help=True)
@click.argument(
    "toml_file",
    type=click.Path(
        exists=True, dir_okay=False, resolve_path=True, path_type=pathlib.Path
    ),
)
@click.argument(
    "fru_file",
    type=click.Path(
        exists=False, dir_okay=False, resolve_path=True, path_type=pathlib.Path
    ),
)
@click.option(
    "--force", is_flag=True, help="Overwrite an existing file if it already exists."
)
def run_generate(toml_file: pathlib.Path, fru_file: pathlib.Path, force: bool):
    """Generate a binary FRU file using data in a TOML file."""

    # Refuse to clobber an existing output file unless --force was given.
    if fru_file.exists() and not force:
        click.echo(f"{fru_file} exists and will not be overwritten.")
        sys.exit(1)

    try:
        parsed = toml_format.load(toml_file)
        blob = fru_format.dump(parsed)
    except ValueError as error:
        # Both loaders signal problems as ValueError with a readable message.
        click.echo(error.args[0])
        sys.exit(1)

    fru_file.write_bytes(blob)


@click.command("dump", no_args_is_help=True)
@click.argument(
    "fru_file",
    type=click.Path(
        exists=True, dir_okay=False, resolve_path=True, path_type=pathlib.Path
    ),
)
@click.argument(
    "toml_file",
    type=click.Path(
        exists=False, dir_okay=False, resolve_path=True, path_type=pathlib.Path
    ),
    required=False,
)
@click.option(
    "--force", is_flag=True, help="Overwrite an existing file if it already exists."
)
def run_dump(fru_file: pathlib.Path, toml_file: pathlib.Path, force: bool):
    """Dump data from a binary FRU file to the TOML file format."""

    # TOML_FILE is optional; without it the output goes to stdout.
    if toml_file and toml_file.exists() and not force:
        click.echo(f"{toml_file} exists and will not be overwritten.")
        sys.exit(1)

    try:
        data = fru_format.load(path=fru_file)
    except ValueError as error:
        click.echo(error.args[0])
        sys.exit(1)

    _emit(toml_format.dump(data), toml_file)


@click.command("sample")
@click.argument(
    "toml_file",
    type=click.Path(
        exists=False, dir_okay=False, resolve_path=True, path_type=pathlib.Path
    ),
    required=False,
)
@click.option(
    "--force", is_flag=True, help="Overwrite an existing file if it already exists."
)
def run_sample(toml_file, force: bool):
    """Generate a blank TOML document."""

    if toml_file and toml_file.exists() and not force:
        click.echo(f"{toml_file} exists and will not be overwritten.")
        sys.exit(1)

    # Calling dump() with no data produces a blank template document.
    _emit(toml_format.dump(), toml_file)


run.add_command(run_generate)
run.add_command(run_dump)
run.add_command(run_sample)
class FRUException(Exception):
    """Base exception for the fru module."""


class DateTimeException(FRUException):
    """A date or time error was encountered.

    The FRU format stores dates and times as three bytes representing
    the number of minutes since 1996-01-01 at 00:00. This forces all
    dates and times to stay within a certain range, and prevents seconds
    from being represented.
    """


class TOMLException(FRUException):
    """An error was encountered while encoding or decoding TOML."""


def validate_checksum(blob: bytes, offset: int, length: int) -> None:
    """Validate a chassis, board, or product checksum.

    *blob* is the binary data blob, and *offset* is the integer offset that
    the chassis, board, or product info area starts at.

    Raises ValueError if the area is truncated or the checksum is wrong.
    """

    if offset + length > len(blob):
        # Previously a short blob surfaced as struct.error / TypeError;
        # raise the ValueError callers already handle for bad checksums.
        raise ValueError("The data do not match the checksum")

    # The final byte is a 2's-complement checksum: every byte in the area,
    # including the checksum byte itself, must sum to 0 (mod 256).
    if sum(blob[offset : offset + length]) & 0xFF != 0:
        raise ValueError("The data do not match the checksum")


def _decode_field(blob: bytes, offset: int):
    """Decode one type/length-prefixed field starting at *offset*.

    Returns ``(value, next_offset)``, or ``(None, offset)`` when the 0xC1
    end-of-fields sentinel is encountered.
    """

    type_length = blob[offset]
    if type_length == 0xC1:  # "no more fields" sentinel
        return None, offset

    length = type_length & 0x3F  # low 6 bits: payload length in bytes
    encoding = (type_length & 0xC0) >> 6  # high 2 bits: encoding type
    payload = blob[offset + 1 : offset + 1 + length]

    if encoding == 2:  # 6-bit ASCII
        value = decode_6_bit_ascii(payload)
    else:
        value = payload.decode("ascii")

    return value, offset + 1 + length


def extract_values(blob: bytes, offset: int, names: List[str]):
    """Extract values that are delimited by type/length bytes.

    The values will be extracted into a dictionary. They'll be saved to keys
    in the same order that keys are provided in *names*.

    If there are more values than key names then the values will be stored
    in the key *custom_fields*. If the 0xC1 end marker appears before all
    *names* are consumed, the remaining names are simply absent from the
    returned dictionary.
    """

    data = {
        "custom_fields": [],
    }

    # Named fields come first, in the order given by *names*.
    for name in names:
        value, offset = _decode_field(blob, offset)
        if value is None:
            return data
        data[name] = value

    # Anything after the named fields is a custom field.
    while True:
        value, offset = _decode_field(blob, offset)
        if value is None:
            return data
        data["custom_fields"].append(value)
def load(
    path: Union[pathlib.Path, str, None] = None,
    blob: Union[bytes, None] = None,
) -> Dict[str, Dict[str, Union[bool, int, str, List]]]:
    """Load binary FRU information from a file or binary data blob.

    If *path* is provided, it will be read into memory.
    If *blob* is provided it will be used as-is.
    Exactly one of *path* or *blob* must be specified.

    Raises ValueError for bad arguments or checksum mismatches.
    """

    if not path and not blob:
        raise ValueError("You must specify *path* or *blob*.")
    if path and blob:
        raise ValueError("You must specify *path* or *blob*, but not both.")

    if path:
        blob = pathlib.Path(path).read_bytes()

    # The 8-byte common header is protected by its own checksum.
    validate_checksum(blob, 0, 8)

    # Header layout: version byte, then one offset byte per area,
    # each expressed in 8-byte multiples.
    format_version = blob[0] & 0x0F
    internal_offset = blob[1] * 8
    chassis_offset = blob[2] * 8
    board_offset = blob[3] * 8
    product_offset = blob[4] * 8
    # Multirecord areas (blob[5]) are not currently parsed.

    data = {
        "common": {
            "format_version": format_version,
            "size": len(blob),
        },
    }

    if internal_offset:
        # The internal area runs up to the next defined area (or end of blob).
        next_offset = chassis_offset or board_offset or product_offset
        data["internal"] = {
            "format_version": blob[internal_offset] & 0x0F,
            "data": blob[internal_offset + 1 : next_offset or len(blob)],
        }

    if chassis_offset:
        length = blob[chassis_offset + 1] * 8
        validate_checksum(blob, chassis_offset, length)

        data["chassis"] = shared.get_default_chassis_section()
        data["chassis"].update(
            {
                "format_version": blob[chassis_offset] & 0x0F,
                "type": blob[chassis_offset + 2],
            }
        )
        names = shared.get_chassis_section_names()
        data["chassis"].update(extract_values(blob, chassis_offset + 3, names))

    if board_offset:
        length = blob[board_offset + 1] * 8
        validate_checksum(blob, board_offset, length)

        data["board"] = shared.get_default_board_section()
        data["board"].update(
            {
                "format_version": blob[board_offset] & 0x0F,
                "language_code": blob[board_offset + 2],
                # Three little-endian bytes: minutes since 1996-01-01 00:00.
                "mfg_date_time": int.from_bytes(
                    blob[board_offset + 3 : board_offset + 6], "little"
                ),
            }
        )
        names = shared.get_board_section_names()
        data["board"].update(extract_values(blob, board_offset + 6, names))

    if product_offset:
        length = blob[product_offset + 1] * 8
        validate_checksum(blob, product_offset, length)

        data["product"] = shared.get_default_product_section()
        data["product"].update(
            {
                "format_version": blob[product_offset] & 0x0F,
                "language_code": blob[product_offset + 2],
            }
        )
        names = shared.get_product_section_names()
        data["product"].update(extract_values(blob, product_offset + 3, names))

    return data


def dump(data):
    """Serialize *data* (as produced by load() or toml_format.load())
    into a binary FRU blob of exactly data["common"]["size"] bytes.

    Raises ValueError when required keys are missing or the encoded
    content does not fit within the declared size.
    """

    if "common" not in data:
        raise ValueError("[common] section missing in data")

    if "format_version" not in data["common"]:
        raise ValueError('"format_version" key missing in [common]')

    if "size" not in data["common"]:
        raise ValueError('"size" key missing in [common]')

    internal_offset = 0
    chassis_offset = 0
    board_offset = 0
    product_offset = 0
    # Multirecord areas are not supported; the offset is always 0.
    multirecord_offset = 0

    internal = b""
    chassis = b""
    board = b""
    product = b""

    if data.get("internal", {}).get("data"):
        internal = make_internal(data)
    if "chassis" in data:
        chassis = make_chassis(data)
    if "board" in data:
        board = make_board(data)
    if "product" in data:
        product = make_product(data)

    # Offsets are in 8-byte units; the 8-byte common header occupies unit 0.
    pos = 1
    if len(internal):
        internal_offset = pos
        pos += len(internal) // 8
    if len(chassis):
        chassis_offset = pos
        pos += len(chassis) // 8
    if len(board):
        board_offset = pos
        pos += len(board) // 8
    if len(product):
        product_offset = pos

    # Common header
    out = struct.pack(
        "BBBBBBB",
        data["common"]["format_version"],
        internal_offset,
        chassis_offset,
        board_offset,
        product_offset,
        multirecord_offset,
        0x00,
    )

    # Header checksum: all eight header bytes must sum to 0 (mod 256).
    out += struct.pack("B", (0 - sum(bytearray(out))) & 0xFF)

    blob = out + internal + chassis + board + product

    # Check the size budget before padding.
    if len(blob) > data["common"]["size"]:
        raise ValueError("Too much content, does not fit")

    # Zero-pad to the declared total size.
    return blob + b"\x00" * (data["common"]["size"] - len(blob))
// 8 211 | if len(product): 212 | product_offset = pos 213 | 214 | # Header 215 | out = struct.pack( 216 | "BBBBBBB", 217 | data["common"]["format_version"], 218 | internal_offset, 219 | chassis_offset, 220 | board_offset, 221 | product_offset, 222 | multirecord_offset, 223 | 0x00, 224 | ) 225 | 226 | # Checksum 227 | out += struct.pack("B", (0 - sum(bytearray(out))) & 0xFF) 228 | 229 | blob = out + internal + chassis + board + product 230 | difference = data["common"]["size"] - len(blob) 231 | pad = struct.pack("B" * difference, *[0] * difference) 232 | 233 | if len(blob + pad) > data["common"]["size"]: 234 | raise ValueError("Too much content, does not fit") 235 | 236 | return blob + pad 237 | 238 | 239 | def make_internal(data): 240 | return struct.pack( 241 | "B%ds" % len(data["internal"]["data"]), 242 | data["internal"].get("format_version", 1), 243 | data["internal"]["data"], 244 | ) 245 | 246 | 247 | def make_chassis(config): 248 | chassis = shared.get_default_chassis_section() 249 | chassis.update(config["chassis"]) 250 | 251 | out = b"" 252 | 253 | # Type 254 | out += struct.pack("B", chassis["type"]) 255 | 256 | # Strings 257 | fields = shared.get_chassis_section_names() 258 | 259 | for key in fields: 260 | if chassis[key]: 261 | value = chassis[key].encode("ascii") 262 | out += struct.pack("B%ds" % len(value), len(value) | 0xC0, value) 263 | else: 264 | out += struct.pack("B", 0) 265 | 266 | if isinstance(chassis["custom_fields"], (list, tuple)): 267 | for record in chassis["custom_fields"]: 268 | value = record.encode("ascii") 269 | out += struct.pack("B%ds" % len(value), len(value) | 0xC0, value) 270 | 271 | # No more fields 272 | out += struct.pack("B", 0xC1) 273 | 274 | # Padding 275 | while len(out) % 8 != 5: 276 | out += struct.pack("B", 0) 277 | 278 | # Header version and length in bytes 279 | out = ( 280 | struct.pack( 281 | "BB", 282 | chassis["format_version"], 283 | (len(out) + 3) // 8, 284 | ) 285 | + out 286 | ) 287 | 288 | # Checksum 289 | 
def make_board(config):
    """Encode the board info area.

    Layout: language byte, 3-byte manufacturing date, the typed string
    fields, any custom fields, the 0xC1 end marker, zero padding, then
    the version/length header is prepended and a zero checksum appended.
    """

    section = shared.get_default_board_section()
    section.update(config["board"])

    def encode_field(text):
        # Type/length byte (0xC0 | length) followed by the ASCII payload.
        raw = text.encode("ascii")
        return struct.pack("B%ds" % len(raw), len(raw) | 0xC0, raw)

    chunks = [struct.pack("B", section["language_code"])]

    # Manufacturing date: minutes since 1996-01-01, little-endian, 3 bytes.
    minutes = section["mfg_date_time"]
    chunks.append(
        struct.pack(
            "BBB",
            minutes & 0xFF,
            (minutes & 0xFF00) >> 8,
            (minutes & 0xFF0000) >> 16,
        )
    )

    # String fields, in their specification-mandated order.
    for name in shared.get_board_section_names():
        if section[name]:
            chunks.append(encode_field(section[name]))
        else:
            chunks.append(struct.pack("B", 0))

    if isinstance(section["custom_fields"], (list, tuple)):
        chunks.extend(encode_field(item) for item in section["custom_fields"])

    # 0xC1 terminates the field list.
    chunks.append(struct.pack("B", 0xC1))
    body = b"".join(chunks)

    # Pad so the full area (2-byte header + body + checksum) is a
    # multiple of 8 bytes; equivalent to padding until len % 8 == 5.
    body += struct.pack("B", 0) * ((5 - len(body)) % 8)

    # Prepend the format version and total length in 8-byte units.
    area = struct.pack("BB", section["format_version"], (len(body) + 3) // 8) + body

    # Append the zero checksum.
    return area + struct.pack("B", (0 - sum(bytearray(area))) & 0xFF)
def decode_6_bit_ascii(blob: bytes) -> str:
    """Decode bytes that are encoded in 6-bit ASCII.

    Every 3 input bytes pack four 6-bit values, least-significant bits
    first.  Each 6-bit value maps to the ASCII character 0x20 greater
    than its value.  Trailing bytes beyond the last full 3-byte block
    are ignored, matching the previous block-count behavior.
    """

    extracted_bytes = b""
    # Bug fix: each iteration must advance 3 bytes through *blob*.
    # The previous code sliced ``blob[block_offset:3]``, which only
    # decoded the first block correctly and produced short slices
    # (and IndexErrors) for every subsequent block.
    for start in range(0, len(blob) - 2, 3):
        block = blob[start : start + 3]
        extracted_bits = [
            # Value 1 -- lower 6 bits of the first byte
            block[0] & 0b0011_1111,
            # Value 2 -- lower 4 bits of the second byte + upper 2 bits of the first byte
            ((block[1] & 0b0000_1111) << 2) | ((block[0] & 0b1100_0000) >> 6),
            # Value 3 -- lower 2 bits of the third byte + upper 4 bits of the second byte
            ((block[2] & 0b0000_0011) << 4) | ((block[1] & 0b1111_0000) >> 4),
            # Value 4 -- upper 6 bits of the third byte
            (block[2] & 0b1111_1100) >> 2,
        ]
        # Each integer represents an ASCII character 0x20 greater than its value.
        extracted_bytes += bytes([0x20 + bits for bits in extracted_bits])
    return extracted_bytes.decode("ascii")
| "part_number", 68 | "serial_number", 69 | ) 70 | 71 | 72 | def get_default_product_section() -> Dict[str, Union[int, str, list]]: 73 | """Create an empty product section with default values.""" 74 | 75 | return { 76 | "format_version": 1, 77 | "language_code": 0, 78 | "manufacturer": "", 79 | "product_name": "", 80 | "part_number": "", 81 | "product_version": "", 82 | "serial_number": "", 83 | "asset_tag": "", 84 | "fru_file_id": "", 85 | "custom_fields": [], 86 | } 87 | 88 | 89 | def get_product_section_names() -> Tuple[str, ...]: 90 | """Get the list of product section names, in their correct order.""" 91 | 92 | return ( 93 | "manufacturer", 94 | "product_name", 95 | "part_number", 96 | "product_version", 97 | "serial_number", 98 | "asset_tag", 99 | "fru_file_id", 100 | ) 101 | 102 | 103 | def get_default_internal_section() -> Dict[str, Union[int, bytes]]: 104 | """Create an empty internal section with default values.""" 105 | 106 | return { 107 | "format_version": 1, 108 | "data": b"", 109 | } 110 | -------------------------------------------------------------------------------- /src/fru/toml_format.py: -------------------------------------------------------------------------------- 1 | # fru - Read and write binary IPMI FRU files 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | 11 | import datetime 12 | import os 13 | import pathlib 14 | import sys 15 | import textwrap 16 | from importlib.metadata import version 17 | from typing import Dict, List, Tuple, Union 18 | 19 | if sys.version_info >= (3, 11): 20 | import tomllib 21 | else: 22 | # Compatibility 23 | import tomli as tomllib 24 | 25 | from . 
def convert_str_to_minutes(stamp: str) -> int:
    """Convert a str to the number of minutes since 1996-01-01 00:00.

    *stamp* must be formatted as "YYYY-MM-DD HH:MM" and must fall
    within the range representable by the FRU 3-byte minute counter:
    1996-01-01 00:00 through 2027-11-24 20:15.

    Raises exceptions.DateTimeException for malformed or out-of-range
    values.
    """

    try:
        date = datetime.datetime.strptime(stamp, "%Y-%m-%d %H:%M")
    except ValueError:
        msg = f'The date "{stamp}" must follow the format "YYYY-MM-DD HH:MM"'
        # Suppress the underlying ValueError context; it adds nothing
        # to the message and clutters the traceback.
        raise exceptions.DateTimeException(msg) from None

    if date < min_date:
        msg = f'The date/time "{stamp}" must be at least 1996-01-01 00:00'
        raise exceptions.DateTimeException(msg)

    if date > max_date:
        msg = f'The date/time "{stamp}" must be at most 2027-11-24 20:15'
        raise exceptions.DateTimeException(msg)

    return int((date - min_date).total_seconds()) // 60
56 | """ 57 | 58 | if minutes < 0: 59 | msg = f"*minutes* must be >= 0 (got {minutes})" 60 | raise exceptions.DateTimeException(msg) 61 | 62 | if minutes > 0xFF_FF_FF: 63 | msg = f"*minutes* must be <= 0xffffff (got 0x{minutes:x})" 64 | raise exceptions.DateTimeException(msg) 65 | 66 | date = min_date + datetime.timedelta(minutes=minutes) 67 | return date.strftime("%Y-%m-%d %H:%M") 68 | 69 | 70 | def repr_(value: Union[bool, int, str, List]) -> str: 71 | if isinstance(value, bool): 72 | return str(bool(value)).lower() 73 | elif isinstance(value, int): 74 | return str(value) 75 | elif isinstance(value, str): 76 | value = value.replace("\\", "\\\\") 77 | value = value.replace('"', '\\"') 78 | return f'"{value}"' 79 | elif isinstance(value, list): 80 | output = " ".join(f"{repr_(v)}," for v in value).rstrip(",") 81 | return f"[{output}]" 82 | 83 | msg = f"Unable to represent {repr(value)} (type={type(value)}) in the TOML format" 84 | raise exceptions.TOMLException(msg) 85 | 86 | 87 | def repr_internal(value: bytes) -> str: 88 | """Represent the internal section as a sequence of bytes.""" 89 | 90 | if not value: 91 | return "[]" 92 | 93 | output = ["["] 94 | for block in range(0, len(value), 16): 95 | pieces = [] 96 | for i in value[block : block + 16]: 97 | pieces.append(f"0x{i:02x}") 98 | output.append(" " + ", ".join(pieces) + ",") 99 | output.append("]") 100 | return "\n".join(output) 101 | 102 | 103 | def load( 104 | path: Union[pathlib.Path, str] = None, text: str = None 105 | ) -> Dict[str, Dict[str, Union[bytes, int, str]]]: 106 | """Load a TOML file and return its data as a dictionary. 107 | 108 | If *path* is specified it must be a TOML-formatted file. 109 | If *text* is specified it must be a TOML-formatted string. 
110 | """ 111 | 112 | if not path and not text: 113 | raise exceptions.FRUException("*path* or *text* must be specified") 114 | 115 | data = { 116 | "common": shared.get_default_common_section(), 117 | "board": shared.get_default_board_section(), 118 | "chassis": shared.get_default_chassis_section(), 119 | "product": shared.get_default_product_section(), 120 | "internal": shared.get_default_internal_section(), 121 | } 122 | 123 | if path: 124 | with open(path, encoding="utf-8") as file: 125 | toml_data = tomllib.loads(file.read()) 126 | else: 127 | toml_data = tomllib.loads(text) 128 | 129 | for section in data: 130 | if section in toml_data: 131 | data[section].update(toml_data[section]) 132 | 133 | # These values must be integers. 134 | integers: Tuple[Tuple[str, str], ...] = ( 135 | ("common", "size"), 136 | ("common", "format_version"), 137 | ("board", "language_code"), 138 | ("chassis", "type"), 139 | ("product", "language_code"), 140 | ) 141 | 142 | dates = (("board", "mfg_date_time"),) 143 | 144 | # Remove sections that are excluded, either explicitly or implicitly. 145 | for section in ["internal", "chassis", "board", "product", "multirecord"]: 146 | include_section = f"include_{section}" 147 | if not data["common"].get(include_section, False) and section in data: 148 | del data[section] 149 | if include_section in data["common"]: 150 | del data["common"][include_section] 151 | 152 | # Standardize integer values. 153 | for section, key in integers: 154 | if not isinstance(data.get(section, {}).get(key, 0), int): 155 | msg = f'Section [{section}] key "{key}" must be a number' 156 | raise exceptions.TOMLException(msg) 157 | 158 | # Standardize date/time values. 159 | for section, key in dates: 160 | if section in data and key in data[section]: 161 | # Convert a default value of 0 to a corresponding string. 
162 | if not data[section][key]: 163 | data[section][key] = "1996-01-01 00:00" 164 | if not isinstance(data[section][key], str): 165 | msg = f'Section [{section}] key "{key}" must be a string' 166 | raise exceptions.TOMLException(msg) 167 | data[section][key] = convert_str_to_minutes(data[section][key]) 168 | 169 | # Normalize the internal info area data. 170 | if data.get("internal", {}).get("data"): 171 | msg = 'Section [internal] key "data" must be a list of numbers or a string' 172 | try: 173 | data["internal"]["data"] = bytes(data["internal"]["data"]) 174 | except TypeError: 175 | try: 176 | data["internal"]["data"] = data["internal"]["data"].encode("utf8") 177 | except AttributeError: 178 | raise exceptions.TOMLException(msg) 179 | elif data.get("internal", {}).get("file"): 180 | internal_file = os.path.join(os.path.dirname(path), data["internal"]["file"]) 181 | try: 182 | with open(internal_file, "rb") as f: 183 | data["internal"]["data"] = f.read() 184 | except FileNotFoundError: 185 | msg = f"Internal info area file {internal_file} not found" 186 | raise exceptions.TOMLException(msg) 187 | if "file" in data.get("internal", {}): 188 | del data["internal"]["file"] 189 | 190 | return data 191 | 192 | 193 | def dump(data: Dict[str, Dict[str, Union[bytes, int, str]]] = None) -> str: 194 | """Dump data to the TOML format. 195 | 196 | This function can also generate a blank TOML file. 
197 | """ 198 | 199 | data = data or {} 200 | info = { 201 | "common": shared.get_default_common_section(), 202 | "board": shared.get_default_board_section(), 203 | "chassis": shared.get_default_chassis_section(), 204 | "product": shared.get_default_product_section(), 205 | } 206 | for section in ("common", "board", "chassis", "product"): 207 | info[section].update(data.get(section, {})) 208 | 209 | output = f""" 210 | # ------------------------------------------------------------------- 211 | # Generated by frutool {version("fru")} 212 | # https://github.com/genotrance/fru-tool/ 213 | # 214 | # Notes regarding the TOML format, which is like an INI file: 215 | # 216 | # * Values surrounded by quotation marks are strings: "Vendor" 217 | # Literal quotation marks must be escaped using a backslash: "\\"" 218 | # Literal backslashes must also be escaped using a backslash: "\\\\" 219 | # * Boolean values use the words "true" and "false" without quotes. 220 | # * Numbers that begin with 0x are interpreted as hexadecimal: 0x30 221 | # 222 | # ------------------------------------------------------------------- 223 | 224 | 225 | [common] 226 | # Warning: It may be harmful to modify *format_version*. 227 | format_version = {repr_(info["common"]["format_version"])} 228 | 229 | # Warning: It may be harmful to modify *size*. 230 | size = {repr_(info["common"]["size"])} 231 | 232 | # These options control which sections are included in the FRU file. 233 | include_board = {repr_(bool(data.get("board", False)))} 234 | include_chassis = {repr_(bool(data.get("chassis", False)))} 235 | include_product = {repr_(bool(data.get("product", False)))} 236 | include_internal = {repr_(bool(data.get("internal", False)))} 237 | include_multirecord = {repr_(bool(data.get("multirecord", False)))} 238 | 239 | 240 | [board] 241 | # Warning: It may be harmful to modify *format_version*. 
242 | format_version = {repr_(info["board"]["format_version"])} 243 | 244 | language_code = {repr_(info["board"]["language_code"])} 245 | 246 | mfg_date_time = "{convert_minutes_to_str(info["board"]["mfg_date_time"])}" 247 | # │ │ │ │ │ 248 | # year ──┘ │ │ │ ╰── minutes 249 | # month ──╯ │ ╰── hours 250 | # day ──╯ 251 | 252 | manufacturer = {repr_(info["board"]["manufacturer"])} 253 | product_name = {repr_(info["board"]["product_name"])} 254 | serial_number = {repr_(info["board"]["serial_number"])} 255 | part_number = {repr_(info["board"]["part_number"])} 256 | fru_file_id = {repr_(info["board"]["fru_file_id"])} 257 | custom_fields = {repr_(info["board"]["custom_fields"])} 258 | 259 | 260 | [chassis] 261 | # Warning: It may be harmful to modify *format_version*. 262 | format_version = {repr_(info["chassis"]["format_version"])} 263 | 264 | type = {repr_(info["chassis"]["type"])} 265 | part_number = {repr_(info["chassis"]["part_number"])} 266 | serial_number = {repr_(info["chassis"]["serial_number"])} 267 | custom_fields = {repr_(info["chassis"]["custom_fields"])} 268 | 269 | 270 | [product] 271 | # Warning: It may be harmful to modify *format_version*. 272 | format_version = {repr_(info["product"]["format_version"])} 273 | 274 | language_code = {repr_(info["product"]["language_code"])} 275 | manufacturer = {repr_(info["product"]["manufacturer"])} 276 | product_name = {repr_(info["product"]["product_name"])} 277 | part_number = {repr_(info["product"]["part_number"])} 278 | product_version = {repr_(info["product"]["product_version"])} 279 | serial_number = {repr_(info["product"]["serial_number"])} 280 | asset_tag = {repr_(info["product"]["asset_tag"])} 281 | fru_file_id = {repr_(info["product"]["fru_file_id"])} 282 | custom_fields = {repr_(info["product"]["custom_fields"])} 283 | 284 | 285 | [internal] 286 | # Warning: It may be harmful to modify *format_version*. 
287 | format_version = {repr_(data.get("internal", {}).get("format_version", 1))} 288 | 289 | # The *data* key can be used to encode a sequence of bytes serialized 290 | # as a list of numbers. For small amounts of internal data this might 291 | # be sufficient. 292 | # 293 | # Alternatively, if the *file* key is specified then the file will be 294 | # opened and read in binary mode. 295 | # 296 | # Examples: 297 | # 298 | # data = [0x01, 0x02, 0x03] 299 | # file = "path/to/file" 300 | # 301 | # Do not use the *data* and *file* keys at the same time. 302 | 303 | data = {repr_internal(data.get("internal", {}).get("data", b""))} 304 | """ 305 | 306 | return textwrap.dedent(output).strip() + "\n" 307 | -------------------------------------------------------------------------------- /tests/basic-all.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-all.bin -------------------------------------------------------------------------------- /tests/basic-all.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = true 6 | include_chassis = true 7 | include_board = true 8 | include_product = true 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 
| 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version= "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-board.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-board.bin -------------------------------------------------------------------------------- /tests/basic-board.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = false 6 | include_chassis = false 7 | include_board = true 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_format_version = "product-format_version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 
48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-chassis.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-chassis.bin -------------------------------------------------------------------------------- /tests/basic-chassis.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = false 6 | include_chassis = true 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_format_version = "product-format_version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-empty.bin: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-empty.bin -------------------------------------------------------------------------------- /tests/basic-empty.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = false 6 | include_chassis = false 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version = "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-internal-data.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-internal-data.bin -------------------------------------------------------------------------------- /tests/basic-internal-data.toml: 
-------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 16 4 | 5 | include_internal = true 6 | include_chassis = false 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version = "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-internal-file.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-internal-file.bin -------------------------------------------------------------------------------- /tests/basic-internal-file.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 16 4 | 5 | include_internal = true 6 | include_chassis = false 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | 
[internal] 13 | file = "internal-fru.bin" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version = "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-internal.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-internal.bin -------------------------------------------------------------------------------- /tests/basic-internal.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = true 6 | include_chassis = false 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "internal-data" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 
29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | [product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version = "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/basic-product.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/basic-product.bin -------------------------------------------------------------------------------- /tests/basic-product.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = false 6 | include_chassis = false 7 | include_board = false 8 | include_product = true 9 | include_multirecord = false 10 | 11 | 12 | [internal] 13 | data = "data123" 14 | 15 | 16 | [chassis] 17 | type = 0x17 18 | part_number = "chassis-part" 19 | serial_number = "chassis-serial" 20 | custom_fields = [ 21 | "chassis-extra1", 22 | "chassis-extra2", 23 | "chassis-extra3", 24 | "chassis-extra4", 25 | ] 26 | 27 | 28 | [board] 29 | language_code = 0 30 | mfg_date_time = "2017-07-10 16:30" 31 | manufacturer = "board-manufacturer" 32 | product_name = "board-product" 33 | serial_number = "board-serial" 34 | part_number = "board-part" 35 | fru_file_id = "board-fileid" 36 | custom_fields = ["board-extra1", "board-extra2"] 37 | 38 | 39 | 
[product] 40 | language_code = 0 41 | manufacturer = "product-manufacturer" 42 | product_name = "product-product" 43 | part_number = "product-part" 44 | product_version = "product-version" 45 | serial_number = "product-serial" 46 | asset_tag = "product-asset" 47 | fru_file_id = "product-fileid" 48 | custom_fields = ["product-extra1", "product-extra2", "product-extra3"] 49 | -------------------------------------------------------------------------------- /tests/checksum-zero.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/genotrance/fru-tool/fe941b152dffdb03364baf8ef3d0fe6d5a18496b/tests/checksum-zero.bin -------------------------------------------------------------------------------- /tests/include_defaults_to_false.toml: -------------------------------------------------------------------------------- 1 | # Missing 'include_*' directives should default to false. 2 | 3 | [common] 4 | format_version = 1 5 | size = 256 6 | include_board = true 7 | 8 | [board] 9 | format_version = 1 10 | language_code = 0 11 | 12 | mfg_date_time = "2024-01-01 01:01" 13 | manufacturer = "Test Factory .Inc" 14 | product_name = "Test Board" 15 | serial_number = "01234" 16 | part_number = "5678" 17 | fru_file_id = "1" 18 | custom_fields = ["", ""] 19 | -------------------------------------------------------------------------------- /tests/internal-empty.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | include_internal = true 5 | 6 | 7 | [internal] 8 | -------------------------------------------------------------------------------- /tests/internal-fru-file-not-found.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | internal = 1 5 | 6 | [internal] 7 | file = file-not-found.error 8 | 
-------------------------------------------------------------------------------- /tests/internal-fru.bin: -------------------------------------------------------------------------------- 1 | file123 -------------------------------------------------------------------------------- /tests/skip-section.toml: -------------------------------------------------------------------------------- 1 | [common] 2 | format_version = 1 3 | size = 1024 4 | 5 | include_internal = true 6 | include_chassis = false 7 | include_board = false 8 | include_product = false 9 | include_multirecord = false 10 | -------------------------------------------------------------------------------- /tests/test_fru.py: -------------------------------------------------------------------------------- 1 | # Unit tests for fru-tool 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | 11 | import os 12 | 13 | import pytest 14 | 15 | import fru.fru_format 16 | 17 | sections = ( 18 | "all", 19 | "empty", 20 | "board", 21 | "chassis", 22 | "internal-data", 23 | "internal-file", 24 | "product", 25 | ) 26 | 27 | 28 | def test_too_much_data(): 29 | config = { 30 | "common": {"format_version": 1, "size": 20}, 31 | "chassis": {"part_number": "a" * 250}, 32 | } 33 | with pytest.raises(ValueError): 34 | fru.fru_format.dump(config) 35 | 36 | 37 | def test_empty_everything(): 38 | config = { 39 | "common": { 40 | "format_version": 1, 41 | "size": 256, 42 | }, 43 | "internal": {}, 44 | "chassis": {}, 45 | "board": {}, 46 | "product": {}, 47 | } 48 | fru.fru_format.dump(config) 49 | 50 | 51 | def test_missing_required_elements(): 52 | with pytest.raises(ValueError): 53 | fru.fru_format.dump({}) 54 | with pytest.raises(ValueError): 55 | fru.fru_format.dump({"common": {"size": 512}}) 56 | with pytest.raises(ValueError): 57 | 
fru.fru_format.dump({"common": {"format_version": 1}}) 58 | 59 | 60 | def test_load_bad_calls(): 61 | with pytest.raises(ValueError): 62 | fru.fru_format.load() 63 | with pytest.raises(ValueError): 64 | fru.fru_format.load(path="a", blob=b"a") 65 | 66 | 67 | def test_bad_header_checksum(): 68 | blob = b"\x01\x00\x00\x00\x00\x00\x00\x00" 69 | with pytest.raises(ValueError): 70 | fru.fru_format.load(blob=blob) 71 | 72 | 73 | def test_checksum_of_zero(): 74 | path = os.path.join(os.path.dirname(__file__), "checksum-zero.bin") 75 | fru.fru_format.load(path) 76 | 77 | 78 | def test_decode_6_bit_ascii(): 79 | assert fru.fru_format.decode_6_bit_ascii(b"d\xc9\xb2") == "DELL" 80 | 81 | 82 | @pytest.mark.parametrize("section", ["board", "chassis", "product"]) 83 | @pytest.mark.parametrize("count", [i for i in range(10)]) 84 | def test_custom_fields(section, count): 85 | data = { 86 | "common": {"size": 64, "format_version": 1}, 87 | section: {"custom_fields": [f"{i:02}" for i in range(count)]}, 88 | } 89 | symmetric_data = fru.fru_format.load(blob=fru.fru_format.dump(data)) 90 | assert len(symmetric_data[section]["custom_fields"]) == count 91 | -------------------------------------------------------------------------------- /tests/test_toml.py: -------------------------------------------------------------------------------- 1 | # Unit tests for fru-tool 2 | # Copyright 2018-2024 Kurt McKee 3 | # Copyright 2017 Dell Technologies 4 | # 5 | # https://github.com/genotrance/fru-tool/ 6 | # 7 | # Licensed under the terms of the MIT License: 8 | # https://opensource.org/licenses/MIT 9 | 10 | import os 11 | import unittest.mock 12 | 13 | import pytest 14 | 15 | import fru.exceptions 16 | import fru.fru_format 17 | import fru.toml_format 18 | 19 | sections = [ 20 | "all", 21 | "empty", 22 | "board", 23 | "chassis", 24 | "internal-data", 25 | "internal-file", 26 | "product", 27 | ] 28 | 29 | 30 | @pytest.mark.parametrize( 31 | "stamp, minutes", 32 | ( 33 | ("1996-01-01 00:00", 
0x00_00_00), 34 | ("2017-05-29 00:15", 0xAB_CD_EF), 35 | ("2027-11-24 20:15", 0xFF_FF_FF), 36 | ), 37 | ) 38 | def test_convert_valid_dates(stamp, minutes): 39 | """Confirm valid datetime conversions.""" 40 | 41 | assert fru.toml_format.convert_str_to_minutes(stamp) == minutes 42 | assert fru.toml_format.convert_minutes_to_str(minutes) == stamp 43 | 44 | 45 | @pytest.mark.parametrize( 46 | "date", 47 | ( 48 | "1900-01-01 00:00", # too low 49 | "3000-01-01 00:00", # too high 50 | "tomorrow morning", # bad format 51 | ), 52 | ) 53 | def test_convert_str_to_minutes_invalid(date): 54 | """Confirm invalid datetime conversions raise expected errors.""" 55 | 56 | with pytest.raises(fru.exceptions.DateTimeException): 57 | fru.toml_format.convert_str_to_minutes(date) 58 | 59 | 60 | @pytest.mark.parametrize("minutes", (-1, 0x1_00_00_00)) 61 | def test_convert_minutes_to_str_invalid(minutes): 62 | """Confirm invalid minutes values raise expected errors.""" 63 | 64 | with pytest.raises(fru.exceptions.DateTimeException): 65 | fru.toml_format.convert_minutes_to_str(minutes) 66 | 67 | 68 | @pytest.mark.parametrize( 69 | "value, expected", 70 | ( 71 | (False, "false"), 72 | (True, "true"), 73 | (0, "0"), 74 | (1, "1"), 75 | ("", '""'), 76 | ("aa", '"aa"'), 77 | ('"', '"\\""'), 78 | ("\\", '"\\\\"'), 79 | ([], "[]"), 80 | (["aa", "bb", "00"], '["aa", "bb", "00"]'), 81 | ), 82 | ) 83 | def test_repr_(value, expected): 84 | """Confirm value types are represented correctly in the TOML output.""" 85 | 86 | assert fru.toml_format.repr_(value) == expected 87 | 88 | 89 | def test_repr_bad_value(): 90 | with pytest.raises(fru.exceptions.TOMLException): 91 | # noinspection PyTypeChecker 92 | fru.toml_format.repr_(None) 93 | 94 | 95 | def test_dump_empty(): 96 | """Confirm that an empty dump raises no errors.""" 97 | 98 | assert isinstance(fru.toml_format.dump(), str) 99 | 100 | 101 | @pytest.mark.parametrize("section", ("board", "chassis", "product", "internal")) 102 | def 
test_roundtrip_dict_toml_dict(section): 103 | """Confirm that dict -> TOML -> dict roundtrips work.""" 104 | 105 | original_data = { 106 | "common": {"format_version": 1, "size": 1024}, 107 | section: {"format_version": 2}, 108 | } 109 | roundtrip_data = fru.toml_format.load(text=fru.toml_format.dump(original_data)) 110 | 111 | for section in original_data: 112 | for key in original_data[section]: 113 | assert original_data[section][key] == roundtrip_data[section][key] 114 | 115 | 116 | @pytest.mark.parametrize("section", ("board", "chassis", "product")) 117 | def test_roundtrip_fru_toml_fru(section): 118 | """Confirm FRU -> TOML -> FRU roundtrips work.""" 119 | 120 | with open(f"tests/basic-{section}.bin", "rb") as file: 121 | original_blob = file.read() 122 | fru_data = fru.fru_format.load(blob=original_blob) 123 | toml_text = fru.toml_format.dump(fru_data) 124 | toml_data = fru.toml_format.load(text=toml_text) 125 | fru_blob = fru.fru_format.dump(toml_data) 126 | 127 | assert original_blob == fru_blob 128 | 129 | 130 | @pytest.mark.parametrize("name", sections) 131 | def test_basic_toml_sections(name): 132 | path = os.path.join(os.path.dirname(__file__), f"basic-{name}.toml") 133 | config = fru.toml_format.load(path) 134 | actual = fru.fru_format.dump(config) 135 | 136 | path = os.path.join(os.path.dirname(__file__), f"basic-{name}.bin") 137 | with open(path, "rb") as f: 138 | expected = f.read() 139 | 140 | assert actual == expected 141 | 142 | 143 | @pytest.mark.parametrize("name", sections) 144 | def test_identical_loading(name): 145 | path = os.path.join(os.path.dirname(__file__), f"basic-{name}.toml") 146 | toml_data = fru.toml_format.load(path) 147 | 148 | path = os.path.join(os.path.dirname(__file__), f"basic-{name}.bin") 149 | bin_data = fru.fru_format.load(path=path) 150 | 151 | assert len(toml_data) == len(bin_data) 152 | assert toml_data == bin_data 153 | 154 | 155 | def test_skipped_section(): 156 | path = os.path.join(os.path.dirname(__file__), 
"skip-section.toml") 157 | data = fru.toml_format.load(path) 158 | assert "internal" in data 159 | 160 | 161 | def test_internal_fru_file_not_found(): 162 | path = os.path.join(os.path.dirname(__file__), "internal-fru-file-not-found.toml") 163 | with pytest.raises(ValueError) as error: 164 | fru.toml_format.load(path) 165 | assert "not found" in error.msg 166 | 167 | 168 | def test_internal_fru_requested_but_empty(): 169 | path = os.path.join(os.path.dirname(__file__), "internal-empty.toml") 170 | data = fru.toml_format.load(path) 171 | assert "internal" in data 172 | 173 | 174 | def test_repr_internal_empty(): 175 | assert fru.toml_format.repr_internal(b"") == "[]" 176 | 177 | 178 | def test_repr_internal(): 179 | expected = "[\n 0x31, 0x32, 0x33,\n]" 180 | assert fru.toml_format.repr_internal(b"123") == expected 181 | 182 | 183 | def test_encoding(): 184 | """Verify UTF-8 encoding is explicitly specified when opening a path.""" 185 | 186 | original_open = open 187 | 188 | def replacement_open(*args, **kwargs): 189 | # If 'encoding' is not specified, default to "UTF-32" to force a test failure. 
190 | encoding = kwargs.pop("encoding", "utf-32") 191 | return original_open(*args, encoding=encoding, **kwargs) 192 | 193 | path = os.path.join(os.path.dirname(__file__), "basic-all.toml") 194 | 195 | with unittest.mock.patch("builtins.open", replacement_open): 196 | assert fru.toml_format.load(path=path) is not None 197 | 198 | 199 | def test_include_values_default_to_false(): 200 | """Verify 'include_*' values default to false.""" 201 | 202 | path = os.path.join(os.path.dirname(__file__), "include_defaults_to_false.toml") 203 | data = fru.toml_format.load(path) 204 | assert "board" in data 205 | assert "chassis" not in data 206 | assert "product" not in data 207 | assert "internal" not in data 208 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | min_version = 4.3.5 3 | 4 | envlist = 5 | coverage_erase 6 | # If this list of Python versions changes, 7 | # use search-and-replace to update this file 8 | # and update the `test.yaml` GitHub workflow. 
9 | py{3.13, 3.12, 3.11, 3.10, 3.9, 3.8} 10 | coverage_report 11 | #mypy 12 | 13 | labels = 14 | update=update 15 | 16 | skip_missing_interpreters = True 17 | isolated_build = True 18 | 19 | 20 | [testenv:coverage_erase] 21 | skip_install = true 22 | deps = 23 | -rrequirements/test/requirements.txt 24 | commands = 25 | coverage erase 26 | 27 | 28 | [testenv:py{3.13, 3.12, 3.11, 3.10, 3.9, 3.8}] 29 | package = wheel 30 | wheel_build_env = build_wheel 31 | depends = 32 | coverage_erase 33 | deps = 34 | -rrequirements/test/requirements.txt 35 | commands = 36 | coverage run -m pytest 37 | 38 | 39 | [testenv:coverage_report] 40 | depends = 41 | py{3.13, 3.12, 3.11, 3.10, 3.9, 3.8} 42 | skip_install = true 43 | deps = 44 | -rrequirements/test/requirements.txt 45 | commands_pre = 46 | coverage combine 47 | coverage html --fail-under=0 48 | commands = 49 | coverage report 50 | 51 | 52 | ;[testenv:mypy] 53 | ;deps = 54 | ; -rrequirements/mypy/requirements.txt 55 | ;commands = 56 | ; mypy 57 | 58 | 59 | [testenv:update] 60 | base_python = py3.12 61 | recreate = true 62 | description = Update tool dependency versions 63 | skip_install = true 64 | deps = 65 | poetry 66 | poetry-plugin-export 67 | pre-commit 68 | upadup 69 | commands = 70 | # Update test requirements 71 | poetry update --directory="requirements/mypy" --lock 72 | poetry export --directory="requirements/mypy" --output="requirements/mypy/requirements.txt" --without-hashes 73 | poetry update --directory="requirements/test" --lock 74 | poetry export --directory="requirements/test" --output="requirements/test/requirements.txt" --without-hashes 75 | 76 | # Update pre-commit hook versions 77 | pre-commit autoupdate 78 | upadup 79 | 80 | 81 | [flake8] 82 | max-line-length = 88 83 | extend-ignore = E203 84 | --------------------------------------------------------------------------------