├── .cspell.json ├── .githooks ├── advanced-pre-commit-config.yaml └── basic-pre-commit-config.yaml ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── doc_report.yml │ └── feature_request.yml ├── dependabot.yml └── workflows │ ├── CIRunner.yml │ ├── UnitTestRunner.yml │ ├── VariableProducer.yml │ ├── codeql.yml │ ├── doc-release.yml │ └── run-ci.yml ├── .markdownlint.yaml ├── .vscode ├── extensions.json └── settings.json ├── BasicDevTests.py ├── ConfirmVersionAndTag.py ├── LICENSE ├── MANIFEST.in ├── azure-pipelines └── azure-pipelines-release.yml ├── docs ├── contributor │ ├── developing.md │ ├── publishing.md │ ├── python_msv.md │ ├── python_release.md │ └── using.md └── user │ ├── features │ ├── .pages │ ├── build_objects.md │ ├── edk2_db.md │ ├── logging.ansi_handler.md │ ├── utility_functions.GetHostInfo.md │ └── windows_firmware_policy.md │ ├── gen_api.py │ └── index.md ├── edk2toollib ├── __init__.py ├── acpi │ ├── __init__.py │ ├── dmar_parser.py │ └── ivrs_parser.py ├── bin │ └── __init__.py ├── database │ ├── __init__.py │ ├── edk2_db.py │ └── tables │ │ ├── __init__.py │ │ ├── environment_table.py │ │ ├── inf_table.py │ │ ├── instanced_fv_table.py │ │ ├── instanced_inf_table.py │ │ ├── package_table.py │ │ └── source_table.py ├── gitignore_parser.py ├── log │ ├── __init__.py │ ├── ansi_handler.py │ ├── file_handler.py │ ├── junit_report_format.py │ └── string_handler.py ├── os │ ├── __init__.py │ └── uefivariablesupport.py ├── tpm │ ├── __init__.py │ ├── tpm2_defs.py │ ├── tpm2_policy_calc.py │ ├── tpm2_simulator.py │ └── tpm2_stream.py ├── uefi │ ├── __init__.py │ ├── authenticated_variables_structure_support.py │ ├── bmp_object.py │ ├── device_path.py │ ├── edk2 │ │ ├── __init__.py │ │ ├── build_objects │ │ │ ├── __init__.py │ │ │ ├── dsc.py │ │ │ └── dsc_translator.py │ │ ├── fmp_payload_header.py │ │ ├── ftw_working_block_format.py │ │ ├── guid_list.py │ │ ├── parsers │ │ │ ├── __init__.py │ │ │ ├── base_parser.py │ │ │ ├── buildreport_parser.py │ │ │ ├── dec_parser.py │ │ │ ├── dsc_parser.py │ │ │ ├── fdf_parser.py │ │ │ ├── guid_parser.py │ │ │ ├── inf_parser.py │ │ │ ├── override_parser.py │ │ │ └── targettxt_parser.py │ │ ├── path_utilities.py │ │ ├── variable_format.py │ │ ├── variable_policy.py │ │ └── variablestore_manulipulations.py │ ├── fmp_auth_header.py │ ├── fmp_capsule_header.py │ ├── pi_firmware_file.py │ ├── pi_firmware_volume.py │ ├── status_codes.py │ ├── uefi_capsule_header.py │ ├── uefi_multi_phase.py │ ├── uefi_types.py │ └── wincert.py ├── utility_functions.py └── windows │ ├── __init__.py │ ├── capsule │ ├── __init__.py │ ├── cat_generator.py │ ├── inf_generator.py │ └── inf_generator2.py │ ├── locate_tools.py │ └── policy │ ├── __init__.py │ └── firmware_policy.py ├── mkdocs.yml ├── pyproject.toml ├── readme.md └── tests.unit ├── __init__.py ├── database ├── common.py ├── test_edk2_db.py ├── test_environment_table.py ├── test_inf_table.py ├── test_instanced_fv_table.py ├── test_instanced_inf_table.py ├── test_package_table.py └── test_source_table.py ├── parsers ├── IncludedDefinesChild.fdf.inc ├── IncludedDefinesChildConditional.fdf.inc ├── IncludedDefinesParent.fdf ├── SimpleDefines.fdf ├── __init__.py ├── test_base_parser.py ├── test_dec_parser.py ├── test_dmar_parser.py ├── test_dsc_parser.py ├── test_fdf_parser.py ├── test_gitingore_parser.py ├── test_guid_parser.py ├── test_hash_file_parser.py ├── test_inf_parser.py ├── test_ivrs_parser.py └── test_override_parser.py ├── test_ansi_handler.py ├── 
test_authenticated_variables_structure_support.py ├── test_bmp_object.py ├── test_cat_generator.py ├── test_device_path.py ├── test_dsc.py ├── test_dsc_translator.py ├── test_firmware_policy.py ├── test_fmp_capsule_header.py ├── test_guid_list.py ├── test_inf_generator.py ├── test_inf_generator2.py ├── test_junit_report_format.py ├── test_locate_tools.py ├── test_path_utilities.py ├── test_status_codes.py ├── test_string_handler.py ├── test_tpm2_defs.py ├── test_tpm2_policy_calc.py ├── test_tpm2_stream.py ├── test_uefi_multi_phase.py ├── test_utility_functions.py ├── test_variable_format.py ├── test_variable_policy.py ├── test_wincert.py └── testdata ├── __init__.py └── certificate_blobs.py /.cspell.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.1", 3 | "language": "en", 4 | "dictionaries": [ 5 | "companies ", 6 | "softwareTerms", 7 | "python", 8 | "cpp" 9 | ], 10 | "minWordLength": 5, 11 | "ignorePaths": [ 12 | "*.exe" 13 | ], 14 | "allowCompoundWords": true, 15 | "ignoreWords": [], 16 | "words": [ 17 | "abspath", 18 | "apath", 19 | "argparser", 20 | "bdist", 21 | "cobertura", 22 | "codecov", 23 | "corebuild", 24 | "coveragerc", 25 | "cpython", 26 | "ctypes", 27 | "DBXFILE", 28 | "decodefs", 29 | "depex", 30 | "dirid", 31 | "Docstring", 32 | "Docstrings", 33 | "dwlength", 34 | "DYNAMICEX", 35 | "DYNAMICEXHII", 36 | "DYNAMICEXVPD", 37 | "DYNAMICHII", 38 | "DYNAMICVPD", 39 | "edkii", 40 | "etree", 41 | "extdep", 42 | "featurepcd", 43 | "fixedpcd", 44 | "gitattributes", 45 | "gitignore", 46 | "gitsubmodule", 47 | "Guids", 48 | "hexlify", 49 | "iglob", 50 | "implictly", 51 | "invocable", 52 | "invocables", 53 | "ioapic", 54 | "iommu", 55 | "isabs", 56 | "isclass", 57 | "isdir", 58 | "isfile", 59 | "junit", 60 | "junitxml", 61 | "levelno", 62 | "localizable", 63 | "markdownlint", 64 | "MDEPKG", 65 | "mkdocs", 66 | "nologo", 67 | "nonlocalizable", 68 | "noout", 69 | "nuget", 70 | "nupkg", 71 | "OEMID", 72 | "OPROM", 73 | "outfs", 74 | "packagepatahlist", 75 | "PATCHABLEINMODULE", 76 | "patchpcd", 77 | "Pathsep", 78 | "PCD's", 79 | "pcdtype", 80 | "prebuild", 81 | "pyasn", 82 | "pygount", 83 | "pypath", 84 | "pytest", 85 | "PYTHONPATH", 86 | "pytool", 87 | "pytools", 88 | "pyyaml", 89 | "rmtree", 90 | "rpartition", 91 | "rsassa", 92 | "sdist", 93 | "SECUREBOOT", 94 | "setuptools", 95 | "shutil", 96 | "signtool", 97 | "SKUID", 98 | "smoosh", 99 | "submodule", 100 | "testfv", 101 | "tianocore", 102 | "tinydb", 103 | "toolchain", 104 | "toolext", 105 | "toollib", 106 | "unittest", 107 | "urlunsplit", 108 | "vcvars", 109 | "vcvarsall", 110 | "vsvars", 111 | "vswhere", 112 | "uefivariablesupport", 113 | "ntdll", 114 | "NTSTATUS", 115 | "efivarfs", 116 | "chattr", 117 | "bootmgfw", 118 | "ISCSI", 119 | "SASEX", 120 | "CDROM" 121 | ] 122 | } 123 | -------------------------------------------------------------------------------- /.githooks/advanced-pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v3.2.0 4 | hooks: 5 | - id: trailing-whitespace 6 | types_or: [python, markdown] 7 | - id: end-of-file-fixer 8 | - id: check-added-large-files 9 | - id: check-merge-conflict 10 | - id: check-ast # check python ast 11 | - repo: https://github.com/streetsidesoftware/cspell-cli 12 | rev: v6.31.0 13 | hooks: 14 | - id: cspell 15 | types_or: [markdown, python] 16 | - repo: https://github.com/charliermarsh/ruff-pre-commit 
17 | rev: "v0.0.265" 18 | hooks: 19 | - id: ruff 20 | args: [--fix, --exit-non-zero-on-fix] 21 | - repo: https://github.com/igorshubovych/markdownlint-cli 22 | rev: v0.12.0 23 | hooks: 24 | - id: markdownlint 25 | args: [--config, .markdownlint.yaml] 26 | - repo: local 27 | hooks: 28 | - id: BasicDevTests 29 | name: BasicDevTests 30 | language: system 31 | entry: python BasicDevTests.py 32 | - id: mkdocs-build 33 | name: Build MkDocs site 34 | entry: mkdocs build --strict 35 | language: python 36 | always_run: true 37 | pass_filenames: false 38 | require_serial: true 39 | -------------------------------------------------------------------------------- /.githooks/basic-pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v3.2.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | types_or: [python, markdown, yaml] 8 | - id: check-added-large-files 9 | - id: check-merge-conflict 10 | - id: check-ast # check python ast 11 | - repo: https://github.com/streetsidesoftware/cspell-cli 12 | rev: v6.31.0 13 | hooks: 14 | - id: cspell 15 | types_or: [markdown, python] 16 | - repo: https://github.com/charliermarsh/ruff-pre-commit 17 | rev: "v0.0.265" 18 | hooks: 19 | - id: ruff 20 | args: [--fix, --exit-non-zero-on-fix] 21 | - repo: https://github.com/igorshubovych/markdownlint-cli 22 | rev: v0.12.0 23 | hooks: 24 | - id: markdownlint 25 | args: [--config, .markdownlint.yaml] 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a bug report 3 | title: "[Bug]: " 4 | labels: ["new", "bug"] 5 | 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: Thanks for taking the time to file this bug! 10 | - type: input 11 | id: contact 12 | attributes: 13 | label: Contact Details 14 | description: "How can we get in touch with you if we need more info?" 15 | placeholder: ex. email@example.com 16 | validations: 17 | required: false 18 | - type: textarea 19 | id: bug-description 20 | attributes: 21 | label: Describe the Bug 22 | description: "A clear and concise description of what the bug is." 23 | validations: 24 | required: true 25 | - type: textarea 26 | id: repro 27 | attributes: 28 | label: Reproduction steps 29 | description: "How did you find this bug? Walk us through it step by step." 30 | placeholder: | 31 | 1. 32 | 2. 33 | 3. 34 | ... 35 | validations: 36 | required: true 37 | - type: textarea 38 | id: expected-behavior 39 | attributes: 40 | label: Expected behavior 41 | description: "A clear and concise description of what you expected to happen." 42 | validations: 43 | required: true 44 | - type: dropdown 45 | id: py_version 46 | attributes: 47 | label: What Python version are you using? 48 | description: "Note: Bug fixes are only supported on these Python versions." 49 | multiple: true 50 | options: 51 | - Python 3.10 52 | - Python 3.11 53 | - Python 3.12 54 | - Python 3.13 55 | validations: 56 | required: true 57 | - type: textarea 58 | id: env 59 | attributes: 60 | label: Execution Environment 61 | description: "List the OS, python micro version (e.g. 3.11), system environment variables, etc." 
62 | validations: 63 | required: false 64 | - type: textarea 65 | id: pip 66 | attributes: 67 | label: Pip packages 68 | description: "If python related, run 'python -m pip list' and copy the output here." 69 | validations: 70 | required: false 71 | - type: textarea 72 | id: context 73 | attributes: 74 | label: Additional context 75 | description: "Any other additional context about the problem not covered by the above." 76 | validations: 77 | required: false 78 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/doc_report.yml: -------------------------------------------------------------------------------- 1 | name: Documentation Update 2 | description: Request a Documentation Update 3 | title: "[Doc]: " 4 | labels: ["new", "documentation"] 5 | 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: Thanks for taking the time to request a documentation update! 10 | - type: input 11 | id: doc-location 12 | attributes: 13 | label: Link to the documentation 14 | description: "Python source if for an API reference, or the markdown file if not." 15 | validations: 16 | required: true 17 | - type: textarea 18 | id: description 19 | attributes: 20 | label: Description of Documentation Update 21 | description: "Why does the documentation need updated?" 22 | validations: 23 | required: true 24 | - type: textarea 25 | id: context 26 | attributes: 27 | label: Additional context 28 | description: "Any other additional context about the problem not covered by the above." 29 | validations: 30 | required: false 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Submit a feature request 3 | title: "[Feature]: " 4 | labels: ["new", "enhancement"] 5 | 6 | body: 7 | - type: markdown 8 | attributes: 9 | value: Thanks for suggesting an idea for this project! 10 | - type: textarea 11 | id: description 12 | attributes: 13 | label: What does the feature solve? 14 | description: "Is your feature request related to a problem?" 15 | placeholder: "Ex. I'm always frustrated when..." 16 | validations: 17 | required: true 18 | - type: textarea 19 | id: solution 20 | attributes: 21 | label: Describe the solution 22 | description: "A clear and concise description of what you want to happen" 23 | validations: 24 | required: true 25 | - type: textarea 26 | id: alternative 27 | attributes: 28 | label: Have you considered any alternatives? 29 | description: "A clear and concise description of any alternative solutions, features, or workarounds you've considered" 30 | validations: 31 | required: false 32 | - type: textarea 33 | id: context 34 | attributes: 35 | label: Additional context 36 | description: "Any other additional context about the problem not covered by the above." 37 | validations: 38 | required: false 39 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Dependabot configuration file to enable GitHub services for managing and updating 3 | # dependencies. 4 | # 5 | # Copyright (c) Microsoft Corporation. 
6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | # 8 | # Please see the documentation for all configuration options: 9 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 10 | ## 11 | version: 2 12 | updates: 13 | - package-ecosystem: "pip" # See documentation for possible values 14 | directory: "/" # Location of package manifests 15 | schedule: 16 | interval: "weekly" 17 | day: "monday" 18 | time: "01:00" 19 | - package-ecosystem: "github-actions" 20 | directory: "/" 21 | schedule: 22 | interval: "weekly" 23 | day: "monday" 24 | time: "01:00" 25 | -------------------------------------------------------------------------------- /.github/workflows/CIRunner.yml: -------------------------------------------------------------------------------- 1 | # This workflow Runs CI Tests on the specified directory using the specified 2 | # python version and node version. 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | 7 | name: Continuous Integration Test Workflow 8 | 9 | on: 10 | workflow_call: 11 | inputs: 12 | python-version: 13 | description: 'Python Version to use for CI' 14 | required: true 15 | type: string 16 | node-version: 17 | description: 'Node Version to use for CI' 18 | required: true 19 | type: string 20 | package-src: 21 | description: 'Directory containing package to test' 22 | required: true 23 | type: string 24 | 25 | jobs: 26 | run: 27 | name: Run 28 | 29 | runs-on: ubuntu-latest 30 | 31 | steps: 32 | - uses: actions/checkout@v4 33 | 34 | - name: Set up Python ${{ inputs.python-version }} 35 | uses: actions/setup-python@v5 36 | with: 37 | python-version: ${{ inputs.python-version }} 38 | cache: 'pip' 39 | 40 | - name: Install pip Dependencies 41 | run: | 42 | python -m pip install --upgrade pip 43 | pip install --upgrade -e .[dev,docs] 44 | 45 | - name: Set up Node ${{ inputs.node-version }} 46 | uses: actions/setup-node@v4 47 | with: 48 | node-version: ${{ inputs.node-version }} 49 | 50 | - name: Install npm Dependencies 51 | run: | 52 | npm install -g markdownlint-cli@0.39.0 53 | npm install -g cspell@5.20.0 54 | 55 | - name: Run ruff linter 56 | if: success() || failure() 57 | run: ruff check --output-format=github . 58 | 59 | - name: Run ruff formatter 60 | if: success() || failure() 61 | run: ruff format --check . 62 | 63 | - name: Run markdownlint 64 | if: success() || failure() 65 | run: markdownlint "**/*.md" 66 | 67 | - name: Run cspell 68 | if: success() || failure() 69 | run: cspell -c .cspell.json "**/*.py" "**/*.md" 70 | 71 | - name: Run mkdocs build 72 | if: success() || failure() 73 | run: mkdocs build --strict 74 | 75 | - name: Run basic dev tests 76 | if: success() || failure() 77 | run: python BasicDevTests.py 78 | -------------------------------------------------------------------------------- /.github/workflows/UnitTestRunner.yml: -------------------------------------------------------------------------------- 1 | # This workflow Runs Unit Tests using the specified python version(s) on 2 | # windows-latest and ubuntu-latest 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | 7 | on: 8 | workflow_call: 9 | inputs: 10 | python-versions: 11 | description: 'Python Versions to use for Unit Tests. 
A Matrix is created from this' 12 | required: true 13 | type: string 14 | 15 | jobs: 16 | run: 17 | name: Run 18 | 19 | runs-on: ${{ matrix.os }} 20 | 21 | strategy: 22 | matrix: 23 | python-version: ${{ fromJson(inputs.python-versions) }} 24 | os: [ubuntu-latest, windows-latest] 25 | 26 | steps: 27 | - uses: actions/checkout@v4 28 | 29 | - name: Set up Python ${{ matrix.python-version }} 30 | uses: actions/setup-python@v5 31 | with: 32 | python-version: ${{ matrix.python-version }} 33 | cache: 'pip' 34 | 35 | - name: Install pip Dependencies 36 | run: | 37 | python -m pip install --upgrade pip 38 | pip install --upgrade -e .[dev] 39 | 40 | - name: Run Unit Tests 41 | run: coverage run -m pytest 42 | 43 | - name: Format Coverage results 44 | run: coverage xml 45 | 46 | - name: Upload coverage to codecov 47 | uses: codecov/codecov-action@v5 48 | with: 49 | verbose: false 50 | 51 | - uses: actions/upload-artifact@v4 52 | with: 53 | name: Artifacts-${{matrix.os}}-${{matrix.python-version}} 54 | path: | 55 | pytest_report.html 56 | test.junit.xml 57 | if: failure() 58 | -------------------------------------------------------------------------------- /.github/workflows/VariableProducer.yml: -------------------------------------------------------------------------------- 1 | # This workflow produces variables ingested by other workflows 2 | # 3 | # Copyright (c) Microsoft Corporation. 4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | 6 | on: 7 | workflow_call: 8 | outputs: 9 | python-versions: 10 | description: 'Python Versions' 11 | value: ${{ jobs.produce.outputs.python-versions }} 12 | node-versions: 13 | description: 'Node Versions' 14 | value: ${{ jobs.produce.outputs.node-versions }} 15 | python-msv: 16 | description: 'Minimum Supported Python Version' 17 | value: ${{ jobs.produce.outputs.python-msv }} 18 | 19 | env: 20 | msv: "['3.10']" 21 | pythonversions: "['3.13', '3.12', '3.11']" # Keep Python Versions in descending order 22 | nodeversions: "['19']" 23 | 24 | jobs: 25 | produce: 26 | name: Produce 27 | 28 | runs-on: ubuntu-latest 29 | 30 | outputs: 31 | python-versions: ${{ steps.set-python-versions.outputs.VERSION }} 32 | node-versions: ${{ steps.set-node-versions.outputs.VERSION }} 33 | python-msv: ${{ steps.set-python-msv.outputs.VERSION }} 34 | 35 | steps: 36 | - name: Produce Python Version 37 | id: set-python-versions 38 | run: echo "VERSION=$pythonversions" >> $GITHUB_OUTPUT 39 | 40 | - name: Produce Node Version 41 | id: set-node-versions 42 | run: echo "VERSION=$nodeversions" >> $GITHUB_OUTPUT 43 | 44 | - name: Produce Minimum Supported Python Version 45 | id: set-python-msv 46 | run: echo "VERSION=$msv" >> $GITHUB_OUTPUT 47 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ "master" ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ "master" ] 20 | schedule: 21 | - cron: '29 4 * * 5' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v4 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v3 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | 52 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs 53 | # queries: security-extended,security-and-quality 54 | 55 | 56 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 57 | # If this step fails, then you should remove it and run the build manually (see below) 58 | - name: Autobuild 59 | uses: github/codeql-action/autobuild@v3 60 | 61 | # ℹ️ Command-line programs to run using the OS shell. 62 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun 63 | 64 | # If the Autobuild fails above, remove it and uncomment the following three lines. 65 | # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 66 | 67 | # - run: | 68 | # echo "Run, Build Application using script" 69 | # ./location_of_script_within_repo/buildscript.sh 70 | 71 | - name: Perform CodeQL Analysis 72 | uses: github/codeql-action/analyze@v3 73 | -------------------------------------------------------------------------------- /.github/workflows/doc-release.yml: -------------------------------------------------------------------------------- 1 | # This workflow builds and deploys documentation on a release. 2 | # 3 | # Copyright (c) Microsoft Corporation. 
4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | 6 | name: Documentation 7 | 8 | on: 9 | release: 10 | types: [published] 11 | 12 | jobs: 13 | 14 | variables: 15 | name: Variables 16 | uses: ./.github/workflows/VariableProducer.yml 17 | 18 | build: 19 | name: Build 20 | needs: variables 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - name: Checkout 25 | uses: actions/checkout@v4 26 | 27 | - name: Set up Python ${{ fromJson(needs.variables.outputs.python-versions)[0] }} 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: ${{ fromJson(needs.variables.outputs.python-versions)[0] }} 31 | cache: 'pip' 32 | 33 | - name: Install dependencies 34 | run: | 35 | python -m pip install --upgrade pip 36 | pip install --upgrade -e .[docs] 37 | 38 | - name: Build Documentation 39 | run: | 40 | mkdocs build --strict 41 | 42 | - name: Upload Github Pages Site Artifact 43 | uses: actions/upload-pages-artifact@v3 44 | with: 45 | name: "github-pages" 46 | path: "site/" 47 | 48 | deploy: 49 | name: Deploy 50 | 51 | needs: [variables, build] 52 | 53 | permissions: 54 | pages: write 55 | id-token: write 56 | 57 | runs-on: ubuntu-latest 58 | 59 | steps: 60 | - name: Deploy Github Pages Site 61 | uses: actions/deploy-pages@v4.0.5 62 | with: 63 | token: ${{ github.token }} 64 | artifact_name: "github-pages" 65 | -------------------------------------------------------------------------------- /.github/workflows/run-ci.yml: -------------------------------------------------------------------------------- 1 | # This workflow runs unit tests against the specified python versions on both 2 | # ubuntu and windows. Additionally, it performs CI against the codebase. 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | 7 | name: CI 8 | 9 | on: 10 | push: 11 | branches: [ "master" ] 12 | pull_request: 13 | branches: [ "master" ] 14 | 15 | jobs: 16 | 17 | variables: 18 | name: Variables 19 | uses: ./.github/workflows/VariableProducer.yml 20 | 21 | ci: 22 | needs: variables 23 | name: CI 24 | uses: ./.github/workflows/CIRunner.yml 25 | with: 26 | python-version: ${{ fromJson(needs.variables.outputs.python-versions)[0] }} 27 | node-version: ${{ fromJson(needs.variables.outputs.node-versions)[0] }} 28 | package-src: edk2toollib 29 | 30 | msv: 31 | needs: [variables,ci] 32 | name: MSV Validation 33 | uses: ./.github/workflows/UnitTestRunner.yml 34 | with: 35 | python-versions: ${{ needs.variables.outputs.python-msv }} 36 | 37 | unit-test: 38 | needs: [variables,ci] 39 | name: Unit Test 40 | uses: ./.github/workflows/UnitTestRunner.yml 41 | with: 42 | python-versions: ${{ needs.variables.outputs.python-versions }} 43 | -------------------------------------------------------------------------------- /.markdownlint.yaml: -------------------------------------------------------------------------------- 1 | # Rules can be found here: https://github.com/DavidAnson/markdownlint/blob/main/doc/Rules.md 2 | # Config info: https://github.com/DavidAnson/markdownlint#configuration 3 | { 4 | "default": true, 5 | "MD013": {"line_length": 120, "code_blocks": false } 6 | } 7 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["charliermarsh.ruff"] 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestEnabled": true, 3 | "python.testing.pytestArgs": [ 4 | "${workspaceRoot}/tests.unit", 5 | ], 6 | "python.testing.unittestEnabled": false, 7 | "[python]": { 8 | "editor.defaultFormatter": null, 9 | "editor.codeActionsOnSave": { 10 | "source.fixAll.ruff": "explicit" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /BasicDevTests.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Quick script to check that python code in the package 3 | # aligns with pep8 and file encoding. I have not found 4 | # a way to enforce that with tools like flake8 5 | # 6 | # There must be a better way. :) 7 | # 8 | # Copyright (c) Microsoft Corporation 9 | # 10 | # SPDX-License-Identifier: BSD-2-Clause-Patent 11 | ## 12 | 13 | import glob 14 | import os 15 | import sys 16 | import logging 17 | import re 18 | 19 | 20 | def TestEncodingOk(apath, encodingValue): 21 | try: 22 | with open(apath, "rb") as f_obj: 23 | f_obj.read().decode(encodingValue) 24 | except Exception as exp: 25 | logging.critical("Encoding failure: file: {0} type: {1}".format(apath, encodingValue)) 26 | logging.error("EXCEPTION: while processing {1} - {0}".format(exp, apath)) 27 | return False 28 | return True 29 | 30 | 31 | def TestFilenameLowercase(apath): 32 | if apath != apath.lower(): 33 | logging.critical(f"Lowercase failure: file {apath} not lower case path") 34 | logging.error(f"\n\tLOWERCASE: {apath.lower()}\n\tINPUTPATH: {apath}") 35 | return False 36 | return True 37 | 38 | 39 | def PackageAndModuleValidCharacters(apath): 40 | """check pep8 recommendations for package and module names""" 41 | 42 | match = re.match("^[a-z0-9_/.]+$", apath.replace("\\", "/")) 43 | if match is None: 44 | logging.critical( 45 | f"PackageAndModuleValidCharacters failure: package or module name {apath} has something invalid" 46 | ) 47 | return False 48 | return True 49 | 50 | 51 | def TestNoSpaces(apath): 52 | if " " in apath: 53 | logging.critical(f"NoSpaces failure: file {apath} has spaces in path") 54 | return False 55 | return True 56 | 57 | 58 | def TestRequiredLicense(apath): 59 | licenses = [ 60 | "SPDX-License-Identifier: BSD-2-Clause-Patent", 61 | ] 62 | try: 63 | with open(apath, "rb") as f_obj: 64 | contents = f_obj.read().decode() 65 | found = False 66 | for lic in licenses: 67 | if lic in contents: 68 | found = True 69 | break 70 | if not found: 71 | logging.critical(f"License failure: file {apath} has incorrect, invalid, or unsupported license") 72 | return False 73 | except Exception as exp: 74 | logging.critical(f"License failure: Exception trying to read file: {apath}") 75 | logging.error("EXCEPTION: while processing {1} - {0}".format(exp, apath)) 76 | return False 77 | return True 78 | 79 | 80 | p = os.path.join(os.getcwd(), "edk2toollib") 81 | py_files = glob.glob(os.path.join(p, "**", "*.py"), recursive=True) 82 | error = 0 83 | for a in py_files: 84 | aRelativePath = os.path.relpath(a, os.getcwd()) 85 | if not TestEncodingOk(a, "ascii"): 86 | error += 1 87 | if not TestFilenameLowercase(aRelativePath): 88 | error += 1 89 | if not TestNoSpaces(aRelativePath): 90 | error += 1 91 | if not TestRequiredLicense(a): 92 | error += 1 93 | if not PackageAndModuleValidCharacters(aRelativePath): # use relative path so only test within package 94 | error += 1 95 | 96 | logging.critical(f"Found {error} error(s) in {len(py_files)} file(s)") 
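# Exit with the number of failures so that any error produces a non-zero exit
# code, which fails the pre-commit hook and the CI step that run this script.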
97 | sys.exit(error) 98 | -------------------------------------------------------------------------------- /ConfirmVersionAndTag.py: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Quick script to check that the wheel/package created is aligned on a git tag. 3 | # Official releases should not be made from non-tagged code. 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | 10 | import glob 11 | import os 12 | import sys 13 | 14 | p = os.path.join(os.getcwd(), "dist") 15 | whl_files = glob.glob(os.path.join(p, "*.whl")) 16 | if len(whl_files) != 1: 17 | for filename in whl_files: 18 | print(filename) 19 | raise Exception("Too many wheel files") 20 | rfn = os.path.relpath(whl_files[0], os.getcwd()) 21 | v = rfn.split("-")[1] 22 | if v.count(".") != 2: 23 | raise Exception("Version %s not in format major.minor.patch" % v) 24 | if "dev" in v: 25 | raise Exception("No Dev versions allowed to be published.") 26 | print("version: " + str(v)) 27 | sys.exit(0) 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019, TianoCore and contributors. All rights reserved. 2 | Copyright (c) Microsoft All rights reserved. 3 | Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved. 4 | 5 | SPDX-License-Identifier: BSD-2-Clause-Patent 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | 10 | 1. Redistributions of source code must retain the above copyright notice, 11 | this list of conditions and the following disclaimer. 12 | 13 | 2. Redistributions in binary form must reproduce the above copyright notice, 14 | this list of conditions and the following disclaimer in the documentation 15 | and/or other materials provided with the distribution. 16 | 17 | Subject to the terms and conditions of this license, each copyright holder 18 | and contributor hereby grants to those receiving rights under this license 19 | a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable 20 | (except for failure to satisfy the conditions of this license) patent 21 | license to make, have made, use, offer to sell, sell, import, and otherwise 22 | transfer this software, where such license applies only to those patent 23 | claims, already acquired or hereafter acquired, licensable by such copyright 24 | holder or contributor that are necessarily infringed by: 25 | 26 | (a) their Contribution(s) (the licensed copyrights of copyright holders and 27 | non-copyrightable additions of contributors, in source or binary form) 28 | alone; or 29 | 30 | (b) combination of their Contribution(s) with the work of authorship to 31 | which such Contribution(s) was added by such copyright holder or 32 | contributor, if, at the time the Contribution is added, such addition 33 | causes such combination to be necessarily infringed. The patent license 34 | shall not apply to any other combinations which include the 35 | Contribution. 36 | 37 | Except as expressly stated above, no rights or licenses from any copyright 38 | holder or contributor is granted under this license, whether expressly, by 39 | implication, estoppel or otherwise. 
40 | 41 | DISCLAIMER 42 | 43 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 44 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 45 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 46 | ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE 47 | LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 48 | CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 49 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 50 | INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 51 | CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 52 | ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 53 | POSSIBILITY OF SUCH DAMAGE. 54 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | exclude *.yml 2 | exclude *.md 3 | exclude *.txt 4 | exclude .flake8 5 | exclude .coveragerc 6 | exclude .gitignore 7 | exclude .cspell.json 8 | exclude .markdownlint.yaml 9 | exclude BasicDevTests.py 10 | exclude ConfirmVersionAndTag.py 11 | exclude MANIFEST.in 12 | exclude .vscode 13 | prune docs 14 | prune tests.unit 15 | prune .githooks 16 | prune .github 17 | prune edk2_pytool_library.egg-info 18 | prune azure-pipelines 19 | 20 | include edk2toollib/bin/vswhere.exe -------------------------------------------------------------------------------- /azure-pipelines/azure-pipelines-release.yml: -------------------------------------------------------------------------------- 1 | ## 2 | # Azure Pipeline build file for a releasing to pypi 3 | 4 | # Copyright (c), Microsoft Corporation 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | ## 7 | 8 | resources: 9 | repositories: 10 | - repository: pytool_extensions 11 | type: github 12 | name: tianocore/edk2-pytool-extensions 13 | ref: refs/heads/master 14 | endpoint: tianocore 15 | 16 | # trigger when a vXX.XX.XX tag is created 17 | trigger: 18 | tags: 19 | include: 20 | - v* 21 | 22 | pr: none # not a pr target 23 | 24 | jobs: 25 | - template: azure-pipelines/templates/build-test-job.yml@pytool_extensions 26 | parameters: 27 | vm_image: 'windows-2022' 28 | pypi_auth_feed: 'Pypi-edk2-pytool-library' 29 | root_package_folder: "edk2toollib" 30 | name: 'windows' 31 | -------------------------------------------------------------------------------- /docs/contributor/publishing.md: -------------------------------------------------------------------------------- 1 | # Publishing Tianocore Edk2 PyTool Library (edk2toollib) 2 | 3 | The __edk2toollib__ is published as a pypi (pip) module. The pip module is 4 | named __edk2-pytool-library__. Pypi allows for easy version management, 5 | dependency management, and sharing. 6 | 7 | Publishing/releasing a new version is generally handled thru a server based 8 | build process but for completeness the process is documented here. 9 | 10 | ## Version Scheme 11 | 12 | Versioning follows: aa.bb.cc and is based on tags in git 13 | 14 | * aa == Major version. Changes don’t need to be backward compatible 15 | * bb == Minor version. Significant new features. Backward compatibility 16 | generally maintained except when new feature is used. 17 | * cc == Patch version. Bug fix or small optional feature. Backward 18 | compatibility maintained. 
19 | 20 | ## Github Publishing Process 21 | 22 | Note: If this release contains a breaking change, you may need to navigate to 23 | the [Milestones](https://github.com/tianocore/edk2-pytool-library/milestones) 24 | page and "edit" the milestone version to roll it to the next minor / major 25 | version. If it was already done, then you don't need to do it again. 26 | 27 | 1. Navigate to the [Releases](https://github.com/tianocore/edk2-pytool-library/releases) 28 | section on the main page of edk2-pytool-library 29 | 2. Click `Draft a new release` at the top right of the page 30 | 3. Click `Choose a tag` and create the new release version (`v0.21.8`, `v0.22.0`, etc.) 31 | 4. Click `Generate release notes` 32 | 5. Add a new section `## Dependency Updates` 33 | 6. If the major / minor is rolled in this release, add a `## Integration Steps` 34 | section 35 | 7. Move all dependabot contributions to the `## Dependency Updates` section 36 | 8. Leave all "true" contributions in the `## What's Changed` section 37 | 9. Copy the integration steps from the pull request into the 38 | `## Integration Steps` section 39 | 10. Click `Publish release` 40 | 41 | These are the steps you need to do once a release is complete, to setup 42 | contributing to the next tag. 43 | 44 | 1. Navigate to the [Milestones](https://github.com/tianocore/edk2-pytool-library/milestones) 45 | section on the Pull requests page 46 | 2. Click `New Milestone` and create a new tag that should be the last release 47 | with the patch version + 1 48 | 3. Click `Create milestone` 49 | 4. Close the old milestone for the latest release tag 50 | 51 | NOTE: Feel free to add additional sections to the release notes as necessary. 52 | The release is not immediate. A pipeline will be queued that will perform final 53 | CI checks and then release to pypi. You can monitor this pipeline [HERE](https://dev.azure.com/tianocore/edk2-pytools-library/_build?definitionId=3) 54 | 55 | ## Manual Publishing Process 56 | 57 | NOTE: These directions assume you have already configured your workspace for 58 | developing. If not please first do that. Directions on the 59 | [developing](developing.md) page. 60 | 61 | 1. Pass all development tests and checks. 62 | 2. Update the __readme.md__ `Release Version History` section with info on all 63 | important changes for this version. Remove the "-dev" tag from the version 64 | about to be released. 65 | 3. Get your changes into master branch (official releases should only be done 66 | from the master branch) 67 | 4. Make a git tag for the version that will be released and push tag. Tag 68 | format is v\.\.\ 69 | 5. Do the release process 70 | 71 | 1. Install tools 72 | 73 | ``` cmd 74 | pip install --upgrade -e [publish] 75 | ``` 76 | 77 | 2. Build a wheel 78 | 79 | ``` cmd 80 | python -m build --sdist --wheel 81 | ``` 82 | 83 | 3. Confirm wheel version is aligned with git tag 84 | 85 | ``` cmd 86 | ConfirmVersionAndTag.py 87 | ``` 88 | 89 | 4. Publish the wheel/distribution to pypi 90 | 91 | ``` cmd 92 | twine upload dist/* 93 | ``` 94 | -------------------------------------------------------------------------------- /docs/contributor/python_msv.md: -------------------------------------------------------------------------------- 1 | # Python Minimum Supported Version 2 | 3 | In addition to the N-2 versions of python being actively maintained and 4 | supported, this repository also has a minimum supported version (MSV) of 5 | python. 
Any version of python between the MSV and N-2 version of python is not 6 | actively maintained or monitored, however it is supported (e.g. the repository 7 | does not use any features past this version of python). 8 | 9 | The MSV for this project is subject to change at any time based on project 10 | needs and features introduced by python. At a minimum, this repository will 11 | never use a feature newer than N-2, providing a two year lookback period for 12 | consumers to increase their supported version of python while receiving new 13 | feature updates of this project. 14 | 15 | ## Updating the MSV 16 | 17 | If the need arises to increase the minimum supported version of python, below 18 | are the necessary files and steps to update the repository. 19 | 20 | ### pyproject.toml 21 | 22 | This file is responsible for the release process to pypi. We want to make sure 23 | we keep the required version for our pypi releases up to date. Update 24 | `requires-python` to the new msv. 25 | 26 | Additionally, we must update the classifiers section to remove the now 27 | unsupported versions of python. 28 | 29 | ```python 30 | classifiers=[ 31 | "Programming Language :: Python :: 3.10", 32 | "Programming Language :: Python :: 3.11", 33 | "Programming Language :: Python :: 3.12", 34 | "Programming Language :: Python :: 3.13" 35 | ] 36 | ``` 37 | 38 | ### bug_report.yml 39 | 40 | Remove the now unsupported version of python in the following 41 | section: `id: py_version`. 42 | 43 | ### VariableProducer.yml 44 | 45 | Update `python-msv:` to the new msv 46 | 47 | ### readme.md 48 | 49 | Update the `Toolchain` section of the `Minimum Supported Version` table. 50 | -------------------------------------------------------------------------------- /docs/contributor/python_release.md: -------------------------------------------------------------------------------- 1 | # Python Releases and edk2toollib 2 | 3 | This document provides information on the necessary steps to update the 4 | edk2-pytool-library repository when a new minor version of python has been 5 | released (3.9, 3.10, etc). 6 | 7 | ## Steps 8 | 9 | Each individual step will be a different section below and be associated with 10 | a specific file that must be updated. 11 | 12 | ### pyproject.toml 13 | 14 | We must update the classifiers section to show the new supported python version: 15 | 16 | ```python 17 | classifiers=[ 18 | ... 19 | "Programming Language :: Python :: 3.10", 20 | "Programming Language :: Python :: 3.11", 21 | "Programming Language :: Python :: 3.12", 22 | "Programming Language :: Python :: 3.13" 23 | ] 24 | ``` 25 | 26 | ### bug_report.yml 27 | 28 | Update the supported python versions in the entry with `id: py_version` 29 | 30 | ### VariableProducer.yml 31 | 32 | Update `pythonversions` to the support versions 33 | 34 | ### readme.md 35 | 36 | Update the python versions in the `Current Status` section 37 | -------------------------------------------------------------------------------- /docs/contributor/using.md: -------------------------------------------------------------------------------- 1 | # Using Tianocore edk2 pytool library (edk2toollib) 2 | 3 | ## Installing 4 | 5 | NOTE: It is suggested to use python virtual environments to avoid dependency pollution and conflicts. 6 | [Read More](https://docs.python.org/3/library/venv.html) 7 | 8 | Install from pip 9 | 10 | ```cmd 11 | pip install --upgrade edk2-pytool-library 12 | ``` 13 | 14 | ## Using in python code 15 | 16 | ```python 17 | from edk2toollib. 
import 18 | ``` 19 | -------------------------------------------------------------------------------- /docs/user/features/.pages: -------------------------------------------------------------------------------- 1 | title: Advanced Features 2 | -------------------------------------------------------------------------------- /docs/user/features/build_objects.md: -------------------------------------------------------------------------------- 1 | # Build Objects 2 | 3 | ## What are they 4 | 5 | Data model or build objects are data objects that allow for a DSC or other build 6 | file to be converted into an python object. This allows transformations, 7 | verifications, and other things to do be done much more easily as they can be 8 | done in-memory. The current proposal is three separate data models, detailed 9 | below. Generally the data model is composed of sets and maps of sets. The sets 10 | will be checked for uniqueness and will be restricted in what can be added to 11 | them. The maps will have various section header types as keys and restricted 12 | sets as values. 13 | 14 | ### DSC 15 | 16 | This is the data model that represents the DSC file, macros fully resolved with 17 | conditional paths taken. This is a fairly standard 1:1 mapping from the spec to 18 | the DSC object. No higher level verification is to be done by the data model 19 | object itself (for example, checking that the SKU that a PCD references is in 20 | fact legal or INF paths exist). This higher level verification can be done by a 21 | separate class, perhaps once the object has been created in the parser itself. 22 | Low level verifications (for example, anything specified with specific values in 23 | the spec) may be done as the file is being processed as long as it can be 24 | immediately verified. A good example of this would be checking that the module 25 | type specified exists in the list of allowed EDKII values (DXE_RUNTIME_DRIVER, 26 | PEIM, etc). 27 | 28 | ### FDF 29 | 30 | This is the data model that represents the FDF file, macros fully resolved with 31 | conditional paths taken. Similar to DSCs, no higher level verification should be 32 | done in the data model itself. Low lever verifications may be done by the data 33 | model. 34 | 35 | ### Recipe 36 | 37 | The central class is the `recipe`. This holds all the components that need to be 38 | built and their respective library classes, PCD's, and defines. It does not 39 | contain general library classes. In the future, it will also contain the flash 40 | map information. 41 | 42 | We've written a parser for DSC files to convert them into recipes. In the 43 | future, we hope to include FDF files as well. 44 | 45 | ## Who are they for 46 | 47 | Build objects are for anyone dealing with complex and large projects. In 48 | projects, more and more DSC files are taking advantage of the !include 49 | functionality. However, there are a few problems with that fact. 50 | 51 | 1. DSC files are fragile 52 | 2. Includes have no idea what is already in your file. An include might expect 53 | you to be in a defines section. You have no way to know this from the main 54 | DSC file. 55 | 56 | ## Why were they made 57 | 58 | To better abstract away the essence of what a build is doing. DSC is a way to 59 | communicate a recipe. 
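As a rough sketch of what "communicating a recipe" can look like in code, the
snippet below parses a DSC into an in-memory object with the library's DSC
parser. The workspace and DSC paths are placeholders, and the setup calls
(`SetBaseAbsPath`, `SetPackagePaths`, `ParseFile`) are assumed from the parser
family in `edk2toollib.uefi.edk2.parsers` rather than documented here, so treat
it as an illustration rather than a prescribed API.

```python
# Illustrative only: the paths below are placeholders for a real EDK II workspace.
from edk2toollib.uefi.edk2.parsers.dsc_parser import DscParser

parser = DscParser()
parser.SetBaseAbsPath("/path/to/workspace")       # workspace root (placeholder)
parser.SetPackagePaths(["/path/to/workspace"])    # PACKAGES_PATH entries (placeholder)
parser.ParseFile("MyPlatformPkg/MyPlatform.dsc")  # placeholder DSC file

# The parsed libraries, components, and PCDs now live as plain Python data on
# the parser object, where they can be validated or transformed into a recipe
# without re-reading the text file.
```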
60 | 61 | ## How are they being used 62 | 63 | - DSC compositing/transformations 64 | - Build state verification 65 | - Best practice checking 66 | -------------------------------------------------------------------------------- /docs/user/features/logging.ansi_handler.md: -------------------------------------------------------------------------------- 1 | # Logging ANSI Handler 2 | 3 | This document details the Ansi Handler 4 | 5 | ## How to Use 6 | 7 | ```python 8 | from edk2toollib.logging.ansi_handler import ColoredStreamHandler 9 | 10 | handler = ColoredStreamHandler(stream, strip=True, convert=False) 11 | formatter = ColoredFormatter() 12 | ``` 13 | 14 | ## Usage info 15 | 16 | ColoredStreamHandler() will create a handler from the logging package. It 17 | accepts a stream (such as a file) and will display the colors in that particular 18 | stream as needed to the console. There are two options, strip and convert. 19 | 20 | ColoredFormatter() will create a formatter from the logging package that will 21 | insert ANSI codes according to the logging level into the output stream. 22 | 23 | ### ColoredStreamHandler Arguments 24 | 25 | ### 1. strip 26 | 27 | Strip will strip ANSI codes if the terminal does not support them (such as 28 | windows). 29 | 30 | ### 2. convert 31 | 32 | Convert will convert ANSI codes on windows platforms into windows platform 33 | calls. 34 | 35 | ### ColoredFormatter Arguments 36 | 37 | ### 1. msg 38 | 39 | The best documentation for this is from Python itself. It's the same message 40 | that's passed into the formatted base class. 41 | 42 | ### 2. use_azure 43 | 44 | Azure Dev ops can support colors with certain keywords. This turns that on 45 | instead of using ANSI. 46 | 47 | ## Purpose 48 | 49 | To put color into your life and your terminal, we needed to support coloring 50 | based on logging levels. ANSI seemed like a universal choice. The 51 | StreamHandler is just a workaround for windows based systems that don't 52 | support ANSI natively. 53 | -------------------------------------------------------------------------------- /docs/user/features/utility_functions.GetHostInfo.md: -------------------------------------------------------------------------------- 1 | # Utility Functions GetHostInfo() 2 | 3 | This document details the utility function called GetHostInfo. This function was 4 | written because tools needed a consistent way to determine attributes about the 5 | host system. 6 | 7 | ## Purpose 8 | 9 | Since there are multiple different ways one could derive these values, it is 10 | necessary provide a common implementation of that logic to ensure it is 11 | uniform. 12 | 13 | ## How to Use 14 | 15 | ```python 16 | from edk2toollib.utility_functions import GetHostInfo 17 | 18 | host_info = GetHostInfo() 19 | ``` 20 | 21 | ## Usage info 22 | 23 | GetHostInfo() will return a named tuple with 3 attributes describing the host 24 | machine. Below for each is the name of the field, description of the field and 25 | possible contents therein. 26 | 27 | ### 1. os - OS Name 28 | 29 | Windows, Linux, or Java 30 | 31 | ### 2. arch - Processor architecture 32 | 33 | ARM or x86 34 | 35 | ### 3. 
bit - Highest order bit 36 | 37 | 32 or 64 38 | -------------------------------------------------------------------------------- /docs/user/features/windows_firmware_policy.md: -------------------------------------------------------------------------------- 1 | # Windows Firmware Policy Library 2 | 3 | This library supports creation and analysis of Windows Firmware Policy binaries 4 | (unsigned) 5 | 6 | ## Usage info 7 | 8 | ### To deserialize an unsigned firmware policy binary file and print its contents 9 | 10 | 1. Construct a ```FirmwarePolicy()``` using a binary file stream as a parameter 11 | 1. Inspect data members 12 | 1. Invoke the ```Print()``` method on it 13 | 14 | ### To create a firmware policy binary file 15 | 16 | 1. Device targeting information must first be read from the target device (not 17 | covered here). 18 | 1. Construct a Dictionary with keys 'Manufacturer', 'Product', 'SerialNumber', 19 | 'OEM_01', 'OEM_02', & 'Nonce' populated with the targeting values read from 20 | the device. 21 | 1. Construct an default ```FirmwarePolicy()``` object, then call 22 | 23 | ```SetDeviceTarget(target_dictionary)``` 24 | 25 | to populate it with the targeting information 26 | 1. Bitwise OR the desired ```FirmwarePolicy.FW_POLICY_VALUE_foo``` values into 27 | an integer and pass to ```SetDevicePolicy(64_bit_device_policy)``` 28 | 1. The FirmwarePolicy object is now ready, serialize it to a file stream using 29 | 30 | ```SerializeToStream(your_file_stream)``` 31 | 32 | 1. For consumption by a secure device, sign the policy using instructions found 33 | elsewhere 34 | 35 | ## How to Use 36 | 37 | ```python 38 | from edk2toollib.windows.policy.firmware_policy import FirmwarePolicy 39 | 40 | # to create an object from file and print its contents 41 | policy = FirmwarePolicy(fs=policy_file_stream) # construct from file stream 42 | policy.Print() 43 | 44 | 45 | # or to create a policy and save to file 46 | policy = FirmwarePolicy() 47 | 48 | deviceTarget = { 49 | 'Manufacturer': manufacturer_read_from_device, 50 | 'Product': product_make_read_from_device, 51 | 'SerialNumber': sn_read_from_device, 52 | 'OEM_01': '', # Yours to define, or not use (NULL string) 53 | 'OEM_02': '', 54 | 'Nonce': nonce_read_from_device 55 | } 56 | policy.SetDeviceTarget(TargetInfo) 57 | 58 | devicePolicy = \ 59 | FirmwarePolicy.FW_POLICY_VALUE_ACTION_SECUREBOOT_CLEAR \ 60 | + FirmwarePolicy.FW_POLICY_VALUE_ACTION_TPM_CLEAR 61 | policy.SetDevicePolicy(devicePolicy) 62 | 63 | policy.SerializeToStream(stream=your_file_stream) 64 | ``` 65 | -------------------------------------------------------------------------------- /docs/user/gen_api.py: -------------------------------------------------------------------------------- 1 | # @file gen_api.py 2 | # 3 | ## 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Python script used to automatically generate API Reference documentation. 9 | 10 | Used in conjunction with mkdocs to generate static markdown files for each 11 | file inside the edk2toollib package for ReadTheDocs hosting. 
12 | """ 13 | 14 | import glob 15 | import os 16 | 17 | import mkdocs_gen_files 18 | 19 | 20 | def main(): 21 | """Entry into script that is executed.""" 22 | files = glob.glob("**/*.py", recursive=True, root_dir="edk2toollib") 23 | 24 | excluded_files = ["__init__.py"] 25 | 26 | for file_path in files: 27 | edit_path = file_path 28 | # __init__ file excluded as they provide no API's that needs to be generated 29 | # tool files excluded as they have entire readmes on how to use the tool 30 | if file_path.split(os.sep)[-1] in excluded_files: 31 | continue 32 | 33 | # tests are excluded as no API reference is necessary 34 | if file_path.startswith("tests"): 35 | continue 36 | 37 | file_path = file_path.replace(".py", ".md") 38 | 39 | filename = f"api{os.sep}{file_path}" 40 | with mkdocs_gen_files.open(filename, "w") as f: 41 | ff = file_path.replace(os.sep, ".").replace(".md", "") 42 | ff = f"edk2toollib.{ff}" 43 | print(f"::: {ff}", file=f) 44 | print(" handler: python", file=f) 45 | print(" options:", file=f) 46 | print(" show_bases: False", file=f) 47 | print(" show_root_heading: True", file=f) 48 | print(" show_root_full_path: False", file=f) 49 | print(" show_signature_annotations: True", file=f) 50 | print(" separate_signature: True", file=f) 51 | print(" members_order: 'source'", file=f) 52 | print(" show_source: False", file=f) 53 | 54 | # Point the "Edit on Github" button in the docs to point at the source code 55 | edit_path = os.path.join("..", "edk2toollib", edit_path) 56 | mkdocs_gen_files.set_edit_path(filename, edit_path) 57 | 58 | with mkdocs_gen_files.open("api/.pages", "w") as f: 59 | print("title: API Reference", file=f) 60 | 61 | 62 | main() 63 | -------------------------------------------------------------------------------- /docs/user/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | hide: 3 | - navigation 4 | - toc 5 | --- 6 | # Our Philosophy 7 | 8 | Edk2 Pytool Library (edk2toollib) is a Tianocore maintained project consisting 9 | of a python library supporting UEFI firmware development. This package's intent 10 | is to provide an easy way to organize and share python code to facilitate reuse 11 | across environments, tools, and scripts. Inclusion of this package and 12 | dependency management is best managed using Pip/Pypi. 13 | 14 | ## Content 15 | 16 | The package contains classes and modules that can be used as the building 17 | blocks of tools that are relevant to UEFI firmware developers. These modules 18 | should attempt to provide generic support and avoid tightly coupling with 19 | specific use cases. It is expected these modules do not provide direct 20 | interaction with the user (through command line interfaces) but instead are 21 | intended to be wrapped in other scripts/tools which contains the specific usage 22 | and interface. 23 | 24 | Examples: 25 | 26 | * File parsers for edk2 specific file types. These parse the file and provide 27 | an object for interacting with the content. 28 | * UEFI specific services for encoding/decoding binary structures. 29 | * UEFI defined values and interfaces for usage in python 30 | * Python wrappers for other system cli tools ( signtool, catalog file 31 | generation, inf file generation, etc) 32 | * Python utilities to provide consistent logging, command invocation, path 33 | resolution, etc 34 | 35 | ## Getting Started 36 | 37 | It is strongly recommended that you use python virtual environments. 
Virtual 38 | environments avoid changing the global python workspace and causing 39 | conflicting dependencies. Virtual environments are lightweight and easy to use. 40 | [Learn more](https://docs.python.org/3/library/venv.html) 41 | 42 | * To install run `pip install --upgrade edk2-pytool-library` 43 | * To use in your python code 44 | 45 | ```python 46 | from edk2toollib. import 47 | ``` 48 | -------------------------------------------------------------------------------- /edk2toollib/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This file exists to satisfy pythons packaging requirements. 9 | 10 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 11 | """ 12 | -------------------------------------------------------------------------------- /edk2toollib/acpi/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Package containing different ACPI Parsers.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/bin/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This file exists to satisfy pythons packaging requirements. 9 | 10 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 11 | """ 12 | -------------------------------------------------------------------------------- /edk2toollib/database/edk2_db.py: -------------------------------------------------------------------------------- 1 | # @file edk2_db.py 2 | # A class for interacting with a database implemented using json. 3 | ## 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """A class for interacting with a database implemented using json.""" 9 | 10 | import logging 11 | import time 12 | import uuid 13 | from contextlib import contextmanager 14 | from typing import Any 15 | 16 | from sqlalchemy import create_engine 17 | from sqlalchemy.orm import DeclarativeBase, Session 18 | 19 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 20 | 21 | 22 | class Base(DeclarativeBase): 23 | """The base class for creating database table models. 24 | 25 | This class should be the subclass for any table model that will be used with Edk2DB. 26 | """ 27 | 28 | 29 | class Edk2DB: 30 | """A SQLite3 database manager for a EDKII workspace. 31 | 32 | This class provides the ability to register parsers that will create / update tables in the database. This will 33 | create a SQLite datbase file that can be queried using any SQLite3 client. VSCode provides multiple extensions 34 | for viewing and interacting with the database. Queries can also be created and run in python using the sqlite3 35 | module that comes with python. 36 | 37 | Edk2DB can, and should, be used as a context manager to ensure that the database is closed properly. 
If 38 | not using as a context manager, the `db.connection.commit()` and `db.connection.close()` must be used to cleanly 39 | close the database. 40 | 41 | Attributes: 42 | connection (sqlite3.Connection): The connection to the database 43 | 44 | !!! note 45 | Edk2DB provides a table called `junction` that can be used to make associations between tables. It has the 46 | following schema: `env_id, table1, key1, table2, key2`. 47 | 48 | Example: 49 | ```python 50 | from edk2toollib.database.parsers import * 51 | table = "..." 52 | with Edk2DB(Path("path/to/db.db"), edk2path) as db: 53 | db.register(Parser1(), Parser2(), Parser3()) 54 | db.parse() 55 | db.connection.execute("SELECT * FROM ?", table) 56 | ``` 57 | """ 58 | 59 | Base = Base 60 | 61 | def __init__(self: "Edk2DB", db_path: str, pathobj: Edk2Path = None, **kwargs: dict[str, Any]) -> "Edk2DB": 62 | """Initializes the database. 63 | 64 | Args: 65 | db_path: Path to create or load the database from 66 | pathobj: Edk2Path object for the workspace 67 | **kwargs: None 68 | """ 69 | self.pathobj = pathobj 70 | self.clear_parsers() 71 | self.engine = create_engine(f"sqlite:///{db_path}", **kwargs) 72 | self.Base.metadata.create_all(self.engine) 73 | 74 | @contextmanager 75 | def session(self) -> Session: 76 | """Provides a context manager for a session with the database. 77 | 78 | Handles commiting changes and rolling back if an exception is raised. 79 | """ 80 | session = Session(self.engine) 81 | try: 82 | yield session 83 | session.commit() 84 | except Exception as e: 85 | session.rollback() 86 | raise e 87 | finally: 88 | session.close() 89 | 90 | def register(self, *parsers: "TableGenerator") -> None: 91 | """Registers a one or more table generators. 92 | 93 | Args: 94 | *parsers: One or more instantiated TableGenerator object 95 | """ 96 | for parser in parsers: 97 | self._parsers.append(parser) 98 | 99 | def clear_parsers(self) -> None: 100 | """Empties the list of registered table generators.""" 101 | self._parsers = [] 102 | 103 | def parse(self, env: dict) -> None: 104 | """Runs all registered table parsers against the database. 105 | 106 | !!! note 107 | To enable queries to differentiate between two parses, an environment table is always created if it does 108 | not exist, and a row is added for each call of this command. 109 | """ 110 | id = str(uuid.uuid4().hex) 111 | 112 | # Fill all tables 113 | for table in self._parsers: 114 | logging.debug(f"[{table.__class__.__name__}] starting...") 115 | t = time.time() 116 | with self.session() as session: 117 | table.parse(session, self.pathobj, id, env) 118 | logging.debug(f"Finished in {round(time.time() - t, 2)}") 119 | 120 | 121 | class TableGenerator: 122 | """An interface for a parser that generates a sqlite3 table maintained by Edk2DB. 123 | 124 | Allows you to parse a workspace, file, etc, and load the contents into the database as rows in a table. 125 | 126 | Edk2Db provides a connection to a sqlite3 database and will commit any changes made during `parse` once 127 | the parser has finished executing and has returned. Review sqlite3 documentation for more information on 128 | how to interact with the database. 
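    A minimal sketch of a custom generator is shown below; the ``Note`` table and its
    columns are illustrative only and not part of the library.

    Example:
        ```python
        from sqlalchemy.orm import Mapped, Session, mapped_column

        from edk2toollib.database.edk2_db import Base, TableGenerator
        from edk2toollib.uefi.edk2.path_utilities import Edk2Path


        class Note(Base):
            # A hypothetical table holding one free-form note per parse.
            __tablename__ = "note"
            id: Mapped[int] = mapped_column(primary_key=True)
            env_id: Mapped[str] = mapped_column()
            text: Mapped[str] = mapped_column()


        class NoteTable(TableGenerator):
            def parse(self, session: Session, pathobj: Edk2Path, id: str, env: dict) -> None:
                # Rows added here are committed by Edk2DB once parse() returns.
                session.add(Note(env_id=id, text="workspace parsed"))
        ```

    Define the table model before constructing ``Edk2DB`` so that ``create_all()`` picks it
    up, then register an instance with ``Edk2DB.register()`` and it will run on the next
    ``parse()``.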
129 | """ 130 | 131 | def __init__(self, *args: Any, **kwargs: Any) -> "TableGenerator": 132 | """Initialize the query with the specific settings.""" 133 | 134 | def parse(self, session: Session, pathobj: Edk2Path, id: str, env: dict) -> None: 135 | """Execute the parser and update the database.""" 136 | raise NotImplementedError 137 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Copyright (c) Microsoft Corporation 3 | # 4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | ## 6 | """A collection of table generators that run against the workspace.""" 7 | 8 | from edk2toollib.database.edk2_db import TableGenerator # noqa: F401 9 | from edk2toollib.database.tables.environment_table import EnvironmentTable # noqa: F401 10 | from edk2toollib.database.tables.inf_table import InfTable # noqa: F401 11 | from edk2toollib.database.tables.instanced_fv_table import InstancedFvTable # noqa: F401 12 | from edk2toollib.database.tables.instanced_inf_table import InstancedInfTable # noqa: F401 13 | from edk2toollib.database.tables.package_table import PackageTable # noqa: F401 14 | from edk2toollib.database.tables.source_table import SourceTable # noqa: F401 15 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/environment_table.py: -------------------------------------------------------------------------------- 1 | # @file environment_table.py 2 | # A module to run a table generator that creates or appends to a table with environment information." 3 | ## 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """A module to run a table generator that creates or appends to a table with environment information.""" 9 | 10 | import datetime 11 | from typing import Any 12 | 13 | import git 14 | 15 | from edk2toollib.database import Environment, Session, Value 16 | from edk2toollib.database.tables import TableGenerator 17 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 18 | 19 | 20 | class EnvironmentTable(TableGenerator): 21 | """A Workspace parser that records import environment information for a given parsing execution.""" # noqa: E501 22 | 23 | def __init__(self, *args: Any, **kwargs: Any) -> "EnvironmentTable": 24 | """Initialize the query with the specific settings.""" 25 | 26 | def parse(self, session: Session, pathobj: Edk2Path, id: str, env: dict) -> None: 27 | """Parses the environment and adds the data to the table.""" 28 | dtime = datetime.datetime.now() 29 | 30 | try: 31 | version = git.Repo(pathobj.WorkspacePath).head.commit.hexsha 32 | except git.InvalidGitRepositoryError: 33 | version = "UNKNOWN" 34 | 35 | entry = Environment( 36 | id=id, 37 | date=dtime, 38 | version=version, 39 | values=[Value(env_id=env, key=key, value=value) for key, value in env.items()], 40 | ) 41 | 42 | session.add(entry) 43 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/inf_table.py: -------------------------------------------------------------------------------- 1 | # @file inf_table.py 2 | # A module to run a table generator that parses all INF files in the workspace and generates a table of information 3 | # about each INF. 
4 | ## 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | """A module to run generate a table containing information about each INF in the workspace.""" 10 | 11 | import logging 12 | import time 13 | from pathlib import Path 14 | from typing import Any 15 | 16 | from joblib import Parallel, delayed 17 | 18 | from edk2toollib.database import Inf, Library, Session, Source 19 | from edk2toollib.database.tables import TableGenerator 20 | from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser as InfP 21 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 22 | 23 | 24 | class InfTable(TableGenerator): 25 | """A Table Generator that parses all INF files in the workspace and generates a table.""" 26 | 27 | # TODO: Add phase, protocol, guid, ppi, pcd tables and associations once necessary 28 | def __init__(self, *args: Any, **kwargs: Any) -> "InfTable": 29 | """Initializes the INF Table Parser. 30 | 31 | Args: 32 | args (any): non-keyword arguments 33 | kwargs (any): keyword arguments described below 34 | 35 | Keyword Arguments: 36 | n_jobs (int): Number of files to run in parallel 37 | """ 38 | self.n_jobs = kwargs.get("n_jobs", -1) 39 | 40 | def parse(self, session: Session, pathobj: Edk2Path, env_id: str, env: dict) -> None: 41 | """Parse the workspace and update the database.""" 42 | ws = Path(pathobj.WorkspacePath) 43 | inf_entries = [] 44 | 45 | start = time.time() 46 | files = list(ws.glob("**/*.inf")) 47 | files = [file for file in files if not file.is_relative_to(ws / "Build")] 48 | inf_entries = Parallel(n_jobs=self.n_jobs)(delayed(self._parse_file)(fname, pathobj) for fname in files) 49 | 50 | all_inf = {inf.path: inf for inf in session.query(Inf).all()} 51 | all_source = {source.path: source for source in session.query(Source).all()} 52 | all_libs = {lib.name: lib for lib in session.query(Library).all()} 53 | to_add = [] 54 | for entry, source_list, lib_list in inf_entries: 55 | # Could parse a Windows INF file, which is not a EDKII INF file 56 | # and won't have a guid. GUIDS are required for INFs so we can 57 | # assume if it does not have a guid, its the wrong type of INF 58 | if entry.guid == "": 59 | continue 60 | if entry.path in all_inf: 61 | continue 62 | for source in source_list: 63 | if source not in all_source: 64 | all_source[source] = Source(path=source) 65 | entry.sources.append(all_source[source]) 66 | for lib in lib_list: 67 | if lib not in all_libs: 68 | all_libs[lib] = Library(name=lib) 69 | entry.libraries.append(all_libs[lib]) 70 | to_add.append(entry) 71 | all_inf[entry.path] = entry 72 | 73 | session.add_all(to_add) 74 | 75 | logging.debug( 76 | f"{self.__class__.__name__}: Parsed {len(inf_entries)} .inf files took; " 77 | f"{round(time.time() - start, 2)} seconds." 78 | ) 79 | 80 | def _parse_file(self, filename: str, pathobj: Edk2Path) -> dict: 81 | inf_parser = InfP().SetEdk2Path(pathobj) 82 | inf_parser.ParseFile(filename) 83 | 84 | pkg = pathobj.GetContainingPackage(str(inf_parser.Path)) 85 | path = Path(pathobj.GetEdk2RelativePathFromAbsolutePath(str(inf_parser.Path))).as_posix() 86 | 87 | # Make source files package path relative and resolve ".." 
in paths 88 | source_list = [] 89 | for source in inf_parser.Sources: 90 | source = (Path(filename).parent / source).resolve() 91 | source = Path(pathobj.GetEdk2RelativePathFromAbsolutePath(str(source))).as_posix() 92 | source_list.append(source) 93 | 94 | return ( 95 | Inf( 96 | path=Path(path).as_posix(), 97 | guid=inf_parser.Dict.get("FILE_GUID", ""), 98 | library_class=inf_parser.LibraryClass or None, 99 | package_name=pkg, 100 | module_type=inf_parser.Dict.get("MODULE_TYPE", None), 101 | ), 102 | source_list, 103 | inf_parser.LibrariesUsed, 104 | ) 105 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/instanced_fv_table.py: -------------------------------------------------------------------------------- 1 | # @file instaced_fv.py 2 | # A module to run a table generator that uses a fdf and environment information to generate a table of information 3 | # about instanced fvs where each row is a unique fv. 4 | ## 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | """A module to generate a table containing fv information.""" 10 | 11 | import logging 12 | import re 13 | from pathlib import Path 14 | from typing import Any 15 | 16 | from edk2toollib.database import Fv, InstancedInf, Session 17 | from edk2toollib.database.tables import TableGenerator 18 | from edk2toollib.uefi.edk2.parsers.fdf_parser import FdfParser as FdfP 19 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 20 | 21 | 22 | class InstancedFvTable(TableGenerator): 23 | """A Table Generator that parses a single FDF file and generates a table containing FV information. 24 | 25 | !!! warning 26 | This table generator relies on the instanced_inf_table generator to be run first. 27 | """ # noqa: E501 28 | 29 | INFOPTS = re.compile(r"(RuleOverride|file_guid|version|ui|use)\s*=.+\s+(.+\.inf)", re.IGNORECASE) 30 | 31 | def __init__(self, *args: Any, **kwargs: Any) -> "InstancedFvTable": 32 | """Initialize the query with the specific settings.""" 33 | 34 | def parse(self, session: Session, pathobj: Edk2Path, env_id: str, env: dict) -> None: 35 | """Parse the workspace and update the database.""" 36 | self.pathobj = pathobj 37 | self.ws = Path(self.pathobj.WorkspacePath) 38 | self.env = env 39 | self.env_id = env_id 40 | self.dsc = self.env.get("ACTIVE_PLATFORM", None) 41 | self.fdf = self.env.get("FLASH_DEFINITION", None) 42 | self.arch = self.env["TARGET_ARCH"].split(" ") 43 | self.target = self.env["TARGET"] 44 | 45 | if self.dsc is None or self.fdf is None: 46 | logging.debug("DSC or FDF not found in environment. Skipping InstancedFvTable") 47 | return 48 | 49 | # Our DscParser subclass can now parse components, their scope, and their overrides 50 | fdfp = FdfP().SetEdk2Path(self.pathobj) 51 | fdfp.SetInputVars(self.env) 52 | fdfp.ParseFile(self.fdf) 53 | 54 | all_components = {inf.path: inf for inf in session.query(InstancedInf).filter_by(env=env_id, cls=None).all()} 55 | for fv in fdfp.FVs: 56 | inf_list = [] # Some INF's have extra options. We only need the INF 57 | for inf in fdfp.FVs[fv]["Infs"]: 58 | options = InstancedFvTable.INFOPTS.findall(inf) 59 | if len(options) > 0: 60 | inf = options[0][1] 61 | 62 | # Convert to absolute, and back to relative to ensure we get the closest pp relative path 63 | # i.e. 
if we have two package paths: ("MyPP", and "MyPP/Subfolder"), in the FDF, devs 64 | # can specify INFs are either ("Subfolder/MyPkg/../MyPkg.inf" or "MyPkg/../MyPkg.inf") 65 | # However in the database, we want the closest match, i.e. "MyPkg/../MyPkg.inf", even if 66 | # they are providing ("Subfolder/MyPkg/../MyPkg.inf"). "GetEdk2RelativePathFromAbsolutePath" 67 | # always returns the relative path from the closest package path. 68 | inf = self.pathobj.GetAbsolutePathOnThisSystemFromEdk2RelativePath(inf) 69 | inf = self.pathobj.GetEdk2RelativePathFromAbsolutePath(inf) 70 | inf_list.append(Path(inf).as_posix()) 71 | 72 | filtered = [] 73 | for inf in inf_list: 74 | if inf not in all_components: 75 | logging.warning(f"INF [{inf}] not found in database.") 76 | else: 77 | filtered.append(inf) 78 | 79 | fv = Fv(env=env_id, name=fv, fdf=self.fdf, infs=[all_components.get(inf) for inf in filtered]) 80 | session.add(fv) 81 | session.commit() 82 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/package_table.py: -------------------------------------------------------------------------------- 1 | # @file package_table.py 2 | # A module to associate the packages in a workspace with the repositories they come from. 3 | ## 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """A module to generate a table containing information about a package.""" 9 | 10 | from pathlib import Path 11 | from typing import Any 12 | 13 | import git 14 | 15 | from edk2toollib.database import Package, Repository, Session 16 | from edk2toollib.database.tables import TableGenerator 17 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 18 | 19 | GIT_EXTENSION = ".git" 20 | DEC_EXTENSION = "*.dec" 21 | 22 | 23 | class PackageTable(TableGenerator): 24 | """A Table Generator that associates packages with their repositories.""" 25 | 26 | def __init__(self, *args: Any, **kwargs: Any) -> "PackageTable": 27 | """Initializes the Repository Table Parser. 
28 | 29 | Args: 30 | args (any): non-keyword arguments 31 | kwargs (any): None 32 | 33 | """ 34 | 35 | def get_repo_name(repo: git.Repo) -> str: 36 | """Get the name of the repository.""" 37 | if "origin" in repo.remotes: 38 | return repo.remotes.origin.url.split("/")[-1].split(GIT_EXTENSION)[0].upper() 39 | elif len(repo.remotes) > 0: 40 | return repo.remotes[0].url.split("/")[-1].split(GIT_EXTENSION)[0].upper() 41 | return "BASE" 42 | 43 | def parse(self, session: Session, pathobj: Edk2Path, id: str, env: dict) -> None: 44 | """Glob for packages and insert them into the table.""" 45 | try: 46 | repo = git.Repo(pathobj.WorkspacePath) 47 | except git.InvalidGitRepositoryError: 48 | return 49 | 50 | all_packages = {(pkg.name, pkg.path): pkg for pkg in session.query(Package).all()} 51 | all_repos = {(repo.name, repo.path): repo for repo in session.query(Repository).all()} 52 | 53 | packages_to_add = [] 54 | for file in Path(pathobj.WorkspacePath).rglob(DEC_EXTENSION): 55 | pkg_name = file.parent.name 56 | containing_repo = PackageTable.get_repo_name(repo) 57 | repo_path = None 58 | 59 | for submodule in repo.submodules: 60 | if submodule.abspath in str(file): 61 | containing_repo = submodule.name 62 | repo_path = submodule.path 63 | break 64 | 65 | repository = all_repos.setdefault( 66 | (containing_repo, repo_path), Repository(name=containing_repo, path=repo_path) 67 | ) 68 | 69 | pkg_path = file.parent.relative_to(pathobj.WorkspacePath).as_posix() 70 | if (pkg_name, pkg_path) not in all_packages: 71 | package = all_packages.setdefault( 72 | (pkg_name, pkg_path), Package(name=pkg_name, path=pkg_path, repository=repository) 73 | ) 74 | packages_to_add.append(package) 75 | session.add_all(packages_to_add) 76 | -------------------------------------------------------------------------------- /edk2toollib/database/tables/source_table.py: -------------------------------------------------------------------------------- 1 | # @file source_table.py 2 | # A module to Parse all Source files and add them to the database. 3 | ## 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """A module to Parse all Source files and add them to the database.""" 9 | 10 | import logging 11 | import re 12 | import time 13 | from pathlib import Path 14 | from typing import Any 15 | 16 | from joblib import Parallel, delayed 17 | from pygount import SourceAnalysis 18 | 19 | from edk2toollib.database import Session, Source 20 | from edk2toollib.database.tables import TableGenerator 21 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 22 | 23 | SOURCE_EXT_LIST = ["*.c", "*.h", "*.cpp", "*.asm", "*.s", "*.nasm", "*.masm", "*.rs"] 24 | 25 | 26 | class SourceTable(TableGenerator): 27 | """A Table Generator that parses all c and h files in the workspace.""" 28 | 29 | def __init__(self, *args: Any, **kwargs: Any) -> "SourceTable": 30 | """Initializes the Source Table Parser. 
31 | 32 | Args: 33 | args (any): non-keyword arguments 34 | kwargs (any): keyword arguments described below 35 | 36 | Keyword Arguments: 37 | source_stats (bool): Whether to parse source statistics 38 | n_jobs (int): Number of files to run in parallel 39 | source_extensions (list[str]): List of file extensions to parse 40 | """ 41 | self.source_stats = kwargs.get("source_stats", False) 42 | self.n_jobs = kwargs.get("n_jobs", -1) 43 | self.source_extensions = kwargs.get("source_extensions", SOURCE_EXT_LIST) 44 | 45 | def parse(self, session: Session, pathobj: Edk2Path, id: str, env: dict) -> None: 46 | """Parse the workspace and update the database.""" 47 | ws = Path(pathobj.WorkspacePath) 48 | self.pathobj = pathobj 49 | 50 | start = time.time() 51 | files = [] 52 | for src in self.source_extensions: 53 | files.extend(list(ws.rglob(src))) 54 | files = [file for file in files if not file.is_relative_to(ws / "Build")] 55 | src_entries = Parallel(n_jobs=self.n_jobs)(delayed(self._parse_file)(filename) for filename in files) 56 | 57 | existing_source = {source.path: source for source in session.query(Source).all()} 58 | to_add = [] 59 | for source in src_entries: 60 | if source.path not in existing_source: 61 | existing_source[source.path] = source 62 | to_add.append(source) 63 | else: 64 | to_add.append(existing_source[source.path]) 65 | to_add[-1].code_lines = source.code_lines 66 | to_add[-1].comment_lines = source.comment_lines 67 | to_add[-1].blank_lines = source.blank_lines 68 | 69 | session.add_all(to_add) 70 | session.commit() 71 | 72 | logging.debug( 73 | f"{self.__class__.__name__}: Parsed {len(src_entries)} files; " 74 | f"took {round(time.time() - start, 2)} seconds." 75 | ) 76 | 77 | def _parse_file(self, filename: Path) -> dict: 78 | """Parse a C file and return the results.""" 79 | license = "" 80 | with open(filename, "r", encoding="cp850") as f: 81 | lines = f.readlines() 82 | for line in lines: 83 | match = re.search(r"SPDX-License-Identifier:\s*(.*)$", line) # TODO: This is not a standard format. 
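                # e.g. "# SPDX-License-Identifier: BSD-2-Clause-Patent" -> match.group(1) == "BSD-2-Clause-Patent"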
84 | if match: 85 | license = match.group(1) 86 | 87 | total_lines = len(lines) 88 | code_lines = total_lines 89 | comment_lines = 0 90 | blank_lines = 0 91 | if self.source_stats: 92 | code = SourceAnalysis.from_file(filename, "_", fallback_encoding="utf-8") 93 | code_lines = code.code_count 94 | comment_lines = code.documentation_count 95 | blank_lines = code.empty_count 96 | 97 | path = self.pathobj.GetEdk2RelativePathFromAbsolutePath(filename.as_posix()) 98 | return Source( 99 | path=path, 100 | license=license or "Unknown", 101 | total_lines=total_lines, 102 | code_lines=code_lines, 103 | comment_lines=comment_lines, 104 | blank_lines=blank_lines, 105 | ) 106 | -------------------------------------------------------------------------------- /edk2toollib/log/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains different log file handlers.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/log/file_handler.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Handle basic logging outputting to files 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module for handling basically logging to files.""" 9 | 10 | import logging 11 | 12 | 13 | class FileHandler(logging.FileHandler): 14 | """Object for handling basic logging output to files.""" 15 | 16 | def __init__(self, filename: str, mode: str = "w+", encoding="utf-8") -> "FileHandler": 17 | """Init a file handler for the specified file.""" 18 | logging.FileHandler.__init__(self, filename, mode=mode, encoding=encoding) 19 | 20 | def handle(self, record: logging.LogRecord) -> bool: 21 | """Conditionally emit the specified logging record. 22 | 23 | Emission depends on filters which may have been added to the handler. 24 | Wrap the actual emission of the record with acquisition/release of 25 | the I/O thread lock. Returns whether the filter passed the record for 26 | emission. 27 | """ 28 | rv = self.filter(record) 29 | if rv and record.levelno >= self.level: 30 | self.acquire() 31 | try: 32 | self.emit(record) 33 | finally: 34 | self.release() 35 | return rv 36 | -------------------------------------------------------------------------------- /edk2toollib/log/string_handler.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Handle basic logging by streaming into stringIO 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module for handling basic logging by streaming into StringIO.""" 9 | 10 | import io 11 | import logging 12 | from logging import LogRecord 13 | 14 | 15 | class StringStreamHandler(logging.StreamHandler): 16 | """Class for logging via StringIO.""" 17 | 18 | terminator = "\n" 19 | 20 | def __init__(self) -> "StringStreamHandler": 21 | """Init a StringStreamHandler.""" 22 | logging.Handler.__init__(self) 23 | self.stream = io.StringIO() 24 | 25 | def handle(self, record: LogRecord) -> bool: 26 | """Conditionally emit the specified logging record. 27 | 28 | Emission depends on filters which may have been added to the handler. 29 | Wrap the actual emission of the record with acquisition/release of 30 | the I/O thread lock. 
Returns whether the filter passed the record for 31 | emission. 32 | """ 33 | rv = self.filter(record) 34 | if rv and record.levelno >= self.level: 35 | self.acquire() 36 | try: 37 | self.emit(record) 38 | finally: 39 | self.release() 40 | return rv 41 | 42 | def readlines(self, hint: int = -1) -> list[str]: 43 | """Reads lines from stream and returns them.""" 44 | return self.stream.readlines(hint) 45 | 46 | def seek_start(self) -> None: 47 | """Seeks to a specific point in the stream.""" 48 | self.stream.seek(0, 0) 49 | 50 | def seek_end(self) -> None: 51 | """Seeks to the end of the stream.""" 52 | self.stream.seek(0, io.SEEK_END) 53 | 54 | def seek(self, offset: int, whence: int) -> int: 55 | """Seek to a specific point in the stream.""" 56 | return self.stream.seek(offset, whence) 57 | -------------------------------------------------------------------------------- /edk2toollib/os/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Copyright (c) Microsoft Corporation 3 | # 4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | ## 6 | """A python class to allow interaction with Uefi Variables from an OS.""" 7 | -------------------------------------------------------------------------------- /edk2toollib/tpm/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains different tools for working with TPM 2.0.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/tpm/tpm2_simulator.py: -------------------------------------------------------------------------------- 1 | # @file tpm2_simulator.py 2 | # This file contains transportation layer classes for interacting with the TPM 2.0 simulator. 
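A short sketch of the string handler above attached to a standard logger; formatting is left at the ``logging`` defaults:

```python
import logging

from edk2toollib.log.string_handler import StringStreamHandler

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)

handler = StringStreamHandler()
handler.setLevel(logging.INFO)
logger.addHandler(handler)

logger.info("captured in memory")

handler.seek_start()          # rewind the underlying StringIO
print(handler.readlines())    # ['captured in memory\n'] with the default formatter
```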
3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module that contains transportation layer classes for interacting with the TPM 2.0 simulator.""" 9 | 10 | import socket 11 | import struct 12 | 13 | import edk2toollib.tpm.tpm2_defs as t2d 14 | import edk2toollib.tpm.tpm2_stream as t2s 15 | 16 | PLAT_COMMANDS = { 17 | "TPM_SIGNAL_POWER_ON": 1, 18 | "TPM_SIGNAL_POWER_OFF": 2, 19 | "TPM_SIGNAL_PHYS_PRES_ON": 3, 20 | "TPM_SIGNAL_PHYS_PRES_OFF": 4, 21 | "TPM_SIGNAL_HASH_START": 5, 22 | "TPM_SIGNAL_HASH_DATA": 6, 23 | # {UINT32 BufferSize, BYTE[BufferSize] Buffer} 24 | "TPM_SIGNAL_HASH_END": 7, 25 | "TPM_SEND_COMMAND": 8, 26 | # {BYTE Locality, UINT32 InBufferSize, BYTE[InBufferSize] InBuffer} -> 27 | # {UINT32 OutBufferSize, BYTE[OutBufferSize] OutBuffer} 28 | "TPM_SIGNAL_CANCEL_ON": 9, 29 | "TPM_SIGNAL_CANCEL_OFF": 10, 30 | "TPM_SIGNAL_NV_ON": 11, 31 | "TPM_SIGNAL_NV_OFF": 12, 32 | "TPM_SIGNAL_KEY_CACHE_ON": 13, 33 | "TPM_SIGNAL_KEY_CACHE_OFF": 14, 34 | "TPM_REMOTE_HANDSHAKE": 15, 35 | "TPM_SET_ALTERNATIVE_RESULT": 16, 36 | "TPM_SIGNAL_RESET": 17, 37 | "TPM_SESSION_END": 20, 38 | "TPM_STOP": 21, 39 | "TPM_GET_COMMAND_RESPONSE_SIZES": 25, 40 | "TPM_TEST_FAILURE_MODE": 30, 41 | } 42 | 43 | 44 | class TpmSimulator(object): 45 | """An object for interacting with the Tpm Simulator.""" 46 | 47 | def __init__(self, host: str = "localhost", port: int = 2321) -> "TpmSimulator": 48 | """Initialize the simulator on the requested host (ip) and port.""" 49 | super(TpmSimulator, self).__init__() 50 | 51 | # Connect to the control socket. 52 | self.platSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 53 | self.platSock.connect((host, port + 1)) 54 | 55 | # Connect to the simulator socket. 56 | self.tpmSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 57 | self.tpmSock.connect((host, port)) 58 | 59 | # Power cycle the TPM. 60 | self.platSock.send(struct.pack(">L", PLAT_COMMANDS["TPM_SIGNAL_POWER_OFF"])) 61 | self.platSock.send(struct.pack(">L", PLAT_COMMANDS["TPM_SIGNAL_POWER_ON"])) 62 | 63 | # Enable the NV space. 64 | self.platSock.send(struct.pack(">L", PLAT_COMMANDS["TPM_SIGNAL_NV_ON"])) 65 | 66 | def send_raw_data(self, data: str) -> None: 67 | """Send raw data to the TPM simulator.""" 68 | print("RAW -->: " + str(data).encode("hex")) 69 | self.tpmSock.send(data) 70 | 71 | def read_raw_data(self, count: int) -> bytes: 72 | """Read raw data from the TPM simulator.""" 73 | data = self.tpmSock.recv(count) 74 | print("RAW <--: " + str(data).encode("hex")) 75 | return data 76 | 77 | def send_data(self, data: str) -> bytes: 78 | """Send data to the TPM simulator.""" 79 | # Send the "I'm about to send data" command. 80 | self.send_raw_data(struct.pack(">L", PLAT_COMMANDS["TPM_SEND_COMMAND"])) 81 | # Send the locality for the data. 82 | self.send_raw_data(struct.pack(">b", 0x03)) 83 | # Send the size of the data. 84 | self.send_raw_data(struct.pack(">L", len(data))) 85 | 86 | # Now, send the data itself. 87 | self.send_raw_data(data) 88 | 89 | # Poll until a result is available. 90 | # NOTE: This shouldn't be necessary and denotes a lack of understanding... 
91 | while True: 92 | result_size = self.read_raw_data(4) 93 | result_size = struct.unpack(">L", result_size)[0] 94 | if result_size > 0: 95 | break 96 | 97 | return self.read_raw_data(result_size) 98 | 99 | def startup(self, type: str) -> bytes: 100 | """Initialize the connection to the TPM simulator.""" 101 | stream = t2s.Tpm2CommandStream(t2d.TPM_ST_NO_SESSIONS, 0x00, t2d.TPM_CC_Startup) 102 | stream.add_element(t2s.Tpm2StreamPrimitive(t2d.TPM_SU_Size, type)) 103 | return self.send_data(stream.get_stream()) 104 | -------------------------------------------------------------------------------- /edk2toollib/tpm/tpm2_stream.py: -------------------------------------------------------------------------------- 1 | # @file tpm2_stream.py 2 | # This file contains utility classes to help marshal and un-marshal data to/from the TPM. 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module that contains utility classes to help marshal and un-marshal date to/from the TPM.""" 9 | 10 | import struct 11 | 12 | 13 | class Tpm2StreamElement(object): 14 | """Tpm2 Stream Element.""" 15 | 16 | def __init__(self) -> "Tpm2StreamElement": 17 | """Init an empty Tpm2StreamElement.""" 18 | self.pack_string = "" 19 | 20 | def get_size(self) -> int: 21 | """The size of this structure when marshalled.""" 22 | return struct.calcsize(self.pack_string) 23 | 24 | 25 | class Tpm2StreamPrimitive(Tpm2StreamElement): 26 | """Tpm2 Stream Primitive. 27 | 28 | Attributes: 29 | size: size of the primitive. 1, 2, 4, or 8 bytes 30 | value: Value of primitive 31 | """ 32 | 33 | def __init__(self, size: int, value: str) -> "Tpm2StreamPrimitive": 34 | """Init a primitive value. 35 | 36 | Args: 37 | size: 1, 2, 4, or 8 bytes 38 | value: Value to stream. 39 | """ 40 | super(Tpm2StreamPrimitive, self).__init__() 41 | 42 | if size not in (1, 2, 4, 8): 43 | raise ValueError("Size must be 1, 2, 4, or 8 bytes!") 44 | 45 | self.pack_string = {1: ">B", 2: ">H", 4: ">L", 8: ">Q"}[size] 46 | self.value = value 47 | 48 | def marshal(self) -> bytes: 49 | r"""Serializes the Tpm2 primitive. 50 | 51 | Returns: 52 | (str): string representing packed data as bytes (i.e. b'\x01\x00\x03') 53 | """ 54 | return struct.pack(self.pack_string, self.value) 55 | 56 | 57 | class TPM2_COMMAND_HEADER(Tpm2StreamElement): 58 | """Tpm2 Command header. 59 | 60 | Attributes: 61 | tag: The Tag 62 | code: The Code 63 | size: The size of the code 64 | """ 65 | 66 | def __init__(self, tag: str, size: str, code: str) -> "TPM2_COMMAND_HEADER": 67 | """Init a Tpm2 command.""" 68 | super(TPM2_COMMAND_HEADER, self).__init__() 69 | self.tag = tag 70 | self.code = code 71 | self.size = size 72 | self.pack_string = ">HLL" 73 | 74 | def update_size(self, size: int) -> None: 75 | """Update size of the whole command.""" 76 | self.size = size 77 | 78 | def marshal(self) -> str: 79 | r"""Serializes the Tpm2 command header. 80 | 81 | Returns: 82 | (str): string representing packed data as bytes (i.e. 
b'\x01\x00\x03') 83 | """ 84 | return struct.pack(self.pack_string, self.tag, self.size, self.code) 85 | 86 | 87 | class TPM2B(Tpm2StreamElement): 88 | """Tpm2 B.""" 89 | 90 | def __init__(self, data: str) -> "TPM2B": 91 | """Inits the object.""" 92 | super(TPM2B, self).__init__() 93 | self.data = data 94 | self.size = len(data) 95 | self.pack_string = ">H%ds" % self.size 96 | 97 | def update_data(self, data: str) -> None: 98 | """Updates the data attribute.""" 99 | self.data = data 100 | self.size = len(data) 101 | self.pack_string = ">H%ds" % self.size 102 | 103 | def marshal(self) -> str: 104 | r"""Serializes the Tpm2B object. 105 | 106 | Returns: 107 | (str): string representing packed data as bytes (i.e. b'\x01\x00\x03') 108 | """ 109 | return struct.pack(self.pack_string, self.size, self.data) 110 | 111 | 112 | class Tpm2CommandStream(object): 113 | """Tpm2 Command Stream.""" 114 | 115 | def __init__(self, tag: str, size: int, code: str) -> "Tpm2CommandStream": 116 | """Inits a Tpm2 Command stream object.""" 117 | super(Tpm2CommandStream, self).__init__() 118 | self.header = TPM2_COMMAND_HEADER(tag, size, code) 119 | self.stream_size = self.header.get_size() 120 | self.header.update_size(self.stream_size) 121 | self.stream_elements = [] 122 | 123 | def get_size(self) -> int: 124 | """Returns the stream size.""" 125 | return self.stream_size 126 | 127 | def add_element(self, element: "Tpm2StreamElement") -> None: 128 | """Adds an element to the stream list.""" 129 | self.stream_elements.append(element) 130 | self.stream_size += element.get_size() 131 | self.header.update_size(self.stream_size) 132 | 133 | def get_stream(self) -> str: 134 | r"""Serializes the Header + elements. 135 | 136 | Returns: 137 | (str): string representing packed data as bytes (i.e. b'\x01\x00\x03') 138 | """ 139 | return self.header.marshal() + b"".join(element.marshal() for element in self.stream_elements) 140 | -------------------------------------------------------------------------------- /edk2toollib/uefi/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This file exists to satisfy pythons packaging requirements. 9 | 10 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 11 | """ 12 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This file exists to satisfy pythons packaging requirements. 
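The stream classes above compose into complete command buffers. A minimal sketch building a TPM2_Startup command, where 0x0000 is TPM_SU_CLEAR per the TPM 2.0 specification:

```python
import edk2toollib.tpm.tpm2_defs as t2d
import edk2toollib.tpm.tpm2_stream as t2s

stream = t2s.Tpm2CommandStream(t2d.TPM_ST_NO_SESSIONS, 0x00, t2d.TPM_CC_Startup)
stream.add_element(t2s.Tpm2StreamPrimitive(t2d.TPM_SU_Size, 0x0000))  # TPM_SU_CLEAR
command_bytes = stream.get_stream()  # the header size field is kept current by add_element()
```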
9 | 10 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 11 | """ 12 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/build_objects/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains various EDK2 build object python wrappers.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/build_objects/dsc_translator.py: -------------------------------------------------------------------------------- 1 | # @file dsc_translator 2 | # Translates a DSC object into a file 3 | # Copyright (c) Microsoft Corporation 4 | # 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | """Translates a DSC object into a file.""" 7 | 8 | import logging 9 | import os 10 | 11 | from edk2toollib.uefi.edk2.build_objects.dsc import ( 12 | build_option, 13 | component, 14 | definition, 15 | dsc, 16 | dsc_set, 17 | library_class, 18 | pcd, 19 | pcd_typed, 20 | pcd_variable, 21 | sku_id, 22 | ) 23 | 24 | 25 | class DscTranslator: 26 | """A class used to translate DSCs.""" 27 | 28 | @classmethod 29 | def dsc_to_file(cls: "DscTranslator", dsc_obj: dsc, filepath: str) -> None: 30 | """Transforms the DSC object to a file.""" 31 | file_path = os.path.abspath(filepath) 32 | f = open(file_path, "w") 33 | lines = cls._GetDscLinesFromDscObj(dsc_obj) 34 | for line in lines: 35 | f.write(line + "\n") 36 | f.close() 37 | 38 | @classmethod 39 | def _GetDscLinesFromDscObj(cls: "DscTranslator", obj: dsc, depth: int = 0) -> list: 40 | """Gets the DSC strings for an data model objects.""" 41 | lines = [] 42 | depth_pad = "".ljust(depth) 43 | org_depth = depth 44 | depth += 2 45 | 46 | if isinstance(obj, (list, set, dsc_set)): 47 | for item in obj: 48 | lines += cls._GetDscLinesFromDscObj(item, org_depth) 49 | elif type(obj) is dsc: 50 | lines.append(f"{depth_pad}[Defines]") 51 | lines += cls._GetDscLinesFromDscObj(obj.defines, depth) 52 | 53 | # Second do the Skus 54 | lines.append(f"{depth_pad}[SkuIds]") 55 | for x in obj.skus: 56 | lines += cls._GetDscLinesFromDscObj(x, depth) 57 | 58 | # Third, library classes 59 | for header, x in obj.library_classes.items(): 60 | lines.append(f"{depth_pad}[LibraryClasses{header}]") 61 | lines += cls._GetDscLinesFromDscObj(x, depth) 62 | 63 | # Next do the components 64 | for header, x in obj.components.items(): 65 | lines.append(f"{depth_pad}[Components{header}]") 66 | lines += cls._GetDscLinesFromDscObj(x, depth) 67 | 68 | # Then PCD's 69 | for header, x in obj.pcds.items(): 70 | lines.append(f"{depth_pad}[{header}]") 71 | lines += cls._GetDscLinesFromDscObj(x, depth) 72 | 73 | # Then Build Options 74 | print(obj.build_options.items()) 75 | for header, x in obj.build_options.items(): 76 | lines.append(f"{depth_pad}[BuildOptions{header}]") 77 | lines += cls._GetDscLinesFromDscObj(x, depth) 78 | 79 | elif type(obj) is sku_id: 80 | lines.append(f"{depth_pad}{obj.id}|{obj.name}|{obj.parent}") 81 | elif type(obj) is library_class: 82 | lines.append(f"{depth_pad}{obj.libraryclass}|{obj.inf}") 83 | elif type(obj) is definition: 84 | def_str = f"{obj.name} =\t{obj.value}" 85 | if obj.local: 86 | def_str = "DEFINE " + def_str 87 | lines.append(depth_pad + def_str) 88 | 89 | elif type(obj) is component: 90 | lines += cls._FormatComponent(obj, 
depth) 91 | 92 | elif type(obj) is pcd: 93 | lines.append(f"{depth_pad}{obj.namespace}.{obj.name}|{obj.value}") 94 | 95 | elif type(obj) is pcd_typed: 96 | pcd_str = f"{depth_pad}{obj.namespace}.{obj.name}|{obj.value}|{obj.datum_type}" 97 | if obj.max_size > 0: 98 | pcd_str += f"|{obj.max_size}" 99 | lines.append(pcd_str) 100 | 101 | elif type(obj) is pcd_variable: 102 | pcd_name = f"{depth_pad}{obj.namespace}.{obj.name}|{obj.var_name}" 103 | if obj.default is None: 104 | lines.append(f"{pcd_name}|{obj.var_guid}|{obj.var_offset}") 105 | elif len(obj.attributes) == 0: 106 | lines.append(f"{pcd_name}|{obj.var_guid}|{obj.var_offset}|{obj.default}") 107 | else: 108 | attr = ", ".join(obj.attributes) 109 | lines.append(f"{pcd_name}|{obj.var_guid}|{obj.var_offset}|{obj.default}|{attr}") 110 | 111 | elif type(obj) is build_option: 112 | rep = depth_pad if obj.family is None else f"{depth_pad}{obj.family}:" 113 | rep += "_".join((obj.target, obj.tagname, obj.arch, obj.tool_code, obj.attribute)) 114 | rep += f"= {obj.data}" 115 | lines.append(rep) 116 | else: 117 | logging.warning(f"UNKNOWN OBJECT {obj}") 118 | return lines 119 | 120 | @classmethod 121 | def _FormatComponent(cls: "DscTranslator", comp: dsc, depth: int = 0) -> list[str]: 122 | has_subsection = ( 123 | len(comp.pcds) > 0 or len(comp.defines) > 0 or len(comp.build_options) > 0 or len(comp.library_classes) > 0 124 | ) 125 | depth_pad = "".ljust(depth) 126 | if not has_subsection: 127 | return [ 128 | f"{depth_pad}{comp.inf}", 129 | ] 130 | lines = [] 131 | org_depth_pad = depth_pad 132 | depth_pad += " " # add two more onto our pad 133 | depth += 4 134 | lines.append(f"{org_depth_pad}{comp.inf} {{") 135 | if len(comp.pcds) > 0: 136 | for section, pcds in comp.pcds.items(): 137 | lines.append(f"{depth_pad}<{section}>") 138 | lines += cls._GetDscLinesFromDscObj(pcds, depth) 139 | if len(comp.library_classes) > 0: 140 | lines.append(f"{depth_pad}") 141 | lines += cls._GetDscLinesFromDscObj(comp.library_classes, depth) 142 | if len(comp.defines) > 0: 143 | lines.append(f"{depth_pad}") 144 | lines += cls._GetDscLinesFromDscObj(comp.defines, depth) 145 | if len(comp.build_options) > 0: 146 | lines.append(f"{depth_pad}") 147 | lines += cls._GetDscLinesFromDscObj(comp.build_options, depth) 148 | lines.append(f"{org_depth_pad}}}") 149 | return lines 150 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/fmp_payload_header.py: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Module that encodes and decodes a FMP_PAYLOAD_HEADER with a payload. 3 | # The FMP_PAYLOAD_HEADER is processed by the FmpPayloadHeaderLib in the 4 | # FmpDevicePkg. 5 | # 6 | # Copyright (c) 2018, Intel Corporation. All rights reserved.
7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | # 9 | 10 | """Module that encodes and decodes a FMP_PAYLOAD_HEADER with a payload. 11 | 12 | The FMP_PAYLOAD_HEADER is processed by the FmpPayloadHeaderLib in the 13 | FmpDevicePkg. 14 | """ 15 | 16 | import struct 17 | 18 | 19 | def _SIGNATURE_32(A: str, B: str, C: str, D: str) -> bytes: 20 | return struct.unpack("=I", bytearray(A + B + C + D, "ascii"))[0] 21 | 22 | 23 | def _SIGNATURE_32_TO_STRING(Signature: int) -> bytes: 24 | return struct.pack(" "FmpPayloadHeaderClass": 46 | """Inits an empty object.""" 47 | self.Signature = self._FMP_PAYLOAD_HEADER_SIGNATURE 48 | self.HeaderSize = self._StructSize 49 | self.FwVersion = 0x00000000 50 | self.LowestSupportedVersion = 0x00000000 51 | self.Payload = b"" 52 | 53 | def Encode(self) -> bytes: 54 | r"""Serializes the Header + payload. 55 | 56 | Returns: 57 | (bytes): string representing packed data as bytes (i.e. b'\x01\x00\x03') 58 | """ 59 | FmpPayloadHeader = struct.pack( 60 | self._StructFormat, self.Signature, self.HeaderSize, self.FwVersion, self.LowestSupportedVersion 61 | ) 62 | return FmpPayloadHeader + self.Payload 63 | 64 | def Decode(self, Buffer: bytes) -> bytes: 65 | """Loads data into the Object by parsing a buffer. 66 | 67 | Args: 68 | Buffer (obj): Buffer containing the data 69 | 70 | Returns: 71 | (str): string of binary representing the payload 72 | 73 | Raises: 74 | (ValueError): Invalid Buffer 75 | (ValueError): Invalid Signature 76 | (ValueError): Invalid Header size 77 | """ 78 | if len(Buffer) < self._StructSize: 79 | raise ValueError 80 | (Signature, HeaderSize, FwVersion, LowestSupportedVersion) = struct.unpack( 81 | self._StructFormat, Buffer[0 : self._StructSize] 82 | ) 83 | if Signature != self._FMP_PAYLOAD_HEADER_SIGNATURE: 84 | raise ValueError 85 | if HeaderSize < self._StructSize: 86 | raise ValueError 87 | self.Signature = Signature 88 | self.HeaderSize = HeaderSize 89 | self.FwVersion = FwVersion 90 | self.LowestSupportedVersion = LowestSupportedVersion 91 | self.Payload = Buffer[self.HeaderSize :] 92 | 93 | return self.Payload 94 | 95 | def DumpInfo(self) -> None: 96 | """Prints payload header information.""" 97 | print( 98 | "FMP_PAYLOAD_HEADER.Signature = {Signature:08X} ({SignatureString})".format( 99 | Signature=self.Signature, SignatureString=_SIGNATURE_32_TO_STRING(self.Signature) 100 | ) 101 | ) 102 | print("FMP_PAYLOAD_HEADER.HeaderSize = {HeaderSize:08X}".format(HeaderSize=self.HeaderSize)) 103 | print("FMP_PAYLOAD_HEADER.FwVersion = {FwVersion:08X}".format(FwVersion=self.FwVersion)) 104 | print( 105 | "FMP_PAYLOAD_HEADER.LowestSupportedVersion = {LowestSupportedVersion:08X}".format( 106 | LowestSupportedVersion=self.LowestSupportedVersion 107 | ) 108 | ) 109 | print("sizeof (Payload) = {Size:08X}".format(Size=len(self.Payload))) 110 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/guid_list.py: -------------------------------------------------------------------------------- 1 | # @file guid_list 2 | # 3 | # Simple list of GuidListEntry objects parsed from edk2 specific files. 
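A small sketch of wrapping a payload with the FMP payload header defined above and round-tripping it; the payload bytes and version numbers are arbitrary:

```python
from edk2toollib.uefi.edk2.fmp_payload_header import FmpPayloadHeaderClass

header = FmpPayloadHeaderClass()
header.FwVersion = 0x00000002
header.LowestSupportedVersion = 0x00000001
header.Payload = b"example firmware image"     # arbitrary payload bytes
blob = header.Encode()                         # FMP_PAYLOAD_HEADER followed by the payload

decoded = FmpPayloadHeaderClass()
assert decoded.Decode(blob) == header.Payload  # Decode() returns the payload portion
decoded.DumpInfo()
```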
4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | """Simple list of GuidListEntry objects parsed from edk2 specific files.""" 10 | 11 | import logging 12 | import os 13 | from typing import IO 14 | 15 | from edk2toollib.gitignore_parser import parse_gitignore_lines 16 | from edk2toollib.uefi.edk2.parsers.dec_parser import DecParser 17 | from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser 18 | 19 | 20 | class GuidListEntry: 21 | """A object representing a Guid. 22 | 23 | Attributes: 24 | name (str): name of guid 25 | guid (str): registry format guid in string format 26 | filepath (str): absolute path to file where this guid was found 27 | """ 28 | 29 | def __init__(self, name: str, guid: str, filepath: str) -> "GuidListEntry": 30 | """Create GuidListEntry for later review and compare. 31 | 32 | Args: 33 | name (str): name of guid 34 | guid (str): registry format guid in string format 35 | filepath (str): absolute path to file where this guid was found 36 | """ 37 | self.name = name 38 | self.guid = guid 39 | self.absfilepath = filepath 40 | 41 | def __str__(self) -> str: 42 | """String representation of the guid.""" 43 | return f"GUID: {self.guid} NAME: {self.name} FILE: {self.absfilepath}" 44 | 45 | 46 | class GuidList: 47 | """Static class for returning Guids.""" 48 | 49 | @staticmethod 50 | def guidlist_from_filesystem(folder: str, ignore_lines: list = list()) -> list: 51 | """Create a list of GuidListEntry from files found in the file system. 52 | 53 | Args: 54 | folder (str): path string to root folder to walk 55 | ignore_lines (list): list of gitignore syntax to ignore files and folders 56 | 57 | Returns: 58 | (list[GuidListEntry]): guids 59 | """ 60 | guids = [] 61 | ignore = parse_gitignore_lines(ignore_lines, os.path.join(folder, "nofile.txt"), folder) 62 | for root, dirs, files in os.walk(folder): 63 | for d in dirs[:]: 64 | fullpath = os.path.join(root, d) 65 | if ignore(fullpath): 66 | logging.debug(f"Ignore folder: {fullpath}") 67 | dirs.remove(d) 68 | 69 | for name in files: 70 | fullpath = os.path.join(root, name) 71 | if ignore(fullpath): 72 | logging.debug(f"Ignore file: {fullpath}") 73 | continue 74 | 75 | new_guids = GuidList.parse_guids_from_edk2_file(fullpath) 76 | guids.extend(new_guids) 77 | return guids 78 | 79 | @staticmethod 80 | def parse_guids_from_edk2_file(filename: str) -> list: 81 | """Parse edk2 files for guids. 82 | 83 | Args: 84 | filename (str): abspath to dec file 85 | 86 | Returns: 87 | (list[GuidListEntry]): guids 88 | """ 89 | if filename.lower().endswith(".dec"): 90 | with open(filename, "r") as f: 91 | return GuidList.parse_guids_from_dec(f, filename) 92 | elif filename.lower().endswith(".inf"): 93 | return GuidList.parse_guids_from_inf(filename) 94 | else: 95 | return [] 96 | 97 | @staticmethod 98 | def parse_guids_from_dec(stream: IO, filename: str) -> list: 99 | """Find all guids in a dec file contents contained with stream. 
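A short sketch of the filesystem scan described above; the folder path and ignore patterns are placeholders:

```python
from edk2toollib.uefi.edk2.guid_list import GuidList

for entry in GuidList.guidlist_from_filesystem("/src/edk2/MdePkg", ignore_lines=["Build/", "*.pyc"]):
    print(entry)  # GUID: <guid> NAME: <name> FILE: <absolute path>
```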
100 | 101 | Args: 102 | stream: lines of dec file content 103 | filename: abspath to dec file 104 | 105 | Returns: 106 | (list[GuidListEntry]): Guids 107 | """ 108 | results = [] 109 | dec = DecParser() 110 | dec.ParseStream(stream) 111 | for p in dec.Protocols: 112 | results.append(GuidListEntry(p.name, str(p.guid).upper(), filename)) 113 | for p in dec.PPIs: 114 | results.append(GuidListEntry(p.name, str(p.guid).upper(), filename)) 115 | for p in dec.Guids: 116 | results.append(GuidListEntry(p.name, str(p.guid).upper(), filename)) 117 | 118 | try: 119 | results.append(GuidListEntry(dec.Dict["PACKAGE_NAME"], dec.Dict["PACKAGE_GUID"], filename)) 120 | except Exception: 121 | logging.warning("Failed to find Package Guid from dec file: " + filename) 122 | return results 123 | 124 | @staticmethod 125 | def parse_guids_from_inf(filename: str) -> list: 126 | """Find the module guid in an Edk2 inf file. 127 | 128 | Args: 129 | filename (str): abspath to inf file 130 | 131 | Returns: 132 | (list[GuidListEntry]): Guids 133 | """ 134 | inf = InfParser() 135 | inf.ParseFile(filename) 136 | try: 137 | return [GuidListEntry(inf.Dict["BASE_NAME"], inf.Dict["FILE_GUID"].upper(), filename)] 138 | except Exception: 139 | logging.warning("Failed to find info from INF file: " + filename) 140 | return [] 141 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/parsers/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains parsers for variable EDK2 files and objects.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/parsers/targettxt_parser.py: -------------------------------------------------------------------------------- 1 | # @file targettxt_parser.py 2 | # Code to help parse Edk2 Conf/Target.txt file 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Code to help parse Edk2 Conf/Target.txt file.""" 9 | 10 | import os 11 | 12 | from edk2toollib.uefi.edk2.parsers.base_parser import HashFileParser 13 | 14 | 15 | class TargetTxtParser(HashFileParser): 16 | """Parser for the Edk2 Conf/Target.txt file. 
17 | 18 | Attributes: 19 | Parsed (bool): Whether the object has parsed a file or not 20 | Lines (list): Ordered list of each line in the file 21 | Dict (dict): Key / Value pair of all lines that contain a `=` in them (key=value) 22 | Path (str): path to Target.txt file 23 | """ 24 | 25 | def __init__(self) -> "TargetTxtParser": 26 | """Inits an empty parser.""" 27 | HashFileParser.__init__(self, "TargetTxtParser") 28 | self.Lines = [] 29 | self.Parsed = False 30 | self.Dict = {} 31 | self.Path = "" 32 | 33 | def ParseFile(self, filepath: str) -> None: 34 | """Parses the file provided.""" 35 | self.Logger.debug("Parsing file: %s" % filepath) 36 | if not os.path.isabs(filepath): 37 | fp = self.FindPath(filepath) 38 | else: 39 | fp = filepath 40 | self.Path = fp 41 | f = open(fp, "r") 42 | self.Lines = f.readlines() 43 | f.close() 44 | 45 | for line in self.Lines: 46 | sline = self.StripComment(line) 47 | 48 | if sline is None or len(sline) < 1: 49 | continue 50 | 51 | if sline.count("=") == 1: 52 | tokens = sline.split("=", 1) 53 | self.Dict[tokens[0].strip()] = tokens[1].strip() 54 | self.Logger.debug("Key,values found: %s = %s" % (tokens[0].strip(), tokens[1].strip())) 55 | continue 56 | 57 | self.Parsed = True 58 | -------------------------------------------------------------------------------- /edk2toollib/uefi/edk2/variablestore_manulipulations.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Contains classes and helper functions to modify variables in a UEFI ROM image. 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Contains classes and helper functions to modify variables in a UEFI ROM image.""" 9 | 10 | import mmap 11 | import os 12 | from typing import Optional 13 | 14 | import edk2toollib.uefi.edk2.variable_format as VF 15 | import edk2toollib.uefi.pi_firmware_volume as PiFV 16 | 17 | 18 | class VariableStore(object): 19 | """Class representing the variable store.""" 20 | 21 | def __init__( 22 | self, romfile: str, store_base: Optional[int] = None, store_size: Optional[int] = None 23 | ) -> "VariableStore": 24 | """Initialize the Variable store and read necessary files. 25 | 26 | Loads the data. 27 | """ 28 | self.rom_file_path = romfile 29 | self.store_base = store_base 30 | self.store_size = store_size 31 | self.rom_file = None 32 | self.rom_file_map = None 33 | 34 | if not os.path.isfile(self.rom_file_path): 35 | raise Exception("'%s' is not the path to a file!" % self.rom_file_path) 36 | 37 | self.rom_file = open(self.rom_file_path, "r+b") 38 | self.rom_file_map = mmap.mmap(self.rom_file.fileno(), 0) 39 | 40 | # Sanity check some things. 41 | file_size = self.rom_file_map.size() 42 | if store_base is not None and store_size is not None and (store_base + store_size) > file_size: 43 | raise Exception("ROM file is %d bytes. Cannot seek to %d+%d bytes!" % (file_size, store_base, store_size)) 44 | 45 | # Go ahead and advance the file cursor and load the FV header. 46 | self.rom_file.seek(self.store_base) 47 | self.fv_header = PiFV.EfiFirmwareVolumeHeader().load_from_file(self.rom_file) 48 | if self.fv_header.FileSystemGuid != PiFV.EfiSystemNvDataFvGuid: 49 | raise Exception("Store_base is not pointing at a valid SystemNvData FV!") 50 | if self.fv_header.FvLength != self.store_size: 51 | raise Exception("Store_size %d does not match FV size %d!" % (self.store_size, self.fv_header.FvLength)) 52 | 53 | # Advance the file cursor and load the VarStore header. 
54 | self.rom_file.seek(self.fv_header.HeaderLength, os.SEEK_CUR) 55 | self.var_store_header = VF.VariableStoreHeader().load_from_file(self.rom_file) 56 | if ( 57 | self.var_store_header.Format != VF.VARIABLE_STORE_FORMATTED 58 | or self.var_store_header.State != VF.VARIABLE_STORE_HEALTHY 59 | ): 60 | raise Exception("VarStore is invalid or cannot be processed with this helper!") 61 | 62 | # Now we're finally ready to read some variables. 63 | self.variables = [] 64 | self.rom_file.seek(self.var_store_header.StructSize, os.SEEK_CUR) 65 | try: 66 | while True: 67 | new_var = self.get_new_var_class().load_from_file(self.rom_file) 68 | 69 | # Seek past the current variable in the store. 70 | self.rom_file.seek(new_var.get_buffer_size(), os.SEEK_CUR) 71 | 72 | # Add the variable to the array. 73 | self.variables.append(new_var) 74 | except EOFError: 75 | pass 76 | except: 77 | raise 78 | 79 | # Finally, reset the file cursor to the beginning of the VarStore FV. 80 | self.rom_file.seek(self.store_base) 81 | 82 | def __del__(self) -> None: 83 | """Flushes and closes files.""" 84 | if self.rom_file_map is not None: 85 | self.rom_file_map.flush() 86 | self.rom_file_map.close() 87 | 88 | if self.rom_file is not None: 89 | self.rom_file.close() 90 | 91 | def get_new_var_class(self) -> VF.VariableHeader | VF.AuthenticatedVariableHeader: 92 | """Var class builder method depending on var type.""" 93 | if self.var_store_header.Type == "Var": 94 | new_var = VF.VariableHeader() 95 | else: 96 | new_var = VF.AuthenticatedVariableHeader() 97 | 98 | return new_var 99 | 100 | def add_variable(self, new_var: VF.VariableHeader | VF.AuthenticatedVariableHeader) -> None: 101 | """Add a variable to the variable list.""" 102 | self.variables.append(new_var) 103 | 104 | def flush_to_file(self) -> None: 105 | """Flush the changes to file.""" 106 | # First, we need to make sure that our variables will fit in the VarStore. 107 | var_size = sum([var.get_buffer_size() for var in self.variables]) 108 | # Add the terminating var header. 109 | dummy_var = self.get_new_var_class() 110 | var_size += dummy_var.StructSize 111 | if var_size > self.var_store_header.Size: 112 | raise Exception( 113 | "Total variable size %d is too large to fit in VarStore %d!" % (var_size, self.var_store_header.Size) 114 | ) 115 | 116 | # Now, we just have to serialize each variable in turn and write them to the mmap buffer. 117 | var_offset = self.store_base + self.fv_header.HeaderLength + self.var_store_header.StructSize 118 | for var in self.variables: 119 | var_buffer_size = var.get_buffer_size() 120 | self.rom_file_map[var_offset : (var_offset + var_buffer_size)] = var.serialize(True) 121 | var_offset += var_buffer_size 122 | 123 | # Add a terminating Variable Header. 124 | self.rom_file_map[var_offset : (var_offset + dummy_var.StructSize)] = b"\xff" * dummy_var.StructSize 125 | 126 | # Now we have to flush the mmap to the file. 127 | self.rom_file_map.flush() 128 | -------------------------------------------------------------------------------- /edk2toollib/uefi/fmp_auth_header.py: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Module that encodes and decodes a EFI_FIRMWARE_IMAGE_AUTHENTICATION with 3 | # certificate data and payload data. 4 | # 5 | # Copyright (c) 2018 - 2019, Intel Corporation. All rights reserved.
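A minimal sketch of the variable store helper above editing a ROM image in place; the file name, base, and size are placeholders that must match the NV storage FV in your image:

```python
from edk2toollib.uefi.edk2.variablestore_manulipulations import VariableStore

store = VariableStore("firmware.rom", store_base=0x0, store_size=0x20000)
print(f"Found {len(store.variables)} variables")
# ...append or modify entries in store.variables here...
store.flush_to_file()  # serialize the variable list back into the memory-mapped ROM
```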
6 | # Copyright (c) Microsoft Corporation 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | # 9 | 10 | """Module for encoding and decoding EFI_FIRMWARE_IMAGE_AUTHENTICATION with certificate data and payload data.""" 11 | 12 | import struct 13 | from typing import IO 14 | 15 | from edk2toollib.uefi.edk2.fmp_payload_header import FmpPayloadHeaderClass 16 | from edk2toollib.uefi.wincert import WinCertUefiGuid 17 | 18 | 19 | class FmpAuthHeaderClass(object): 20 | r"""An object representing an EFI_FIRMWARE_IMAGE_AUTHENTICATION. 21 | 22 | Can parse or produce an EFI_FIRMWARE_IMAGE_AUTHENTICATION structure/byte buffer. 23 | 24 | Attributes: 25 | MonotonicCount (int): It is included in the signature of AuthInfo. It is used to ensure freshness/no 26 | replay. It is incremented during each firmware image operation. 27 | AuthInfo (WinCertUefiGuid): Provides the authorization for the firmware image operations. 28 | Payload (str): string representing payload as bytes (i.e. b'\x01\x00\x03') 29 | FmpPayloadHeader (FmpPayloadHeaderClass): Header for the payload 30 | 31 | ``` 32 | typedef struct { 33 | UINT64 MonotonicCount; 34 | WIN_CERTIFICATE_UEFI_GUID AuthInfo; 35 | } EFI_FIRMWARE_IMAGE_AUTHENTICATION; 36 | ``` 37 | """ 38 | 39 | _MonotonicCountFormat = " "FmpAuthHeaderClass": 43 | """Inits an empty object.""" 44 | self.MonotonicCount = 0 45 | self.AuthInfo = WinCertUefiGuid() 46 | self.Payload = b"" 47 | self.FmpPayloadHeader = None 48 | 49 | def Encode(self) -> bytes: 50 | r"""Serializes the Auth header + AuthInfo + Payload/FmpPayloadHeader. 51 | 52 | Returns: 53 | (bytes): string representing packed data as bytes (i.e. b'\x01\x00\x03') 54 | """ 55 | FmpAuthHeader = struct.pack(self._MonotonicCountFormat, self.MonotonicCount) 56 | 57 | if self.FmpPayloadHeader is not None: 58 | return FmpAuthHeader + self.AuthInfo.Encode() + self.FmpPayloadHeader.Encode() 59 | else: 60 | return FmpAuthHeader + self.AuthInfo.Encode() + self.Payload 61 | 62 | def Decode(self, Buffer: IO) -> bytes: 63 | """Loads data into the Object by parsing a buffer. 64 | 65 | Args: 66 | Buffer (obj): Buffer containing the data 67 | 68 | Returns: 69 | (str): string of binary representing the payload 70 | 71 | Raises: 72 | (ValueError): Invalid Buffer 73 | """ 74 | if len(Buffer) < self._MonotonicCountSize: 75 | raise ValueError 76 | (MonotonicCount,) = struct.unpack(self._MonotonicCountFormat, Buffer[: self._MonotonicCountSize]) 77 | self.MonotonicCount = MonotonicCount 78 | 79 | self.Payload = self.AuthInfo.Decode(Buffer[self._MonotonicCountSize :]) 80 | if len(self.Payload) > 0: 81 | self.FmpPayloadHeader = FmpPayloadHeaderClass() 82 | self.FmpPayloadHeader.Decode(self.Payload) 83 | return self.Payload 84 | 85 | def IsSigned(self, Buffer: IO) -> bool: 86 | """Parses the buffer and returns if the Cert is signed or not. 
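
        Only the WIN_CERTIFICATE_UEFI_GUID that follows the monotonic count is
        inspected; the buffer is treated as signed when its CertType matches the
        PKCS7 certificate GUID. The object's own state is not modified.

        Example (illustrative sketch; capsule_bytes is an assumed variable
        holding a complete EFI_FIRMWARE_IMAGE_AUTHENTICATION blob):

            signed = FmpAuthHeaderClass().IsSigned(capsule_bytes)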
87 | 88 | Returns: 89 | (bool): True if signed 90 | (bool): False if invalid buffer 91 | (bool): False if not signed 92 | """ 93 | if len(Buffer) < self._MonotonicCountSize: 94 | return False 95 | 96 | auth_info = WinCertUefiGuid(Buffer[self._MonotonicCountSize :]) 97 | if auth_info.CertType != WinCertUefiGuid._EFI_CERT_TYPE_PKCS7_GUID.bytes_le: 98 | return False 99 | return True 100 | 101 | def DumpInfo(self) -> None: 102 | """Prints object to console.""" 103 | print( 104 | "EFI_FIRMWARE_IMAGE_AUTHENTICATION.MonotonicCount = {MonotonicCount:016X}".format( 105 | MonotonicCount=self.MonotonicCount 106 | ) 107 | ) 108 | self.AuthInfo.DumpInfo() 109 | print( 110 | "sizeof (Payload) = {Size:08X}".format( 111 | Size=len(self.Payload) 112 | ) 113 | ) 114 | if self.FmpPayloadHeader is not None: 115 | self.FmpPayloadHeader.DumpInfo() 116 | -------------------------------------------------------------------------------- /edk2toollib/uefi/pi_firmware_file.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Module contains helper classes and functions to work with UEFI FFs. 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module containing helper classes and functions for working with UEFI FFs.""" 9 | 10 | import struct 11 | import sys 12 | import uuid 13 | from typing import IO 14 | 15 | 16 | class EfiFirmwareFileSystemHeader(object): 17 | """An object representing an EFI_FFS_FILE_HEADER. 18 | 19 | Can parse or produce an EFI_FFS_FILE_HEADER structure/byte buffer. 20 | 21 | ``` 22 | typedef struct { 23 | EFI_GUID Name; 24 | EFI_FFS_INTEGRITY_CHECK IntegrityCheck; 25 | EFI_FV_FILETYPE Type; 26 | EFI_FFS_FILE_ATTRIBUTES Attributes; 27 | UINT8 Size[3]; 28 | EFI_FFS_FILE_STATE State; 29 | } EFI_FFS_FILE_HEADER; 30 | ``` 31 | """ 32 | 33 | def __init__(self) -> "EfiFirmwareFileSystemHeader": 34 | """Inits an empty object.""" 35 | self.StructString = "=16sHBBBBBB" # spell-checker: disable-line 36 | self.FileSystemGuid = None 37 | self.Size0 = None 38 | self.Size1 = None 39 | self.Size2 = None 40 | self.Attributes = None 41 | self.Type = None 42 | self.State = None 43 | 44 | def get_size(self) -> int: 45 | """Returns the size of the header.""" 46 | return self.Size0 + (self.Size1 << 8) + (self.Size2 << 16) 47 | 48 | def load_from_file(self, file: IO) -> "EfiFirmwareFileSystemHeader": 49 | """Loads data into the object from a filestream. 50 | 51 | Args: 52 | file (obj): An open file that has been seeked to the correct location. 53 | 54 | Returns: 55 | (EfiFirmwareFileSystemHeader): self 56 | """ 57 | orig_seek = file.tell() 58 | struct_bytes = file.read(struct.calcsize(self.StructString)) 59 | file.seek(orig_seek) 60 | 61 | # Load this object with the contents of the data. 62 | ( 63 | self.FileSystemGuid, 64 | self.Checksum, 65 | self.Type, 66 | self.Attributes, 67 | self.Size0, 68 | self.Size1, 69 | self.Size2, 70 | self.State, 71 | ) = struct.unpack(self.StructString, struct_bytes) 72 | 73 | # Update the GUID to be a UUID object. 74 | if sys.byteorder == "big": 75 | self.FileSystemGuid = uuid.UUID(bytes=self.FileSystemGuid) 76 | else: 77 | self.FileSystemGuid = uuid.UUID(bytes_le=self.FileSystemGuid) 78 | 79 | return self 80 | 81 | def serialize(self) -> bytes: 82 | r"""Serializes the object. 83 | 84 | Returns: 85 | (bytes): string representing packed data as bytes (i.e. 
b'\x01\x00\x03') 86 | """ 87 | file_system_guid_bin = self.FileSystemGuid.bytes if sys.byteorder == "big" else self.FileSystemGuid.bytes_le 88 | return struct.pack( 89 | self.StructString, 90 | file_system_guid_bin, 91 | self.Checksum, 92 | self.Type, 93 | self.Attributes, 94 | self.Size0, 95 | self.Size1, 96 | self.Size2, 97 | self.State, 98 | ) 99 | -------------------------------------------------------------------------------- /edk2toollib/uefi/pi_firmware_volume.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Module contains helper classes and functions to work with UEFI FVs. 3 | # 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | """Module containing helper classes and functions for working with UEFI Fvs.""" 10 | 11 | import struct 12 | import sys 13 | import uuid 14 | from typing import IO 15 | 16 | # 17 | # UEFI GUIDs 18 | # 19 | EfiSystemNvDataFvGuid = uuid.UUID(fields=(0xFFF12B8D, 0x7696, 0x4C8B, 0xA9, 0x85, 0x2747075B4F50)) 20 | 21 | 22 | # 23 | # UEFI #Defines 24 | # 25 | EFI_FVH_SIGNATURE = b"_FVH" 26 | 27 | 28 | class EfiFirmwareVolumeHeader(object): 29 | """An object representing an EFI_FIRMWARE_VOLUME_HEADER. 30 | 31 | Can parse or produce an EFI_FIRMWARE_VOLUME_HEADER structure/byte buffer. 32 | 33 | ``` 34 | typedef struct { 35 | UINT8 ZeroVector[16]; 36 | EFI_GUID FileSystemGuid; 37 | UINT64 FvLength; 38 | UINT32 Signature; 39 | EFI_FVB_ATTRIBUTES_2 Attributes; 40 | UINT16 HeaderLength; 41 | UINT16 Checksum; 42 | UINT16 ExtHeaderOffset; 43 | UINT8 Reserved[1]; 44 | UINT8 Revision; 45 | EFI_FV_BLOCK_MAP_ENTRY BlockMap[1]; 46 | } EFI_FIRMWARE_VOLUME_HEADER; 47 | ``` 48 | """ 49 | 50 | def __init__(self) -> "EfiFirmwareVolumeHeader": 51 | """Inits an empty object.""" 52 | self.StructString = "=16s16sQ4sLHHHBBQQ" # spell-checker: disable-line 53 | self.ZeroVector = None 54 | self.FileSystemGuid = None 55 | self.FvLength = None 56 | self.Attributes = None 57 | self.HeaderLength = None 58 | self.Checksum = None 59 | self.ExtHeaderOffset = None 60 | self.Reserved = None 61 | self.Revision = None 62 | self.Blockmap0 = None 63 | self.Blockmap1 = None 64 | 65 | def load_from_file(self, file: IO) -> "EfiFirmwareVolumeHeader": 66 | """Loads data into the object from a filestream. 67 | 68 | Args: 69 | file (obj): An open file that has been seeked to the correct location. 70 | 71 | Returns: 72 | (EfiFirmwareVolumeHeader): self 73 | 74 | Raises: 75 | (Exception): Invalid signature in fs 76 | """ 77 | # This function assumes that the file has been seeked 78 | # to the correct starting location. 79 | orig_seek = file.tell() 80 | struct_bytes = file.read(struct.calcsize(self.StructString)) 81 | file.seek(orig_seek) 82 | 83 | # Load this object with the contents of the data. 84 | ( 85 | self.ZeroVector, 86 | file_system_guid_bin, 87 | self.FvLength, 88 | self.Signature, 89 | self.Attributes, 90 | self.HeaderLength, 91 | self.Checksum, 92 | self.ExtHeaderOffset, 93 | self.Reserved, 94 | self.Revision, 95 | self.Blockmap0, 96 | self.Blockmap1, 97 | ) = struct.unpack(self.StructString, struct_bytes) 98 | 99 | # Make sure that this structure is what we think it is. 100 | if self.Signature != EFI_FVH_SIGNATURE: 101 | raise Exception("File does not appear to point to a valid EfiFirmwareVolumeHeader!") 102 | 103 | # Update the GUID to be a UUID object. 
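        # The 16 raw GUID bytes are wrapped in a uuid.UUID; bytes_le is used on
        # little-endian hosts and bytes on big-endian hosts, mirroring how
        # serialize() re-packs the value below.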
104 | if sys.byteorder == "big": 105 | self.FileSystemGuid = uuid.UUID(bytes=file_system_guid_bin) 106 | else: 107 | self.FileSystemGuid = uuid.UUID(bytes_le=file_system_guid_bin) 108 | 109 | return self 110 | 111 | def serialize(self) -> bytes: 112 | r"""Serializes the object. 113 | 114 | Returns: 115 | (str): string representing packed data as bytes (i.e. b'\x01\x00\x03') 116 | """ 117 | file_system_guid_bin = self.FileSystemGuid.bytes if sys.byteorder == "big" else self.FileSystemGuid.bytes_le 118 | return struct.pack( 119 | self.StructString, 120 | self.ZeroVector, 121 | file_system_guid_bin, 122 | self.FvLength, 123 | self.Signature, 124 | self.Attributes, 125 | self.HeaderLength, 126 | self.Checksum, 127 | self.ExtHeaderOffset, 128 | self.Reserved, 129 | self.Revision, 130 | self.Blockmap0, 131 | self.Blockmap1, 132 | ) 133 | 134 | 135 | class EfiFirmwareVolumeExtHeader(object): 136 | """An object representing an EFI_FIRMWARE_VOLUME_EXT_HEADER. 137 | 138 | Can parse or produce an EFI_FIRMWARE_VOLUME_EXT_HEADER structure/byte buffer. 139 | 140 | ``` 141 | typedef struct { 142 | EFI_GUID FileSystemGuid; 143 | UINT32 ExtHeaderSize; 144 | } EFI_FIRMWARE_VOLUME_EXT_HEADER; 145 | ``` 146 | """ 147 | 148 | def __init__(self) -> "EfiFirmwareVolumeExtHeader": 149 | """Inits an empty object.""" 150 | self.StructString = "=16sL" 151 | self.FileSystemGuid = None 152 | self.ExtHeaderSize = None 153 | 154 | def load_from_file(self, file: IO) -> "EfiFirmwareVolumeExtHeader": 155 | """Loads data into the object from a filestream. 156 | 157 | Args: 158 | file: An open file that has been seeked to the correct location. 159 | 160 | Returns: 161 | (EfiFirmwareVolumeExtHeader): self 162 | """ 163 | # This function assumes that the file has been seeked 164 | # to the correct starting location. 165 | orig_seek = file.tell() 166 | struct_bytes = file.read(struct.calcsize(self.StructString)) 167 | file.seek(orig_seek) 168 | 169 | # Load this object with the contents of the data. 
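        # "=16sL" mirrors the C struct in the class docstring: 16 raw bytes for
        # FileSystemGuid followed by a UINT32 ExtHeaderSize, with no padding.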
170 | (self.FileSystemGuid, self.ExtHeaderSize) = struct.unpack(self.StructString, struct_bytes) 171 | 172 | return self 173 | -------------------------------------------------------------------------------- /edk2toollib/uefi/status_codes.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Code to help convert an Int to StatusCode string 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Module for converting an Int to StatusCode string.""" 9 | 10 | 11 | class UefiStatusCode(object): 12 | """Object representing a UEFI Status Code from Appendix D of the UEFI spec.""" 13 | 14 | # high bit set 15 | ErrorCodeStrings = [ 16 | "NOT VALID", 17 | "Load Error", 18 | "Invalid Parameter", 19 | "Unsupported", 20 | "Bad BufferSize", 21 | "Buffer Too Small", 22 | "Not Ready", 23 | "Device Error", 24 | "Write Protected", 25 | "Out of Resources", 26 | "Volume Corrupt", 27 | "Volume Full", 28 | "No Media", 29 | "Media Changed", 30 | "Not Found", 31 | "Access Denied", 32 | "No Response", 33 | "No Mapping", 34 | "Time Out", 35 | "Not Started", 36 | "Already Started", 37 | "Aborted", 38 | "ICMP Error", 39 | "TFTP Error", 40 | "Protocol Error", 41 | "Incompatible Error", 42 | "Security Violation", 43 | "CRC Error", 44 | "End of Media", 45 | "Reserved(29)", 46 | "Reserved(30)", 47 | "End of File", 48 | "Invalid Language", 49 | "Compromised Data", 50 | "IP Address Conflict", 51 | "HTTP Error", 52 | ] 53 | 54 | NonErrorCodeStrings = [ 55 | "Success", 56 | "Unknown Glyph", 57 | "Delete Failure", 58 | "Write Failure", 59 | "Buffer Too Small", 60 | "Stale Data", 61 | "File System", 62 | "Reset Required", 63 | ] 64 | 65 | def Convert32BitToString(self, value: int) -> str: 66 | """Convert 32 bit int to a friendly UEFI status code string value.""" 67 | StatusStrings = UefiStatusCode.NonErrorCodeStrings 68 | 69 | if (value >> 31) & 1 == 1: 70 | # error 71 | StatusStrings = UefiStatusCode.ErrorCodeStrings 72 | value = value & 0x7FFFFFFF # mask off upper bit 73 | 74 | if value >= len(StatusStrings): 75 | return "Undefined StatusCode" 76 | 77 | return StatusStrings[value] 78 | 79 | def Convert64BitToString(self, value: int) -> str: 80 | """Convert 64 bit int to a friendly UEFI status code string value.""" 81 | StatusStrings = UefiStatusCode.NonErrorCodeStrings 82 | 83 | if (value >> 63) & 1 == 1: 84 | # error 85 | StatusStrings = UefiStatusCode.ErrorCodeStrings 86 | value = value & 0x7FFFFFFFFFFFFFFF # mask off upper bit 87 | 88 | if value >= len(StatusStrings): 89 | return "Undefined StatusCode" 90 | 91 | return StatusStrings[value] 92 | 93 | def ConvertHexString64ToString(self, hexstring: str) -> str: 94 | """Convert 64 bit hexstring in 0x format to a UEFI status code.""" 95 | value = int(hexstring, 16) 96 | return self.Convert64BitToString(value) 97 | 98 | def ConvertHexString32ToString(self, hexstring: str) -> str: 99 | """Convert 32 bit hexstring in 0x format to a UEFI status code.""" 100 | value = int(hexstring, 16) 101 | return self.Convert32BitToString(value) 102 | -------------------------------------------------------------------------------- /edk2toollib/uefi/uefi_types.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Python implementation of UEFI C types 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Python implementation of UEFI C types""" 9 | 10 | from typing import 
TypeAlias 11 | import ctypes 12 | 13 | # These depend on 32 bit vs 64 bit, but assuming 64 14 | EFI_PHYSICAL_ADDRESS: TypeAlias = ctypes.c_uint64 15 | UINTN: TypeAlias = ctypes.c_uint64 16 | UINT8: TypeAlias = ctypes.c_uint8 17 | -------------------------------------------------------------------------------- /edk2toollib/windows/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This file exists to satisfy pythons packaging requirements. 9 | 10 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 11 | """ 12 | -------------------------------------------------------------------------------- /edk2toollib/windows/capsule/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains different EDK2 file generators for capsules.""" 9 | -------------------------------------------------------------------------------- /edk2toollib/windows/capsule/cat_generator.py: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Script to generate Cat files for capsule update based on supplied inf file. 3 | # This uses the winsdk and the command line tool Inf2Cat.exe 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | """Script to generate Cat files for capsule update. 10 | 11 | Based on a supplied inf file and uses the winsdk and command line tool Inf2Cat.exe 12 | """ 13 | 14 | import logging 15 | import os 16 | from typing import Optional 17 | 18 | from edk2toollib.utility_functions import RunCmd 19 | from edk2toollib.windows.locate_tools import FindToolInWinSdk 20 | 21 | 22 | class CatGenerator(object): 23 | """A cat file generator. 24 | 25 | Attributes: 26 | arch (str): a supported architecture 27 | os (str): a supported os 28 | """ 29 | 30 | SUPPORTED_OS = { 31 | "win10": "10", 32 | "10": "10", 33 | "10_au": "10_AU", 34 | "10_rs2": "10_RS2", 35 | "10_rs3": "10_RS3", 36 | "10_rs4": "10_RS4", 37 | "server10": "Server10", 38 | "server2016": "Server2016", 39 | "serverrs2": "ServerRS2", 40 | "serverrs3": "ServerRS3", 41 | "serverrs4": "ServerRS4", 42 | } 43 | 44 | def __init__(self, arch: str, os: str) -> "CatGenerator": 45 | """Inits a Cat Generator. 46 | 47 | Args: 48 | arch (str): a supported arch 49 | os (str): a supported os 50 | """ 51 | self.Arch = arch 52 | self.OperatingSystem = os 53 | 54 | @property 55 | def Arch(self) -> str: 56 | """Returns the attribute arch.""" 57 | return self._arch 58 | 59 | @Arch.setter 60 | def Arch(self, value: str) -> None: 61 | """Validates the arch before setting it. 
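
        Accepted values are case-insensitive: "x64" and "amd64" map to X64,
        "arm" maps to ARM, and "arm64" / "aarch64" map to ARM64; anything else
        is rejected.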
62 | 63 | Raises: 64 | (ValueError): Invalid Architecture 65 | """ 66 | value = value.lower() 67 | if (value == "x64") or (value == "amd64"): # support amd64 value so INF and CAT tools can use same arch value 68 | self._arch = "X64" 69 | elif value == "arm": 70 | self._arch = "ARM" 71 | elif (value == "arm64") or (value == "aarch64"): # support UEFI defined aarch64 value as well 72 | self._arch = "ARM64" 73 | else: 74 | logging.critical("Unsupported Architecture: %s", value) 75 | raise ValueError("Unsupported Architecture") 76 | 77 | @property 78 | def OperatingSystem(self) -> str: 79 | """Returns the Operating system attribute.""" 80 | return self._operatingsystem 81 | 82 | @OperatingSystem.setter 83 | def OperatingSystem(self, value: str) -> None: 84 | """Validates the OS is supported before setting the attribute. 85 | 86 | Raises: 87 | (ValueError): Operating system is unsupported 88 | """ 89 | key = value.lower() 90 | if key not in CatGenerator.SUPPORTED_OS.keys(): 91 | logging.critical("Unsupported Operating System: %s", key) 92 | raise ValueError("Unsupported Operating System") 93 | self._operatingsystem = CatGenerator.SUPPORTED_OS[key] 94 | 95 | def MakeCat(self, OutputCatFile: str, PathToInf2CatTool: Optional[str] = None) -> int: 96 | """Generates a cat file to the outputcatfile directory. 97 | 98 | Args: 99 | OutputCatFile (str): Where to place the output cat file. 100 | PathToInf2CatTool (:obj:`str`, optional): path to Inf2CatTool if known. 101 | 102 | Raises: 103 | (Exception): Invalid Inf2CatTool path or unable to find it. 104 | (Exception): Inf2CatTool failed 105 | (Exception): Cat file not found, but tool executed successfully 106 | """ 107 | # Find Inf2Cat tool 108 | if PathToInf2CatTool is None: 109 | PathToInf2CatTool = FindToolInWinSdk("Inf2Cat.exe") 110 | # check if exists 111 | if PathToInf2CatTool is None or not os.path.exists(PathToInf2CatTool): 112 | raise Exception( 113 | "Can't find Inf2Cat on this machine. Please install the Windows 10 WDK - " 114 | "https://developer.microsoft.com/en-us/windows/hardware/windows-driver-kit" 115 | ) 116 | 117 | OutputFolder = os.path.dirname(OutputCatFile) 118 | # Make Cat file 119 | cmd = "/driver:. /os:" + self.OperatingSystem + "_" + self.Arch + " /verbose /uselocaltime" 120 | ret = RunCmd(PathToInf2CatTool, cmd, workingdir=OutputFolder) 121 | if ret != 0: 122 | raise Exception("Creating Cat file Failed with errorcode %d" % ret) 123 | if not os.path.isfile(OutputCatFile): 124 | raise Exception("CAT file (%s) not created" % OutputCatFile) 125 | 126 | return 0 127 | -------------------------------------------------------------------------------- /edk2toollib/windows/policy/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # File to mark this a python package 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """This package contains various firmware policy management tools.""" 9 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Tianocore Edk2 Pytool Library (edk2toollib) 2 | repo_url: https://github.com/tianocore/edk2-pytool-library 3 | copyright: Copyright (c) Microsoft. 
All rights reserved 4 | site_description: edk2toollib package documentation 5 | 6 | validation: 7 | links: 8 | absolute_links: relative_to_docs 9 | anchors: warn 10 | unrecognized_links: warn 11 | 12 | theme: 13 | name: material 14 | features: 15 | - navigation.tabs 16 | - navigation.indexes 17 | palette: 18 | - scheme: default 19 | toggle: 20 | icon: material/brightness-7 21 | name: Switch to dark mode 22 | - scheme: slate 23 | toggle: 24 | icon: material/brightness-4 25 | name: Switch to light mode 26 | 27 | docs_dir: docs/user 28 | 29 | plugins: 30 | - search 31 | - mkdocstrings: 32 | handlers: 33 | python: 34 | options: 35 | docstring_style: google 36 | - exclude: 37 | glob: 38 | - coverage.md 39 | - publishing.md 40 | - developing.md 41 | - gen-files: 42 | scripts: 43 | - docs/user/gen_api.py 44 | - awesome-pages 45 | 46 | extra_javascript: 47 | - https://unpkg.com/mermaid@8.7.0/dist/mermaid.min.js 48 | 49 | markdown_extensions: 50 | - markdown_include.include: 51 | base_path: . 52 | - admonition 53 | - codehilite 54 | - meta 55 | - fenced_code 56 | - pymdownx.betterem: 57 | smart_enable: all 58 | - pymdownx.caret 59 | - pymdownx.critic 60 | - pymdownx.details 61 | - pymdownx.emoji: 62 | emoji_index: !!python/name:material.extensions.emoji.twemoji 63 | emoji_generator: !!python/name:materialx.emoji.to_svg 64 | - pymdownx.inlinehilite 65 | - pymdownx.magiclink 66 | - pymdownx.mark 67 | - pymdownx.smartsymbols 68 | - pymdownx.superfences: 69 | custom_fences: 70 | - name: mermaid 71 | class: mermaid 72 | format: !!python/name:pymdownx.superfences.fence_div_format 73 | - pymdownx.tasklist: 74 | custom_checkbox: true 75 | - pymdownx.tilde 76 | - pymdownx.tabbed 77 | - toc: 78 | permalink: true 79 | watch: 80 | - 'docs/user' 81 | - 'mkdocs.yml' 82 | - 'edk2toollib/' 83 | 84 | nav: 85 | - Home: index.md 86 | - ... | features/**/*.md 87 | - ... 
| api/**/*.md 88 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "setuptools_scm[toml]"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "edk2-pytool-library" 7 | maintainers = [{name = "EDK2 Pytool Maintainers", email = "edk2-pytools@microsoft.com"}] 8 | dynamic = ["version"] 9 | description = "Python library supporting UEFI EDK2 firmware development" 10 | readme = {file = "readme.md", content-type = "text/markdown"} 11 | license-files = ["LICENSE"] 12 | requires-python = ">=3.10" 13 | dependencies = [ 14 | "pyasn1 >= 0.4.8", 15 | "pyasn1-modules >= 0.2.8", 16 | "cryptography >= 39.0.1", 17 | "joblib >= 1.3.2", 18 | "GitPython >= 3.1.30", 19 | "sqlalchemy >= 2.0.0", 20 | "pygount >= 1.6.1", 21 | "pywin32 >= 308 ; sys_platform == 'win32'", 22 | ] 23 | classifiers=[ 24 | "Programming Language :: Python :: 3", 25 | "Operating System :: OS Independent", 26 | "Development Status :: 4 - Beta", 27 | "Intended Audience :: Developers", 28 | "Programming Language :: Python :: 3.10", 29 | "Programming Language :: Python :: 3.11", 30 | "Programming Language :: Python :: 3.12", 31 | "Programming Language :: Python :: 3.13" 32 | ] 33 | 34 | [project.urls] 35 | homepage = "https://github.com/tianocore/edk2-pytool-library/" 36 | documentation = "https://www.tianocore.org/edk2-pytool-library/" 37 | issues = "https://github.com/tianocore/edk2-pytool-library/issues/" 38 | 39 | [project.optional-dependencies] 40 | dev = [ 41 | "ruff == 0.8.6", 42 | "pytest == 8.3.5", 43 | "coverage == 7.6.12", 44 | "pre-commit == 4.0.1", 45 | ] 46 | publish = [ 47 | "setuptools == 75.8.2", 48 | "build == 1.2.2.post1", 49 | "twine == 6.1.0", 50 | ] 51 | docs = [ 52 | "black==25.1.0", 53 | "mkdocs==1.6.1", 54 | "mkdocs-material==9.5.49", 55 | "mkdocstrings[python]==0.27.0", 56 | "mkdocstrings-python==1.11.1", 57 | "markdown-include==0.8.1", 58 | "mkdocs-gen-files==0.5.0", 59 | "mkdocs-exclude==1.0.2", 60 | "mkdocs-awesome-pages-plugin==2.9.3", 61 | ] 62 | 63 | [tool.setuptools] 64 | packages = ["edk2toollib"] 65 | 66 | [tool.setuptools_scm] 67 | 68 | [tool.coverage.run] 69 | include = ["edk2toollib/*"] 70 | 71 | [tool.ruff] 72 | src = ["edk2toollib"] 73 | line-length = 120 74 | 75 | [lint] 76 | ignore = ["ANN101"] 77 | select = [ 78 | "E", # Pycodestyle errors 79 | "W", # Pycodestyle warnings 80 | "F", # PyFlakes 81 | "D", # pydocstyle 82 | "I", # isort 83 | "PIE", # flake8 - PIEpip 84 | "ANN", 85 | ] 86 | 87 | [lint.flake8-annotations] 88 | allow-star-arg-any = true 89 | 90 | [lint.pydocstyle] 91 | convention = "google" 92 | 93 | [tool.pytest.ini_options] 94 | testpaths = [ 95 | "tests.unit" 96 | ] 97 | -------------------------------------------------------------------------------- /tests.unit/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Copyright (c) Microsoft Corporation 3 | # 4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | ## 6 | """This file exists to satisfy pythons packaging requirements. 
7 | 8 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 9 | """ 10 | -------------------------------------------------------------------------------- /tests.unit/database/test_edk2_db.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for the Edk2DB class 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # Spdx-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | # ruff: noqa: F811 9 | """Unittest for the Edk2DB class.""" 10 | 11 | import pytest 12 | from common import Tree, empty_tree # noqa: F401 13 | from edk2toollib.database import Edk2DB, Inf 14 | from edk2toollib.database.tables import InfTable, TableGenerator 15 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 16 | 17 | 18 | def test_load_existing_db(empty_tree: Tree): 19 | """Test that we can create a json database and load it later.""" 20 | empty_tree.create_library("TestLib1", "TestCls") 21 | edk2path = Edk2Path(str(empty_tree.ws), []) 22 | 23 | db_path = empty_tree.ws / "test.db" 24 | assert db_path.exists() is False 25 | 26 | db = Edk2DB(db_path, pathobj=edk2path) 27 | db.register(InfTable(n_jobs=1)) 28 | db.parse({}) 29 | with db.session() as session: 30 | rows = session.query(Inf).all() 31 | assert len(rows) == 1 32 | 33 | assert db_path.exists() 34 | 35 | # Ensure we can load an existing database 36 | db = Edk2DB(db_path, pathobj=edk2path) 37 | with db.session() as session: 38 | rows = session.query(Inf).all() 39 | assert len(rows) == 1 40 | 41 | 42 | def test_catch_bad_parser_and_query(empty_tree: Tree): 43 | """Test that a bad parser will be caught and logged.""" 44 | edk2path = Edk2Path(str(empty_tree.ws), []) 45 | 46 | db_path = empty_tree.ws / "test.db" 47 | assert db_path.exists() is False 48 | 49 | db = Edk2DB(db_path, pathobj=edk2path) 50 | parser = TableGenerator() 51 | db.register(parser) 52 | 53 | with pytest.raises(NotImplementedError): 54 | db.parse({}) 55 | 56 | with pytest.raises(NotImplementedError): 57 | parser.parse(db.session(), db.pathobj, 0, {}) 58 | 59 | 60 | def test_clear_parsers(empty_tree: Tree): 61 | """Test that we can clear all parsers. 
EnvironmentTable should always persist.""" 62 | edk2path = Edk2Path(str(empty_tree.ws), []) 63 | db = Edk2DB(empty_tree.ws / "test.db", pathobj=edk2path) 64 | db.register(TableGenerator()) 65 | assert len(db._parsers) == 1 66 | 67 | db.clear_parsers() 68 | assert len(db._parsers) == 0 69 | 70 | 71 | def test_multiple_databases_do_not_interfere(empty_tree: Tree): 72 | empty_tree.create_library("TestLib1", "TestCls") 73 | edk2path = Edk2Path(str(empty_tree.ws), []) 74 | 75 | db_path1 = empty_tree.ws / "test1.db" 76 | db_path2 = empty_tree.ws / "test2.db" 77 | 78 | assert db_path1.exists() is False 79 | assert db_path2.exists() is False 80 | 81 | db1 = Edk2DB(db_path1, pathobj=edk2path) 82 | db2 = Edk2DB(db_path2, pathobj=edk2path) 83 | 84 | assert db_path1.exists() 85 | assert db_path2.exists() 86 | 87 | db1.register(InfTable(n_jobs=1)) 88 | db1.parse({}) 89 | 90 | with db1.session() as session: 91 | rows = session.query(Inf).all() 92 | assert len(rows) == 1 93 | 94 | with db2.session() as session: 95 | rows = session.query(Inf).all() 96 | assert len(rows) == 0 97 | -------------------------------------------------------------------------------- /tests.unit/database/test_environment_table.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for the EnvironmentTable generator 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | # ruff: noqa: F811 9 | """Tests for build an inf file table.""" 10 | 11 | from datetime import date 12 | 13 | from edk2toollib.database import Edk2DB, Environment 14 | from edk2toollib.database.tables import EnvironmentTable 15 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 16 | 17 | 18 | def test_environment_no_version(tmp_path): 19 | """Test that version is set if not found in the environment variables.""" 20 | edk2path = Edk2Path(str(tmp_path), []) 21 | db = Edk2DB(":memory:", pathobj=edk2path) 22 | db.register(EnvironmentTable()) 23 | db.parse({}) 24 | 25 | with db.session() as session: 26 | rows = session.query(Environment).all() 27 | assert len(rows) == 1 28 | env = rows[0] 29 | assert env.date.date() == date.today() 30 | assert env.version == "UNKNOWN" 31 | assert env.values == [] 32 | 33 | 34 | def test_environment_with_vars(tmp_path): 35 | """Tests that environment variables are recorded.""" 36 | env = { 37 | "ACTIVE_PLATFORM": "TestPkg/TestPkg.dsc", 38 | "TARGET_ARCH": "X64", 39 | "TOOL_CHAIN_TAG": "VS2019", 40 | "FLASH_DEFINITION": "TestPkg/TestPkg.fdf", 41 | } 42 | edk2path = Edk2Path(str(tmp_path), []) 43 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 44 | db.register(EnvironmentTable()) 45 | db.parse(env) 46 | 47 | with db.session() as session: 48 | rows = session.query(Environment).all() 49 | assert len(rows) == 1 50 | entry = rows[0] 51 | assert entry.version == "UNKNOWN" 52 | assert entry.date.date() == date.today() 53 | assert len(entry.values) == 4 54 | 55 | db.parse(env) 56 | 57 | with db.session() as session: 58 | rows = session.query(Environment).all() 59 | assert len(rows) == 2 60 | -------------------------------------------------------------------------------- /tests.unit/database/test_inf_table.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for the InfTable generator 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | # ruff: noqa: F811 9 | """Tests for build an inf file table.""" 10 | 11 
| import shutil 12 | from pathlib import Path 13 | 14 | from common import Tree, empty_tree, write_file # noqa: F401 15 | from edk2toollib.database import Edk2DB, Inf 16 | from edk2toollib.database.tables import InfTable 17 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 18 | 19 | 20 | def test_valid_inf(empty_tree: Tree): 21 | """Tests that a valid Inf with typical settings is properly parsed.""" 22 | edk2path = Edk2Path(str(empty_tree.ws), []) 23 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 24 | db.register(InfTable(n_jobs=1)) 25 | 26 | # Configure inf 27 | libs = ["TestLib2", "TestLib3"] 28 | protocols = ["gEfiTestProtocolGuid"] 29 | guids = ["gEfiTestTokenSpaceGuid"] 30 | sources = ["Test.c"] 31 | sources_ia32 = ["IA32/Test.c"] 32 | sources_x64 = ["X64/Test.c"] 33 | 34 | lib1 = empty_tree.create_library( 35 | "TestLib1", 36 | "TestCls", 37 | libraryclasses=libs, 38 | protocols=protocols, 39 | guids=guids, 40 | sources=sources, 41 | sources_ia32=sources_ia32, 42 | sources_x64=sources_x64, 43 | ) 44 | lib2 = empty_tree.create_library( 45 | "TestLib2", 46 | "TestCls", 47 | libraryclasses=libs, 48 | protocols=protocols, 49 | guids=guids, 50 | sources=sources, 51 | sources_ia32=sources_ia32, 52 | sources_x64=sources_x64, 53 | ) 54 | 55 | (empty_tree.library_folder / "IA32").mkdir() 56 | (empty_tree.library_folder / "X64").mkdir() 57 | for file in sources + sources_ia32 + sources_x64: 58 | write_file((empty_tree.library_folder / file).resolve(), "FILLER") 59 | 60 | db.parse({}) 61 | 62 | with db.session() as session: 63 | rows = session.query(Inf).all() 64 | assert len(rows) == 2 65 | for row in rows: 66 | assert row.path in [Path(lib1).as_posix(), Path(lib2).as_posix()] 67 | assert row.library_class == "TestCls" 68 | 69 | for inf in [Path(lib1).as_posix(), Path(lib2).as_posix()]: 70 | row = session.query(Inf).filter(Inf.path == inf).first() 71 | assert len(row.sources) == 3 72 | 73 | 74 | def test_source_path_with_dot_dot(empty_tree: Tree): 75 | """Tests that paths with .. are correctly resolved.""" 76 | edk2path = Edk2Path(str(empty_tree.ws), []) 77 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 78 | db.register(InfTable(n_jobs=1)) 79 | empty_tree.create_library("TestLib", "TestCls", sources=["../Test1.c", "Test2.c"]) 80 | file1 = empty_tree.package / "Test1.c" 81 | file1.touch() 82 | file2 = empty_tree.library_folder / "Test2.c" 83 | file2.touch() 84 | 85 | db.parse({}) 86 | with db.session() as session: 87 | for row in session.query(Inf).all(): 88 | for source in row.sources: 89 | assert empty_tree.ws / source.path in [file1, file2] 90 | 91 | 92 | def test_pkg_not_pkg_path_relative(empty_tree: Tree): 93 | """Tests when a package is not itself relative to a package path. 94 | 95 | !!! 
example 96 | pp = ["Common"] 97 | pkg1 "Common/Package1" 98 | pkg2 "Common/Packages/Package2" 99 | 100 | assert pkg1.relative == "Package1" 101 | assert pkg2.relative == "Packges/Package2" 102 | """ 103 | empty_tree.create_library("TestLib", "TestCls", sources=["Test2.c"]) 104 | file2 = empty_tree.library_folder / "Test2.c" 105 | file2.touch() 106 | 107 | ws = empty_tree.ws 108 | common = ws / "Common" 109 | 110 | shutil.copytree(ws, common) 111 | shutil.rmtree(ws / "TestPkg") 112 | 113 | edk2path = Edk2Path(str(ws), []) 114 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 115 | db.register(InfTable(n_jobs=1)) 116 | db.parse({}) 117 | 118 | with db.session() as session: 119 | inf = session.query(Inf).one() 120 | assert len(inf.sources) == 1 121 | assert inf.sources[0].path == Path("Common", "TestPkg", "Library", "Test2.c").as_posix() 122 | assert inf.path == Path("Common", "TestPkg", "Library", "TestLib.inf").as_posix() 123 | -------------------------------------------------------------------------------- /tests.unit/database/test_instanced_fv_table.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittests for the InstancedFv table generator 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Unittest for the InstancedFv table generator.""" 9 | 10 | import logging 11 | from pathlib import Path 12 | 13 | import pytest 14 | from common import Tree, empty_tree # noqa: F401 15 | from edk2toollib.database import Edk2DB, Fv 16 | from edk2toollib.database.tables import InstancedFvTable, InstancedInfTable 17 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 18 | 19 | GET_INF_LIST_QUERY = """ 20 | SELECT i.path 21 | FROM inf AS i 22 | JOIN junction AS j ON ? 
= j.key1 and j.table2 = "inf" 23 | """ 24 | 25 | 26 | def test_valid_fdf(empty_tree: Tree): # noqa: F811 27 | """Tests that a typical fdf can be properly parsed.""" 28 | edk2path = Edk2Path(str(empty_tree.ws), []) 29 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 30 | db.register(*[InstancedInfTable(), InstancedFvTable()]) 31 | other_folder = empty_tree.ws / "TestPkg" / "Extra Drivers" 32 | other_folder.mkdir(parents=True) 33 | 34 | comp1 = empty_tree.create_component("TestDriver1", "DXE_DRIVER") 35 | comp2 = empty_tree.create_component("TestDriver2", "DXE_DRIVER") 36 | comp3 = empty_tree.create_component("TestDriver3", "DXE_DRIVER") 37 | comp4 = empty_tree.create_component("TestDriver4", "DXE_DRIVER") 38 | comp4 = Path(empty_tree.ws, comp4).rename(other_folder / "TestDriver4.inf") 39 | comp5 = empty_tree.create_component("TestDriver5", "DXE_DRIVER") 40 | comp6 = empty_tree.create_component("TestDriver6", "DXE_DRIVER") 41 | comp7 = empty_tree.create_component("TestDriver7", "DXE_DRIVER") 42 | comp8 = empty_tree.create_component("TestDriver8", "DXE_DRIVER") 43 | comp9 = empty_tree.create_component("TestDriver9", "DXE_DRIVER") 44 | 45 | dsc = empty_tree.create_dsc( 46 | libraryclasses=[], 47 | components=[comp1, comp2, comp3, comp4, comp5, comp6, comp7, comp8, comp9], 48 | ) 49 | 50 | # Write the FDF; includes a "infformat" FV used to test 51 | # All the different ways an INF can be defined in the FDF 52 | fdf = empty_tree.create_fdf( 53 | fv_testfv=[ 54 | f"INF {comp1}", # PP relative 55 | f"INF {str(empty_tree.ws / comp2)}", # Absolute 56 | f"INF RuleOverride=RESET_VECTOR {comp3}", # RuleOverride 57 | "INF TestPkg/Extra Drivers/TestDriver4.inf", # Space in path 58 | f"INF ruleoverride = RESET_VECTOR {comp5}", # RuleOverride lowercase & spaces' 59 | f"INF USE = IA32 {comp6}", 60 | f'INF VERSION = "1.1.1" {comp7}', 61 | f'INF UI = "HELLO" {comp8}', 62 | f"INF FILE_GUID = 12345678-1234-1234-1234-123456789012 {comp9}", 63 | ] 64 | ) 65 | env = { 66 | "ACTIVE_PLATFORM": dsc, 67 | "FLASH_DEFINITION": fdf, 68 | "TARGET_ARCH": "IA32 X64", 69 | "TARGET": "DEBUG", 70 | } 71 | db.parse(env) 72 | 73 | with db.session() as session: 74 | infs = session.query(Fv).filter_by(name="testfv").one().infs 75 | 76 | assert len(infs) == 9 77 | assert sorted([inf.path for inf in infs]) == sorted( 78 | [ 79 | Path(comp1).as_posix(), 80 | Path(comp2).as_posix(), 81 | Path(comp3).as_posix(), 82 | "TestPkg/Extra Drivers/TestDriver4.inf", 83 | Path(comp5).as_posix(), 84 | Path(comp6).as_posix(), 85 | Path(comp7).as_posix(), 86 | Path(comp8).as_posix(), 87 | Path(comp9).as_posix(), 88 | ] 89 | ) 90 | 91 | 92 | def test_missing_dsc_and_fdf(empty_tree: Tree, caplog): # noqa: F811 93 | """Tests that the table generator is skipped if missing the necessary information.""" 94 | with caplog.at_level(logging.DEBUG): 95 | edk2path = Edk2Path(str(empty_tree.ws), []) 96 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 97 | db.register(InstancedFvTable()) 98 | 99 | # raise exception if the Table generator is missing required information to Generate the table. 
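        # With a completely empty environment even TARGET_ARCH / TARGET are
        # missing, which is what is expected to surface as the KeyError; the
        # calls below supply them and only log that the DSC/FDF were skipped.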
100 | with pytest.raises(KeyError): 101 | db.parse({}) 102 | 103 | db.parse({"TARGET_ARCH": "", "TARGET": "DEBUG"}) 104 | db.parse({"TARGET_ARCH": "", "TARGET": "DEBUG", "ACTIVE_PLATFORM": "Pkg.dsc"}) 105 | 106 | # check that we skipped (instead of asserting) twice, once for missing ACTIVE_PLATFORM and once for the 107 | # missing FLASH_DEFINITION 108 | count = 0 109 | for _, _, record in caplog.record_tuples: 110 | if record.startswith("DSC or FDF not found"): 111 | count += 1 112 | assert count == 2 113 | 114 | 115 | def test_non_closest_inf_path(empty_tree: Tree): # noqa: F811 116 | # Create the Common folder, which will be a package path 117 | common_folder = empty_tree.ws / "Common" 118 | common_folder.mkdir() 119 | 120 | # Create a subfolder of common folder, which is also a package path 121 | sub_folder = common_folder / "SubFolder" 122 | sub_folder.mkdir() 123 | edk2path = Edk2Path(str(empty_tree.ws), ["Common", str(sub_folder)]) 124 | 125 | # Make the INF we want to make sure we get the closest match of 126 | (sub_folder / "Drivers").mkdir() 127 | driver = empty_tree.create_component("TestDriver1", "DXE_DRIVER") 128 | driver = Path(empty_tree.ws, driver).rename(sub_folder / "Drivers" / "TestDriver1.inf") 129 | 130 | dsc = empty_tree.create_dsc(libraryclasses=[], components=[driver]) 131 | fdf = empty_tree.create_fdf( 132 | fv_testfv=[ 133 | "INF Common/SubFolder/Drivers/TestDriver1.inf", 134 | ] 135 | ) 136 | 137 | db = Edk2DB(empty_tree.ws / "db.db", pathobj=edk2path) 138 | db.register(InstancedInfTable(), InstancedFvTable()) 139 | env = { 140 | "ACTIVE_PLATFORM": dsc, 141 | "FLASH_DEFINITION": fdf, 142 | "TARGET_ARCH": "IA32 X64", 143 | "TARGET": "DEBUG", 144 | } 145 | db.parse(env) 146 | 147 | with db.session() as session: 148 | libs = session.query(Fv).filter_by(name="testfv").one().infs 149 | assert len(libs) == 1 150 | 151 | assert libs[0].path == "Drivers/TestDriver1.inf" 152 | -------------------------------------------------------------------------------- /tests.unit/database/test_package_table.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for the PackageTable generator 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Tests for building a package table.""" 9 | 10 | import sys 11 | 12 | import git 13 | import pytest 14 | from edk2toollib.database import Edk2DB, Package 15 | from edk2toollib.database.tables import PackageTable 16 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 17 | 18 | 19 | @pytest.mark.skipif(sys.platform.startswith("win"), reason="Linux only") 20 | def test_basic_parse(tmp_path): 21 | """Tests basic PackageTable functionality.""" 22 | # Clone the repo and init a single submodule. 
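    # Network access is required: the test clones mu_tiano_platforms from GitHub
    # and initializes only the Features/CONFIG submodule, so packages from both
    # the superproject and a submodule repo can be attributed further down.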
23 | repo_path = tmp_path / "mu_tiano_platforms" 24 | repo_path.mkdir() 25 | with git.Repo.clone_from("https://github.com/microsoft/mu_tiano_platforms", repo_path) as repo: 26 | if repo is None: 27 | raise Exception("Failed to clone mu_tiano_platforms") 28 | repo.git.submodule("update", "--init", "Features/CONFIG") 29 | 30 | edk2path = Edk2Path(str(repo_path), ["Platforms", "Features/CONFIG"]) 31 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 32 | db.register(PackageTable()) 33 | db.parse({}) 34 | 35 | with db.session() as session: 36 | packages = session.query(Package).all() 37 | 38 | to_pass = { 39 | ("QemuPkg", "MU_TIANO_PLATFORMS"): False, 40 | ("QemuSbsaPkg", "MU_TIANO_PLATFORMS"): False, 41 | ("QemuQ35Pkg", "MU_TIANO_PLATFORMS"): False, 42 | ("SetupDataPkg", "Features/CONFIG"): False, 43 | } 44 | for package in packages: 45 | to_pass[(package.name, package.repository.name)] = True 46 | 47 | # Assert that all expected items in to_pass were found and set to True 48 | assert all(to_pass.values()) 49 | -------------------------------------------------------------------------------- /tests.unit/database/test_source_table.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for the SourceTable generator 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | """Tests for building a source file table.""" 9 | 10 | from common import write_file # noqa: I001 11 | from edk2toollib.database import Source, Edk2DB 12 | from edk2toollib.database.tables import SourceTable 13 | from edk2toollib.uefi.edk2.path_utilities import Edk2Path 14 | 15 | SOURCE_LICENSE = r""" 16 | /** @file 17 | This is a description of a fake file 18 | 19 | Copyright (c) Corporation 20 | SPDX-License-Identifier: BSD-2-Clause-Patent 21 | *// 22 | """ 23 | 24 | SOURCE_NO_LICENSE = r""" 25 | /** @file 26 | This is a description of a fake file 27 | 28 | Copyright (c) Corporation 29 | *// 30 | """ 31 | 32 | SOURCE_WITH_CODE = r""" 33 | x = 5 34 | y = 6 35 | z = x + y 36 | print(z) 37 | """ 38 | 39 | 40 | def test_source_with_license(tmp_path): 41 | """Tests that a source with a license is detected and the license is set.""" 42 | edk2path = Edk2Path(str(tmp_path), []) 43 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 44 | db.register(SourceTable(n_jobs=1)) 45 | 46 | # Verify we detect c and h files 47 | for file in ["file.c", "file.h", "file.asm", "file.cpp"]: 48 | write_file(tmp_path / file, SOURCE_LICENSE) 49 | 50 | db.parse({}) 51 | with db.session() as session: 52 | rows = session.query(Source).all() 53 | assert len(rows) == 4 54 | for entry in rows: 55 | assert entry.license == "BSD-2-Clause-Patent" 56 | 57 | 58 | def test_source_without_license(tmp_path): 59 | """Tests that a source without a license is detected.""" 60 | edk2path = Edk2Path(str(tmp_path), []) 61 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 62 | db.register(SourceTable(n_jobs=1)) 63 | 64 | # Verify we detect c and h files 65 | for file in ["file.c", "file.h"]: 66 | write_file(tmp_path / file, SOURCE_NO_LICENSE) 67 | 68 | db.parse({}) 69 | 70 | with db.session() as session: 71 | rows = session.query(Source).all() 72 | assert len(rows) == 2 73 | for entry in rows: 74 | assert entry.license == "Unknown" 75 | 76 | 77 | def test_invalid_filetype(tmp_path): 78 | """Tests that a source file that is not of the valid type is skipped.""" 79 | edk2path = Edk2Path(str(tmp_path), []) 80 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 81 | 
db.register(SourceTable(n_jobs=1)) 82 | 83 | # Ensure we don't catch a file that isnt a c / h file. 84 | write_file(tmp_path / "file1.py", SOURCE_LICENSE) 85 | db.parse({}) 86 | with db.session() as session: 87 | rows = session.query(Source).all() 88 | assert len(rows) == 0 89 | 90 | 91 | def test_source_with_code(tmp_path): 92 | """Tests that a source with code is detected.""" 93 | edk2path = Edk2Path(str(tmp_path), []) 94 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 95 | db.register(SourceTable(n_jobs=1, source_stats=True, source_extensions=["*.py"])) 96 | 97 | # Verify we detect c and h files 98 | write_file(tmp_path / "file.py", SOURCE_WITH_CODE) 99 | 100 | db.parse({}) 101 | 102 | with db.session() as session: 103 | file = session.query(Source).one() 104 | assert file.code_lines == 4 105 | 106 | 107 | def test_source_with_code_is_updated(tmp_path): 108 | """Tests that a source with code is updated When parsed again with different source_stats setting.""" 109 | edk2path = Edk2Path(str(tmp_path), []) 110 | db = Edk2DB(tmp_path / "db.db", pathobj=edk2path) 111 | db.register(SourceTable(n_jobs=1, source_stats=False, source_extensions=["*.py"])) 112 | 113 | # Verify we detect c and h files 114 | write_file(tmp_path / "file.py", SOURCE_WITH_CODE) 115 | 116 | db.parse({}) 117 | 118 | with db.session() as session: 119 | file = session.query(Source).one() 120 | assert ( 121 | file.code_lines == file.total_lines == 5 122 | ) # When not parsing source_stats, code lines is equal to total lines 123 | 124 | db.clear_parsers() 125 | db.register(SourceTable(n_jobs=1, source_stats=True, source_extensions=["*.py"])) 126 | 127 | db.parse({}) 128 | with db.session() as session: 129 | file = session.query(Source).one() 130 | assert file.code_lines == 4 131 | -------------------------------------------------------------------------------- /tests.unit/parsers/IncludedDefinesChild.fdf.inc: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Test FDF containing simple definitions and conditionals 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | # 7 | ## 8 | 9 | 10 | DEFINE INTERNAL_VALUE = 104 11 | DEFINE EXTRA_BLOCK_SIZE = 0x00001000 12 | 13 | !if $(TARGET) == "TEST4" 14 | DEFINE AM_I_YOU = FALSE 15 | !endif 16 | -------------------------------------------------------------------------------- /tests.unit/parsers/IncludedDefinesChildConditional.fdf.inc: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Test FDF containing simple definitions and conditionals 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | # 7 | ## 8 | 9 | 10 | DEFINE CONDITIONAL_VALUE = 121 11 | 12 | !if $(TARGET) == "TEST4" 13 | DEFINE AM_I_YOU = FALSE 14 | !endif 15 | -------------------------------------------------------------------------------- /tests.unit/parsers/IncludedDefinesParent.fdf: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Test FDF containing simple definitions and conditionals 3 | # 4 | # Copyright (c) Microsoft Corporation. 
5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | # 7 | ## 8 | 9 | [Defines] 10 | 11 | !include IncludedDefinesChild.fdf.inc 12 | 13 | DEFINE FD_BASE = 0x00800000 14 | DEFINE FD_BLOCK_SIZE = 0x00001000 15 | 16 | !if $(TARGET) == "TEST5" 17 | !include IncludedDefinesChildConditional.fdf.inc 18 | !endif 19 | 20 | !if $(TARGET) == "TEST2" 21 | DEFINE FD_SIZE = 0x00850000 22 | DEFINE NUM_BLOCKS = 0x850 23 | !else 24 | 25 | DEFINE FD_SIZE = 0x00410000 26 | DEFINE NUM_BLOCKS = 0x410 27 | !endif 28 | 29 | !if $(TARGET) == "TEST2" 30 | DEFINE EXTRA_DEF = 42 31 | !endif 32 | -------------------------------------------------------------------------------- /tests.unit/parsers/SimpleDefines.fdf: -------------------------------------------------------------------------------- 1 | ## @file 2 | # Test FDF containing simple definitions and conditionals 3 | # 4 | # Copyright (c) Microsoft Corporation. 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | # 7 | ## 8 | 9 | [Defines] 10 | DEFINE FD_BASE = 0x00800000 11 | DEFINE FD_BLOCK_SIZE = 0x00001000 12 | 13 | !if $(TARGET) == "TEST2" 14 | DEFINE FD_SIZE = 0x00850000 15 | DEFINE NUM_BLOCKS = 0x850 16 | !else 17 | 18 | DEFINE FD_SIZE = 0x00410000 19 | DEFINE NUM_BLOCKS = 0x410 20 | !endif 21 | 22 | !if $(TARGET) == "TEST2" 23 | DEFINE EXTRA_DEF = 42 24 | !endif 25 | -------------------------------------------------------------------------------- /tests.unit/parsers/__init__.py: -------------------------------------------------------------------------------- 1 | ## 2 | # Copyright (c) Microsoft Corporation 3 | # 4 | # SPDX-License-Identifier: BSD-2-Clause-Patent 5 | ## 6 | """This file exists to satisfy pythons packaging requirements. 7 | 8 | Read more: https://docs.python.org/3/reference/import.html#regular-packages 9 | """ 10 | -------------------------------------------------------------------------------- /tests.unit/parsers/test_guid_parser.py: -------------------------------------------------------------------------------- 1 | # @file guid_parser_test.py 2 | # Contains unit test routines for the guid parser class. 
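# GuidParser converts between registry-format GUID strings, the C-struct
# initializer format, and uuid.UUID objects; the cases below cover both valid
# and malformed inputs.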
3 | # 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | 10 | import unittest 11 | from edk2toollib.uefi.edk2.parsers.guid_parser import GuidParser 12 | 13 | 14 | class TestGuidParser(unittest.TestCase): 15 | def test_valid_input_guid(self): 16 | SAMPLE_DATA_C_FORMAT_GUID = "{0x66341ae8, 0x668f, 0x4192, {0xb4, 0x4d, 0x5f, 0x87, 0xb8, 0x68, 0xf0, 0x41}}" # noqa: E501 17 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f-4192-b44d-5f87b868f041" 18 | self.assertEqual(GuidParser.reg_guid_from_c_format(SAMPLE_DATA_C_FORMAT_GUID), SAMPLE_DATA_REG_FORMAT_GUID) 19 | self.assertEqual(GuidParser.c_guid_from_reg_format(SAMPLE_DATA_REG_FORMAT_GUID), SAMPLE_DATA_C_FORMAT_GUID) 20 | 21 | uuid_from_c = GuidParser.uuid_from_guidstring(SAMPLE_DATA_C_FORMAT_GUID) 22 | uuid_from_reg = GuidParser.uuid_from_guidstring(SAMPLE_DATA_REG_FORMAT_GUID) 23 | 24 | self.assertEqual( 25 | GuidParser.reg_guid_str_from_uuid(uuid_from_c), GuidParser.reg_guid_str_from_uuid(uuid_from_reg) 26 | ) 27 | 28 | self.assertEqual(GuidParser.c_guid_str_from_uuid(uuid_from_c), GuidParser.c_guid_str_from_uuid(uuid_from_reg)) 29 | 30 | def test_invalid_reg_format_to_uuid(self): 31 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f4192b44d-0087b868f041" 32 | u = GuidParser.uuid_from_guidstring(SAMPLE_DATA_REG_FORMAT_GUID) 33 | self.assertIsNone(u) 34 | 35 | def test_invalid_reg_format_to_c_format(self): 36 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f4192b44d-0087b868f041" 37 | u = GuidParser.c_guid_from_reg_format(SAMPLE_DATA_REG_FORMAT_GUID) 38 | self.assertEqual("", u) 39 | 40 | def test_invalid_c_format_to_uuid(self): 41 | SAMPLE_DATA_C_FORMAT_GUID = "{0x66341ae8, 0x668f 0x4192 {0xb4, 0x4d, 0x5f, 0x87, 0xb8, 0x68, 0xf0, 0x41}}" 42 | u = GuidParser.uuid_from_guidstring(SAMPLE_DATA_C_FORMAT_GUID) 43 | self.assertIsNone(u) 44 | 45 | def test_invalid_c_format_to_reg(self): 46 | SAMPLE_DATA_C_FORMAT_GUID = ( 47 | "{0x66341ae8, 0x668f4192, 0x1234, {0xb4, 0x4d, 0x5f34, 0x87, 0xb8, 0x68, 0xf0, 0x41}}" # noqa: E501 48 | ) 49 | u = GuidParser.reg_guid_from_c_format(SAMPLE_DATA_C_FORMAT_GUID) 50 | self.assertEqual("", u) 51 | 52 | def test_valid_reg_input_with_brackets(self): 53 | """check the reg_format functions are able to handle extra {} as reg format sometimes has brackets""" 54 | SAMPLE_DATA_REG_FORMAT_GUID_WITH = "{66341ae8-668f-4192-b44d-5f87b868f041}" 55 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f-4192-b44d-5f87b868f041" 56 | u = GuidParser.uuid_from_guidstring(SAMPLE_DATA_REG_FORMAT_GUID_WITH) 57 | self.assertEqual(SAMPLE_DATA_REG_FORMAT_GUID, GuidParser.reg_guid_str_from_uuid(u)) 58 | 59 | def test_valid_reg_input_with_spaces(self): 60 | """check the reg_format functions are able to handle extra spaces""" 61 | SAMPLE_DATA_REG_FORMAT_GUID_WITH = " 66341ae8-668f-4192-b44d-5f87b868f041 " 62 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f-4192-b44d-5f87b868f041" 63 | u = GuidParser.uuid_from_guidstring(SAMPLE_DATA_REG_FORMAT_GUID_WITH) 64 | self.assertEqual(SAMPLE_DATA_REG_FORMAT_GUID, GuidParser.reg_guid_str_from_uuid(u)) 65 | 66 | def test_valid_c_format_input_with_spaces(self): 67 | """check the c_format functions are able to handle extra spaces""" 68 | SAMPLE_DATA_C_FORMAT_GUID = ( 69 | " { 0x66341ae8, 0x668f, 0x4192, {0xb4, 0x4d, 0x5f, 0x87, 0xb8, 0x68, 0xf0, 0x41 } } " # noqa: E501 70 | ) 71 | SAMPLE_DATA_REG_FORMAT_GUID = "66341ae8-668f-4192-b44d-5f87b868f041" 72 | u = GuidParser.uuid_from_guidstring(SAMPLE_DATA_C_FORMAT_GUID) 73 | 
self.assertEqual(SAMPLE_DATA_REG_FORMAT_GUID, GuidParser.reg_guid_str_from_uuid(u)) 74 | -------------------------------------------------------------------------------- /tests.unit/parsers/test_hash_file_parser.py: -------------------------------------------------------------------------------- 1 | # @file guid_parser_test.py 2 | # Contains unit test routines for the guid parser class. 3 | # 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | 10 | import unittest 11 | 12 | from edk2toollib.uefi.edk2.parsers.base_parser import HashFileParser 13 | 14 | 15 | class TestBaseParser(unittest.TestCase): 16 | def test_parse_new_section(self): 17 | parser = HashFileParser("") 18 | section1 = "[Defines]" 19 | res, sect = parser.ParseNewSection(section1) 20 | self.assertTrue(res) 21 | self.assertEqual(sect, "Defines") 22 | # invalid section 23 | section2 = "[Defines" 24 | res, sect = parser.ParseNewSection(section2) 25 | self.assertFalse(res) 26 | # multiple parts with multiple definitions 27 | section3 = "[Components.X64, Components.IA32]" 28 | res, sect = parser.ParseNewSection(section3) 29 | self.assertTrue(res) 30 | self.assertEqual(sect, "Components") 31 | # try multiple parts on a single 32 | section4 = "[ Defines.Common.Section ]" 33 | res, sect = parser.ParseNewSection(section4) 34 | self.assertTrue(res) 35 | self.assertEqual(sect, "Defines") 36 | 37 | def test_strip_comment(self): 38 | parser = HashFileParser("") 39 | 40 | lines_to_test = [ 41 | ("Test", "\t# this shouldn't show up"), 42 | ("Test", " # test"), 43 | ("MagicLib|Include/Magic", "\t# this shouldn't show up"), 44 | ("MagicLib|Include/Magic", "# test"), 45 | ("", "# this is a comment"), 46 | ("gMyPkgTokenSpaceGuid.MyThing|'Value'|VOID*|0x10000000", " # My Comment"), 47 | ('gMyPkgTokenSpaceGuid.MyThing|"Value"|VOID*|0x10000000', "# My Comment"), 48 | ('gMyPkgTokenSpaceGuid.MyThing|"#Value"|VOID*|0x10000000', "# My Comment"), 49 | ('file_data = "DEFINE TEST DEFINE = "DEFINE_VALUE""', ' # "Test String" # Test String'), 50 | ("file_data = 'DEFINE TEST DEFINE = \"DEFINE_VALUE\"'", ' # "Test String" # Test String'), 51 | ('file_data = "DEFINE TEST DEFINE = "DEFINE_VALUE" \' more to check \'"', ' # "Test String" # Test String'), 52 | ("file_data = 'DEFINE\" # TEMP \" UPDATE '", "# Found a quote"), 53 | (r"test = \"", r" # Temp \""), 54 | ('file_data = "DEFINE TEST DEFINE = "DEFINE\\"_VALUE""', ' # "Test String" # Test String'), 55 | ('file_data = "DEFINE TEST DEFINE = "DEFINE\\\'_VALUE""', ' # "Test String" # Test String'), 56 | ] 57 | 58 | for line in lines_to_test: 59 | self.assertEqual(parser.StripComment(line[0] + line[1]), line[0]) 60 | -------------------------------------------------------------------------------- /tests.unit/parsers/test_inf_parser.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from edk2toollib.uefi.edk2.parsers.inf_parser import InfParser 4 | 5 | INF_EXAMPLE1 = """ 6 | [Defines] 7 | INF_VERSION = 0x00010005 8 | BASE_NAME = TestLib 9 | FILE_GUID = ffffffff-ffff-ffff-ffff-ffffffffffff 10 | MODULE_TYPE = DXE_DRIVER 11 | VERSION_STRING = 1.0 12 | LIBRARY_CLASS = BaseTestLib 13 | 14 | DEFINE MY_PATH = files 15 | 16 | [Sources] 17 | # [Binaries] 18 | File1.c 19 | 20 | [Sources.common] 21 | File2.c 22 | 23 | [Sources.IA32] 24 | # Random Comment 25 | File3.c 26 | # File999.c 27 | 28 | [sources.IA32, sources.X64] 29 | File4.c 30 | 31 | [LibraryClasses] 32 | Library1 33 | 34 | 
[Binaries] 35 | Binary1.efi 36 | 37 | [LibraryClasses.common] 38 | Library2 39 | 40 | [LibraryClasses.IA32] 41 | Library3 42 | 43 | [LibraryClasses.IA32, LibraryClasses.X64] 44 | Library4 45 | 46 | [Sources.AARCH64] 47 | $(MY_PATH)/File5.c 48 | DEFINE MY_PATH = files2 49 | $(MY_PATH)/File6.c 50 | """ 51 | 52 | 53 | def test_inf_parser_scoped_libraryclasses(tmp_path: Path): 54 | """Test that we accurately detect scoped library classes.""" 55 | inf_path = tmp_path / "test.inf" 56 | inf_path.touch() 57 | inf_path.write_text(INF_EXAMPLE1) 58 | 59 | infp = InfParser() 60 | infp.ParseFile(inf_path) 61 | 62 | assert sorted(infp.get_libraries([])) == sorted(["Library1", "Library2"]) 63 | assert sorted(infp.get_libraries(["Common"])) == sorted(["Library1", "Library2"]) 64 | assert sorted(infp.get_libraries(["IA32"])) == sorted(["Library1", "Library2", "Library3", "Library4"]) 65 | assert sorted(infp.get_libraries(["X64"])) == sorted(["Library1", "Library2", "Library4"]) 66 | 67 | 68 | def test_inf_parser_scoped_sources(tmp_path: Path): 69 | """Test that we accurately detect scoped sources.""" 70 | inf_path = tmp_path / "test.inf" 71 | inf_path.touch() 72 | inf_path.write_text(INF_EXAMPLE1) 73 | 74 | infp = InfParser() 75 | infp.ParseFile(inf_path) 76 | 77 | assert sorted(infp.get_sources([])) == sorted(["File1.c", "File2.c"]) 78 | assert sorted(infp.get_sources(["Common"])) == sorted(["File1.c", "File2.c"]) 79 | assert sorted(infp.get_sources(["IA32"])) == sorted(["File1.c", "File2.c", "File3.c", "File4.c"]) 80 | assert sorted(infp.get_sources(["X64"])) == sorted(["File1.c", "File2.c", "File4.c"]) 81 | 82 | 83 | def test_inf_parser_with_defines(tmp_path: Path): 84 | """Tests that we accurately resolve variables if defined in the INF.""" 85 | inf_path = tmp_path / "test.inf" 86 | inf_path.touch() 87 | inf_path.write_text(INF_EXAMPLE1) 88 | 89 | infp = InfParser() 90 | infp.ParseFile(inf_path) 91 | 92 | assert sorted(infp.get_sources(["AARCH64"])) == sorted(["File1.c", "File2.c", "files/File5.c", "files2/File6.c"]) 93 | -------------------------------------------------------------------------------- /tests.unit/test_ansi_handler.py: -------------------------------------------------------------------------------- 1 | ## 2 | # unittest for ansi_handler 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | import logging 9 | import unittest 10 | 11 | from edk2toollib.log.ansi_handler import ColoredFormatter, ColoredStreamHandler 12 | 13 | try: 14 | from StringIO import StringIO 15 | except ImportError: 16 | from io import StringIO 17 | 18 | 19 | class AnsiHandlerTest(unittest.TestCase): 20 | # we are mainly looking for exception to be thrown 21 | 22 | record = logging.makeLogRecord( 23 | { 24 | "name": "", 25 | "level": logging.CRITICAL, 26 | "levelno": logging.CRITICAL, 27 | "levelname": "CRITICAL", 28 | "path": "test_path", 29 | "lineno": 0, 30 | "msg": "Test message", 31 | } 32 | ) 33 | record2 = logging.makeLogRecord( 34 | { 35 | "name": "", 36 | "level": logging.INFO, 37 | "levelno": logging.INFO, 38 | "levelname": "INFO", 39 | "path": "test_path", 40 | "lineno": 0, 41 | "msg": "Test message", 42 | } 43 | ) 44 | record3 = logging.makeLogRecord( 45 | { 46 | "name": "", 47 | "level": logging.ERROR, 48 | "levelno": logging.ERROR, 49 | "levelname": "ERROR", 50 | "path": "test_path", 51 | "lineno": 0, 52 | "msg": ["Logging", "A", "List"], 53 | } 54 | ) 55 | record4 = logging.makeLogRecord( 56 | { 57 | "name": "", 58 | "level": logging.ERROR, 
59 | "levelno": logging.ERROR, 60 | "levelname": "ERROR", 61 | "path": "test_path", 62 | "lineno": 0, 63 | "msg": ("Logging", "A", "Tuple"), 64 | } 65 | ) 66 | record5 = logging.makeLogRecord( 67 | { 68 | "name": "", 69 | "level": logging.ERROR, 70 | "levelno": logging.ERROR, 71 | "levelname": "ERROR", 72 | "path": "test_path", 73 | "lineno": 0, 74 | "msg": "Testing This Works: %s", 75 | "args": ("Test",), 76 | } 77 | ) 78 | 79 | def test_colored_formatter_init(self): 80 | formatter = ColoredFormatter("%(levelname)s - %(message)s") 81 | # if we didn't throw an exception, then we are good 82 | self.assertNotEqual(formatter, None) 83 | 84 | def test_colored_formatter_to_output_ansi(self): 85 | formatter = ColoredFormatter("%(levelname)s - %(message)s") 86 | 87 | output = formatter.format(AnsiHandlerTest.record) 88 | self.assertNotEqual(output, None) 89 | CSI = "\033[" 90 | self.assertGreater(len(output), 0, "We should have some output") 91 | self.assertFalse((CSI not in output), "There was supposed to be a ANSI control code in that %s" % output) 92 | 93 | def test_color_handler_to_strip_ansi(self): 94 | stream = StringIO() 95 | # make sure we set out handler to strip the control sequence 96 | handler = ColoredStreamHandler(stream, strip=True, convert=False) 97 | formatter = ColoredFormatter("%(levelname)s - %(message)s") 98 | handler.formatter = formatter 99 | handler.level = logging.NOTSET 100 | 101 | handler.emit(AnsiHandlerTest.record) 102 | handler.flush() 103 | 104 | CSI = "\033[" 105 | 106 | # check for ANSI escape code in stream 107 | stream.seek(0) 108 | lines = stream.readlines() 109 | self.assertGreater(len(lines), 0, "We should have some output %s" % lines) 110 | for line in lines: 111 | if CSI in line: 112 | self.fail("A control sequence was not stripped! 
%s" % lines) 113 | 114 | def test_color_handler_not_strip_ansi(self): 115 | stream = StringIO() 116 | formatter = ColoredFormatter("%(levelname)s - %(message)s") 117 | handler = ColoredStreamHandler(stream, strip=False, convert=False) 118 | handler.formatter = formatter 119 | handler.level = logging.NOTSET 120 | 121 | handler.emit(AnsiHandlerTest.record2) 122 | handler.flush() 123 | 124 | CSI = "\033[" 125 | 126 | found_csi = False 127 | stream.seek(0) 128 | lines = stream.readlines() 129 | self.assertGreater(len(lines), 0, "We should have some output %s" % lines) 130 | for line in lines: 131 | if CSI in line: 132 | found_csi = True 133 | self.assertTrue(found_csi, "We are supposed to to have found an ANSI control character %s" % lines) 134 | 135 | def test_ansi_handler_with_list(self): 136 | """Tests that the ANSI handler can handle Iterables in the message.""" 137 | stream = StringIO() 138 | formatter = ColoredFormatter("%(levelname)s - %(message)s") 139 | handler = ColoredStreamHandler(stream, strip=False, convert=False) 140 | handler.setFormatter(formatter) 141 | handler.setLevel(logging.INFO) 142 | 143 | handler.emit(AnsiHandlerTest.record3) 144 | handler.emit(AnsiHandlerTest.record4) 145 | handler.emit(AnsiHandlerTest.record5) 146 | handler.flush() 147 | 148 | stream.seek(0) 149 | lines = stream.readlines() 150 | CSI = "\033[31m" # Red 151 | CSI2 = "\033[39m" # Reset 152 | for line in lines: 153 | assert CSI in line and CSI2 in line 154 | -------------------------------------------------------------------------------- /tests.unit/test_bmp_object.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Unit test for the bmp_object class 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | 9 | import unittest 10 | from edk2toollib.uefi.bmp_object import BmpObject 11 | import io 12 | 13 | """ 14 | # to import an image into hex use this 15 | import binascii 16 | filename = image_path 17 | with open(filename, 'rb') as f: 18 | content = f.read() 19 | print(binascii.hexlify(content)) 20 | """ 21 | 22 | hamburger = bytes.fromhex( 23 | ( 24 | "424d3603000000000000360000002800000010000000100000" 25 | "00010018000000000000000000c30e0000c30e00000000000000000000fffffff7f7f7669ccc43" 26 | "87c3307abd186ab12474b92877ba2477be1e70bc2473b83c81be337cbd4789c3fffffffffffff6" 27 | "f6f6608fb92d70a11c666a0b57670d6e7f226fb54280b03a8f82228d661d886d0c6e990955a22e" 28 | "78b986b1d8fffffff0f0f03b77ae257e57219b6920a87316a3661ba56a167c4e2b938d2cac8b19" 29 | "ac7920b18110799f004f9d2f79baffffffffffff146a60157e4c21907026928e289daf33b3d23d" 30 | "bee43fb8dd48b1d036a1a71e976824a47129997b11589cffffff78c0ec249bc9469bc5877cad5d" 31 | "68854053721a335e182f571b2e592e4466515f799192ac9c82ad75c4c040a89edde6e826b0e54b" 32 | "b4db9394b0223554233a58364c6a334c6d3451762f4d75304e742b4569273d5a435271c3bcd661" 33 | "cbc15cbb9df1fafd7995a721395b173862113b692c4f7938557e3f5c7e4365893454812e4b7f32" 34 | "4b7f34496f41506b9db2d0eaf6f5f5f5f54961791f42934a6fdc3e89c42aa1a6297e8e3a5cb534" 35 | "b79a279183324bdd2945d52a38b5333c8c516890abddf4f8f8f82649703e61ca41a5a053a1a25d" 36 | "9db15c9cbd599ac3568fb5588ead5a93aa468e9133867a2c3eb7384089f7f7f7fdfdfd38889369" 37 | "a6b176a8cf5297d32b7fcd267bc92377c42e78b92777bf2975b93785cd4892d3589cba338281f7" 38 | "f7f7ffffff7ab8d2589cd62f79be367cbb3381c61870bb1169b61c71b80d68b73177b3286da92a" 39 | "7cc5297ecc5197cbf4f6f7fcfdfd559ad53e89cf2e7fc32674b6a9c2db2272b61c6eb4b0cbe914" 40 | 
"68b00b5b9e9db6d0377cb72b7cc4277ccbe8f1f8fdfdff2b81cb3e89ccb7d0ec1c6fb4206dac2e" 41 | "6ea51b68a90f60a51b69aa0e63a91461a31764a52470b22579c2eff4fbffffffb2d0eb3f89ca36" 42 | "81c13c82bd4086c091b0ce3a74a5115c990b599bafcae6055ba3085ba17fa5ca8bacbefbfbfbff" 43 | "fffff5f9fdaac7e05394cbb3cdea7faad06c9cc43f75a1a4b9cf125b98226ca7065ca40e65ae84" 44 | "a8becad3d5fbfbfbfffffffffffffafbfcc0d1e0619bcd4e8fc8468ac43d80bb3576aa256fad20" 45 | "6cab1565a8aac8e2e7e9ebf8f8f8ffffff" 46 | ) 47 | ) 48 | 49 | hamburger_lores = bytes.fromhex( 50 | "424df60000000000000076000000280000001000000010" 51 | "000000010004000000000080000000c30e0000c30e000000000000000000000000000000008000" 52 | "008000000080800080000000800080008080000080808000c0c0c0000000ff0000ff000000ffff" 53 | "00ff000000ff00ff00ffff0000ffffff00ff733333333333fff73333333333338ff33333323333" 54 | "313ff32333bbbb33333f833733111138783fbb71111311113883f71111333311138ff319333333" 55 | "999138f13333333333391ff377b3333333b33ff8b333333333333ffb3338338338333ff3383333" 56 | "3333333ff83333833383377fff8388738333378ffff8733333338fff" 57 | ) 58 | 59 | bad_header_burger = bytes.fromhex( 60 | "434df600000000000000760000002800000010000000" 61 | "10000000010004000000000080000000c30e0000c30e0000000000000000000000000000000080" 62 | "00008000000080800080000000800080008080000080808000c0c0c0000000ff0000ff000000ff" 63 | "ff00ff000000ff00ff00ffff0000ffffff00ff733333333333fff73333333333338ff333333233" 64 | "33313ff32333bbbb33333f833733111138783fbb71111311113883f71111333311138ff3193333" 65 | "33999138f13333333333391ff377b3333333b33ff8b333333333333ffb3338338338333ff33833" 66 | "333333333ff83333833383377fff8388738333378ffff8733333338fff" 67 | ) 68 | 69 | bad_size_burger = bytes.fromhex( 70 | "424df60000000000000076000000280000001000000010" 71 | "000000010004000000000080000000c30e0000c30e0000" 72 | "0000000000000000000000000000800000800000008080" 73 | "0080000000800080008080000080808000c0c0c0000000" 74 | "ff0000ff000000ffff00ff000000ff00ff00ffff0000ff" 75 | "ffff00ff733333333333fff73333333333338ff3333332" 76 | "3333313ff32333bbbb33333f833733111138783fbb7111" 77 | "1311113883f71111333311138ff319333333999138f133" 78 | "33333333391ff377b3333333b33ff8b333333333333ffb" 79 | "3338338338333ff33833333333333ff83333833383377f" 80 | "ff8388738333378ffff873333333" 81 | ) 82 | 83 | 84 | class TestBmpObject(unittest.TestCase): 85 | def test_good_header(self): 86 | file = io.BytesIO(hamburger) 87 | bmp = BmpObject(file) 88 | self.assertEqual(bmp.CharB, b"B", "B header should be accurate") 89 | self.assertEqual(bmp.CharM, b"M", "M header should be accurate") 90 | 91 | def test_lores_good_header(self): 92 | file = io.BytesIO(hamburger_lores) 93 | bmp = BmpObject(file) 94 | self.assertEqual(bmp.CharB, b"B", "B header should be accurate") 95 | self.assertEqual(bmp.CharM, b"M", "M header should be accurate") 96 | 97 | def test_get_width_height(self): 98 | file = io.BytesIO(hamburger) 99 | bmp = BmpObject(file) 100 | self.assertEqual(bmp.PixelWidth, 16, "This is a 16 by 16") 101 | self.assertEqual(bmp.PixelHeight, 16, "This is 16 by 16") 102 | 103 | def test_lores_get_width_height(self): 104 | file = io.BytesIO(hamburger_lores) 105 | bmp = BmpObject(file) 106 | self.assertEqual(bmp.PixelWidth, 16, "This is a 16 by 16") 107 | self.assertEqual(bmp.PixelHeight, 16, "This is 16 by 16") 108 | 109 | def test_get_bits(self): 110 | file = io.BytesIO(hamburger_lores) 111 | bmp = BmpObject(file) 112 | self.assertEqual(bmp.BitPerPixel, 4, "should be 4 bit aren't accurate") 113 | 114 | def test_get_24_bits(self): 115 
| file = io.BytesIO(hamburger) 116 | bmp = BmpObject(file) 117 | self.assertEqual(bmp.BitPerPixel, 24, "24 bits aren't accurate") 118 | 119 | def test_bad_header(self): 120 | file = io.BytesIO(bad_header_burger) 121 | bmp = BmpObject(file) 122 | self.assertNotEqual(bmp.CharB, b"B", "B header should be accurate") 123 | self.assertEqual(bmp.BitPerPixel, 4, "24 bits aren't accurate") 124 | 125 | def test_bad_image(self): 126 | file = io.BytesIO(bad_size_burger) 127 | with self.assertRaises(Exception): 128 | BmpObject(file) # we should keep reading pass the data 129 | -------------------------------------------------------------------------------- /tests.unit/test_cat_generator.py: -------------------------------------------------------------------------------- 1 | ## @file 2 | # UnitTest for cat_generator.py based on various architecture/OS 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | 9 | import os 10 | import unittest 11 | from edk2toollib.windows.capsule.cat_generator import CatGenerator 12 | 13 | 14 | class CatGeneratorTest(unittest.TestCase): 15 | def test_win10_OS(self): 16 | o = CatGenerator("x64", "win10") 17 | self.assertEqual(o.OperatingSystem, "10") 18 | 19 | def test_10_OS(self): 20 | o = CatGenerator("x64", "10") 21 | self.assertEqual(o.OperatingSystem, "10") 22 | 23 | def test_10_AU_OS(self): 24 | o = CatGenerator("x64", "10_AU") 25 | self.assertEqual(o.OperatingSystem, "10_AU") 26 | 27 | def test_10_RS2_OS(self): 28 | o = CatGenerator("x64", "10_RS2") 29 | self.assertEqual(o.OperatingSystem, "10_RS2") 30 | 31 | def test_10_RS3_OS(self): 32 | o = CatGenerator("x64", "10_RS3") 33 | self.assertEqual(o.OperatingSystem, "10_RS3") 34 | 35 | def test_10_RS4_OS(self): 36 | o = CatGenerator("x64", "10_RS4") 37 | self.assertEqual(o.OperatingSystem, "10_RS4") 38 | 39 | def test_win10Server_OS(self): 40 | o = CatGenerator("x64", "Server10") 41 | self.assertEqual(o.OperatingSystem, "Server10") 42 | 43 | def test_Server2016_OS(self): 44 | o = CatGenerator("x64", "Server2016") 45 | self.assertEqual(o.OperatingSystem, "Server2016") 46 | 47 | def test_ServerRS2_OS(self): 48 | o = CatGenerator("x64", "ServerRS2") 49 | self.assertEqual(o.OperatingSystem, "ServerRS2") 50 | 51 | def test_ServerRS3_OS(self): 52 | o = CatGenerator("x64", "ServerRS3") 53 | self.assertEqual(o.OperatingSystem, "ServerRS3") 54 | 55 | def test_ServerRS4_OS(self): 56 | o = CatGenerator("x64", "ServerRS4") 57 | self.assertEqual(o.OperatingSystem, "ServerRS4") 58 | 59 | def test_invalid_OS(self): 60 | with self.assertRaises(ValueError): 61 | CatGenerator("x64", "Invalid Junk") 62 | 63 | def test_x64_arch(self): 64 | o = CatGenerator("x64", "win10") 65 | self.assertEqual(o.Arch, "X64") 66 | 67 | def test_amd64_arch(self): 68 | o = CatGenerator("amd64", "win10") 69 | self.assertEqual(o.Arch, "X64") 70 | 71 | def test_arm_arch(self): 72 | o = CatGenerator("arm", "win10") 73 | self.assertEqual(o.Arch, "ARM") 74 | 75 | def test_arm64_arch(self): 76 | o = CatGenerator("arm64", "win10") 77 | self.assertEqual(o.Arch, "ARM64") 78 | 79 | def test_aarch64_arch(self): 80 | o = CatGenerator("aarch64", "win10") 81 | self.assertEqual(o.Arch, "ARM64") 82 | 83 | def test_invalid_arch(self): 84 | with self.assertRaises(ValueError): 85 | CatGenerator("Invalid Arch", "win10") 86 | 87 | def test_invalid_path_to_tool(self): 88 | o = CatGenerator("amd64", "10") 89 | with self.assertRaises(Exception) as cm: 90 | o.MakeCat("garbage", os.path.join("c:", "test", "badpath", 
"inf2cat.exe")) 91 | self.assertTrue(str(cm.exception).startswith("Can't find Inf2Cat on this machine.")) 92 | -------------------------------------------------------------------------------- /tests.unit/test_dsc.py: -------------------------------------------------------------------------------- 1 | # @file dsc_test.py 2 | # Tests for the data model for the EDK II DSC 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | import unittest 9 | from edk2toollib.uefi.edk2.build_objects.dsc import dsc 10 | from edk2toollib.uefi.edk2.build_objects.dsc import library_class 11 | from edk2toollib.uefi.edk2.build_objects.dsc import component 12 | from edk2toollib.uefi.edk2.build_objects.dsc import definition 13 | from edk2toollib.uefi.edk2.build_objects.dsc import dsc_buildoption_section_type 14 | from edk2toollib.uefi.edk2.build_objects.dsc import dsc_pcd_section_type 15 | from edk2toollib.uefi.edk2.build_objects.dsc import dsc_section_type 16 | 17 | 18 | class TestDscObject(unittest.TestCase): 19 | def test_null_creation(self): 20 | d = dsc() 21 | self.assertNotEqual(d, None) 22 | 23 | def test_dsc_multple_defines(self): 24 | # When we add an object, it should overwrite the previous one 25 | d = TestDscObject.create_dsc_object() 26 | d.defines.add(definition("PLATFORM_NAME", "TEST2")) 27 | for define in d.defines: 28 | if define.name == "PLATFORM_NAME": # check to make sure it matches 29 | self.assertEqual(define.value, "TEST2") 30 | 31 | def test_dsc_multple_library_classes(self): 32 | d = dsc() 33 | # When we add an object, it should overwrite the previous one 34 | common_section = dsc_section_type() 35 | d.library_classes[common_section].add(library_class("TEST", "BOB.inf")) 36 | self.assertEqual(len(d.library_classes[common_section]), 1) 37 | # we should override the previous one 38 | d.library_classes[common_section].add(library_class("TEST", "BOB2.inf")) 39 | self.assertEqual(len(d.library_classes[common_section]), 1) 40 | for lib in d.library_classes[common_section]: 41 | self.assertEqual(lib.inf, "BOB2.inf") # make sure we overrode it 42 | self.assertEqual(len(d.library_classes[common_section]), 1) 43 | 44 | # make sure we can add a library to a different section and that 45 | IA32_section = dsc_section_type(arch="IA32") 46 | self.assertEqual(len(d.library_classes[IA32_section]), 0) 47 | d.library_classes[IA32_section].add(library_class("NULL", "BOB1.inf")) 48 | self.assertEqual(len(d.library_classes[IA32_section]), 1) 49 | d.library_classes[IA32_section].add(library_class("NULL", "BOB2.inf")) 50 | self.assertEqual(len(d.library_classes[IA32_section]), 2) 51 | 52 | def test_get_library_classes(self): 53 | """This serves more as an example of how to walk the DSC to get a library class for a componenet""" 54 | pass 55 | 56 | def test_put_in_bad_things(self): 57 | d = dsc() 58 | # make sure we can't add stuff to d.defines 59 | with self.assertRaises(ValueError): 60 | d.defines.add(library_class("NULL", "TEST.inf")) 61 | # make sure we can't add stuff to skus 62 | with self.assertRaises(ValueError): 63 | d.skus.add(library_class("TEST", "TEST.inf")) 64 | # make sure we can't add stuff to skus 65 | with self.assertRaises(ValueError): 66 | d.default_stores.add(component("TEST", "TEST.inf")) 67 | 68 | common_section = dsc_section_type() 69 | build_opt_section = dsc_buildoption_section_type() 70 | pcd_section = dsc_pcd_section_type("FEATUREFLAG") 71 | 72 | # now to check the build options 73 | d.build_options[build_opt_section] = set() 
74 | with self.assertRaises(ValueError): 75 | d.build_options[pcd_section] = set() 76 | with self.assertRaises(ValueError): 77 | d.build_options[common_section] = set() 78 | with self.assertRaises(ValueError): 79 | d.build_options[build_opt_section].add(library_class("TEST", "TEST.inf")) 80 | # NEXTVER: once the adding logic is implemented, this will need to be redone 81 | with self.assertRaises(ValueError): 82 | d.build_options[build_opt_section] = set() 83 | 84 | # now to check the pcds 85 | d.pcds[pcd_section] = set() 86 | with self.assertRaises(ValueError): 87 | d.pcds[build_opt_section] = set() 88 | with self.assertRaises(ValueError): 89 | d.pcds[pcd_section].add(library_class("TEST", "TEST.inf")) 90 | # NEXTVER: once the adding logic is implemented, this will need to be redone 91 | with self.assertRaises(ValueError): 92 | d.pcds[pcd_section] = set() 93 | 94 | # now to check the library classes 95 | d.library_classes[common_section] = set() 96 | with self.assertRaises(ValueError): 97 | d.library_classes[build_opt_section] = set() 98 | with self.assertRaises(ValueError): 99 | d.library_classes[common_section].add(component("TEST.inf")) 100 | # NEXTVER: once the adding logic is implemented, this will need to be redone 101 | with self.assertRaises(ValueError): 102 | d.library_classes[common_section] = set() 103 | 104 | # now to check the components 105 | d.components[common_section] = set() 106 | with self.assertRaises(ValueError): 107 | d.components[build_opt_section] = set() 108 | with self.assertRaises(ValueError): 109 | d.components[common_section].add(library_class("TEST", "TEST.inf")) 110 | # NEXTVER: once the adding logic is implemented, this will need to be redone 111 | with self.assertRaises(ValueError): 112 | d.components[common_section] = set() 113 | 114 | @staticmethod 115 | def create_dsc_object(): 116 | # Normally we would just read the dsc object 117 | d = dsc() 118 | # first add the defines 119 | d.defines.add(definition("PLATFORM_NAME", "TEST")) 120 | d.defines.add(definition("PLATFORM_GUID", "EB216561-961F-47EE-9EF9-CA426EF547C2")) 121 | d.defines.add(definition("OUTPUT_DIRECTORY", "Build/TEST")) 122 | d.defines.add(definition("SUPPORTED_ARCHITECTURES", "IA32 X64 AARCH64")) 123 | 124 | # Next add some library classes 125 | default_section = dsc_section_type() 126 | d.library_classes[default_section].add(library_class("NULL", "BOB.inf")) 127 | 128 | # Next add a component 129 | return d 130 | -------------------------------------------------------------------------------- /tests.unit/test_fmp_capsule_header.py: -------------------------------------------------------------------------------- 1 | # @file fmp_capsule_header_test.py 2 | # Contains unit test routines for the FmpCapsuleHeaderClass from 3 | # edk2toollib.uefi.fmp_capsule_header. 
4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | import unittest 10 | import pytest 11 | from edk2toollib.uefi.fmp_capsule_header import FmpCapsuleHeaderClass 12 | 13 | 14 | class TestFmpCapsuleHeaderClass(unittest.TestCase): 15 | @pytest.mark.skip(reason="test is incomplete") 16 | def test_should_successfully_decode_test_payload(self): 17 | pass 18 | 19 | def test_embedded_driver_count_should_track_additions(self): 20 | test_header = FmpCapsuleHeaderClass() 21 | self.assertEqual(test_header.EmbeddedDriverCount, 0) 22 | test_header.AddEmbeddedDriver(b"dummydriver") 23 | self.assertEqual(test_header.EmbeddedDriverCount, 1) 24 | test_header.AddEmbeddedDriver(b"dummydriver2") 25 | self.assertEqual(test_header.EmbeddedDriverCount, 2) 26 | 27 | def test_payload_item_count_should_track_additions(self): 28 | test_header = FmpCapsuleHeaderClass() 29 | self.assertEqual(test_header.PayloadItemCount, 0) 30 | test_header.AddFmpCapsuleImageHeader(b"dummyheader") 31 | self.assertEqual(test_header.PayloadItemCount, 1) 32 | test_header.AddFmpCapsuleImageHeader(b"dummyheader2") 33 | self.assertEqual(test_header.PayloadItemCount, 2) 34 | 35 | def test_encoding_twice_should_yield_identical_results(self): 36 | test_header = FmpCapsuleHeaderClass() 37 | test_header.AddEmbeddedDriver(b"dummydriver") 38 | encode_1 = test_header.Encode() 39 | encode_2 = test_header.Encode() 40 | self.assertEqual(encode_1, encode_2) 41 | -------------------------------------------------------------------------------- /tests.unit/test_status_codes.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Code to test UEFI status code module 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | import unittest 9 | from edk2toollib.uefi.status_codes import UefiStatusCode 10 | 11 | 12 | class TestUefiStatusCodes(unittest.TestCase): 13 | def test_Hex64ToString_NotError(self): 14 | StatusCode = "0x0000000000000000" 15 | self.assertEqual(UefiStatusCode().ConvertHexString64ToString(StatusCode), "Success") 16 | 17 | def test_Hex64ToString_ErrorNotFound(self): 18 | StatusCode = "0x800000000000000E" 19 | self.assertEqual(UefiStatusCode().ConvertHexString64ToString(StatusCode), "Not Found") 20 | 21 | def test_Hex64ToString_Error_Invalid_Len(self): 22 | StatusCode = hex(len(UefiStatusCode.ErrorCodeStrings) + 0x8000000000000000) 23 | self.assertEqual(UefiStatusCode().ConvertHexString64ToString(StatusCode), "Undefined StatusCode") 24 | 25 | def test_Hex32ToString_NotError(self): 26 | StatusCode = "0x00000000" 27 | self.assertEqual(UefiStatusCode().ConvertHexString32ToString(StatusCode), "Success") 28 | 29 | def test_Hex32ToString_ErrorInvalidParameter(self): 30 | StatusCode = "0x80000002" 31 | self.assertEqual(UefiStatusCode().ConvertHexString32ToString(StatusCode), "Invalid Parameter") 32 | 33 | def test_Hex32ToString_Error_Invalid_Len(self): 34 | StatusCode = hex(len(UefiStatusCode.ErrorCodeStrings) + 0x80000000) 35 | self.assertEqual(UefiStatusCode().ConvertHexString32ToString(StatusCode), "Undefined StatusCode") 36 | 37 | def test_Hex32ToString_NonError_Invalid_Len(self): 38 | StatusCode = hex(len(UefiStatusCode.NonErrorCodeStrings)) 39 | self.assertEqual(UefiStatusCode().ConvertHexString32ToString(StatusCode), "Undefined StatusCode") 40 | -------------------------------------------------------------------------------- /tests.unit/test_string_handler.py: 
-------------------------------------------------------------------------------- 1 | # @file string_handler_test.py 2 | # Contains unit test routines for the string_handler functions 3 | # 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | 10 | import unittest 11 | import logging 12 | from edk2toollib.log.string_handler import StringStreamHandler 13 | 14 | 15 | class TestStringStreamHandler(unittest.TestCase): 16 | def test_init(self): 17 | handler = StringStreamHandler() 18 | self.assertNotEqual(handler, None) 19 | 20 | def create_record(self, message="TEST", level=logging.INFO, name=""): 21 | return logging.LogRecord(name, level, __file__, 0, message, [], None) 22 | 23 | def test_readlines(self): 24 | # create the handler 25 | handler = StringStreamHandler() 26 | handler.setLevel(logging.DEBUG) 27 | LINES_TO_DEBUG = 10 28 | # create some records for it to process 29 | for i in range(LINES_TO_DEBUG): 30 | rec = self.create_record(f"test{i}") 31 | handler.handle(rec) 32 | # check to make sure we don't have any 33 | self.assertEqual(len(handler.readlines()), 0, "We shouldn't have anything because our stream is at the end") 34 | # go to the beginning and read the streams 35 | handler.seek_start() 36 | self.assertEqual(len(handler.readlines()), LINES_TO_DEBUG, "We should have at least a few") 37 | # go to the beginning but then back to the end 38 | handler.seek_start() 39 | handler.seek_end() 40 | self.assertEqual(len(handler.readlines()), 0, "We shouldn't have anything because our stream is at the end") 41 | -------------------------------------------------------------------------------- /tests.unit/test_tpm2_defs.py: -------------------------------------------------------------------------------- 1 | # @file tpm2_defs_test.py 2 | # This file contains utility classes to help interpret definitions from the 3 | # Tpm20.h header file in TianoCore. 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | import unittest 10 | import edk2toollib.tpm.tpm2_defs as t2d 11 | 12 | 13 | class TestCommandCode(unittest.TestCase): 14 | def test_get_code_returns_codes(self): 15 | self.assertEqual(t2d.CommandCode.get_code("TPM_CC_Clear"), 0x00000126) 16 | self.assertEqual(t2d.CommandCode.get_code("TPM_CC_ActivateCredential"), 0x00000147) 17 | 18 | def test_get_code_returns_none_if_not_found(self): 19 | self.assertEqual(t2d.CommandCode.get_code("I_AM_NOT_A_VALID_CODE"), None) 20 | self.assertEqual(t2d.CommandCode.get_code(None), None) 21 | 22 | def test_get_string_returns_strings(self): 23 | self.assertEqual(t2d.CommandCode.get_string(0x00000126), "TPM_CC_Clear") 24 | self.assertEqual(t2d.CommandCode.get_string(0x00000147), "TPM_CC_ActivateCredential") 25 | 26 | def test_get_string_returns_none_if_not_found(self): 27 | self.assertEqual(t2d.CommandCode.get_string(0xFFFFFFFF), None) 28 | -------------------------------------------------------------------------------- /tests.unit/test_tpm2_stream.py: -------------------------------------------------------------------------------- 1 | # @file tpm2_stream_test.py 2 | # This file contains utility classes to help marshal and un-marshal data to/from the TPM. 
3 | # 4 | # 5 | # Copyright (c) Microsoft Corporation 6 | # 7 | # SPDX-License-Identifier: BSD-2-Clause-Patent 8 | ## 9 | 10 | 11 | import unittest 12 | import struct 13 | from edk2toollib.tpm import tpm2_defs as Tpm2Defs 14 | from edk2toollib.tpm import tpm2_stream as Tpm2Stream 15 | 16 | 17 | class Tpm2StreamElement(unittest.TestCase): 18 | def test_object_has_zero_size_by_default(self): 19 | so = Tpm2Stream.Tpm2StreamElement() 20 | self.assertEqual(so.get_size(), 0) 21 | 22 | 23 | class Tpm2CommandHeader(unittest.TestCase): 24 | def test_ch_marshals_correctly(self): 25 | ch1 = Tpm2Stream.TPM2_COMMAND_HEADER(0x4321, 0x00000000, 0xDEADBEEF) 26 | ch2 = Tpm2Stream.TPM2_COMMAND_HEADER(0x8001, 0x0000000A, Tpm2Defs.TPM_CC_Clear) 27 | 28 | self.assertEqual(ch1.marshal(), bytearray.fromhex("432100000000DEADBEEF")) 29 | self.assertEqual(ch2.marshal(), bytearray.fromhex("80010000000A") + struct.pack(">L", Tpm2Defs.TPM_CC_Clear)) 30 | 31 | def test_ch_has_correct_size(self): 32 | ch1 = Tpm2Stream.TPM2_COMMAND_HEADER(0x4321, 0x00000000, 0xDEADBEEF) 33 | self.assertEqual(ch1.get_size(), 0x0A) 34 | 35 | def test_ch_size_can_be_updated(self): 36 | ch1 = Tpm2Stream.TPM2_COMMAND_HEADER(0x4321, 0x00000000, 0xDEADBEEF) 37 | self.assertEqual(ch1.marshal(), bytearray.fromhex("432100000000DEADBEEF")) 38 | ch1.update_size(0x1234) 39 | self.assertEqual(ch1.marshal(), bytearray.fromhex("432100001234DEADBEEF")) 40 | -------------------------------------------------------------------------------- /tests.unit/test_uefi_multi_phase.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Code to test UEFI MultiPhase module 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | import unittest 9 | from edk2toollib.uefi.uefi_multi_phase import ( 10 | EfiVariableAttributes, 11 | EFI_VARIABLE_NON_VOLATILE, 12 | EFI_VARIABLE_RUNTIME_ACCESS, 13 | EFI_VARIABLE_BOOTSERVICE_ACCESS, 14 | EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS, 15 | ) 16 | 17 | 18 | class TestUefiMultiphase(unittest.TestCase): 19 | def test_string_conversion(self): 20 | attributes = EfiVariableAttributes( 21 | EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_RUNTIME_ACCESS | EFI_VARIABLE_BOOTSERVICE_ACCESS 22 | ) 23 | string = str(attributes) 24 | 25 | self.assertTrue("EFI_VARIABLE_RUNTIME_ACCESS" in string) 26 | self.assertTrue("EFI_VARIABLE_NON_VOLATILE" in string) 27 | self.assertTrue("EFI_VARIABLE_BOOTSERVICE_ACCESS" in string) 28 | 29 | def test_empty(self): 30 | attributes = EfiVariableAttributes(0) 31 | 32 | self.assertEqual(str(attributes), "") 33 | self.assertEqual(int(attributes), 0) 34 | 35 | def test_int_to_alternate(self): 36 | attributes = EfiVariableAttributes(EFI_VARIABLE_NON_VOLATILE) 37 | self.assertEqual(str(attributes), "EFI_VARIABLE_NON_VOLATILE") 38 | self.assertEqual(attributes.get_short_string(), "NV") 39 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE) 40 | 41 | attributes.update(EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 42 | self.assertEqual(str(attributes), "EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 43 | self.assertEqual(attributes.get_short_string(), "BS,NV") 44 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 45 | 46 | attributes.update( 47 | EFI_VARIABLE_NON_VOLATILE 48 | | EFI_VARIABLE_BOOTSERVICE_ACCESS 49 | | EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS 50 | ) 51 | self.assertEqual( 52 | str(attributes), 53 | 
"EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS,EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE", 54 | ) # noqa 55 | self.assertEqual(attributes.get_short_string(), "AT,BS,NV") 56 | self.assertEqual( 57 | int(attributes), 58 | EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS 59 | | EFI_VARIABLE_NON_VOLATILE 60 | | EFI_VARIABLE_BOOTSERVICE_ACCESS, 61 | ) 62 | 63 | def test_string_to_alternate(self): 64 | attributes = EfiVariableAttributes("EFI_VARIABLE_NON_VOLATILE") 65 | self.assertEqual(str(attributes), "EFI_VARIABLE_NON_VOLATILE") 66 | self.assertEqual(attributes.get_short_string(), "NV") 67 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE) 68 | 69 | attributes.update("EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 70 | self.assertEqual(str(attributes), "EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 71 | self.assertEqual(attributes.get_short_string(), "BS,NV") 72 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 73 | 74 | attributes.update( 75 | "EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS,EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE" 76 | ) # noqa 77 | self.assertEqual( 78 | str(attributes), 79 | "EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS,EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE", 80 | ) # noqa 81 | self.assertEqual(attributes.get_short_string(), "AT,BS,NV") 82 | self.assertEqual( 83 | int(attributes), 84 | EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS 85 | | EFI_VARIABLE_NON_VOLATILE 86 | | EFI_VARIABLE_BOOTSERVICE_ACCESS, 87 | ) 88 | 89 | def test_short_string_to_alternate(self): 90 | attributes = EfiVariableAttributes("NV") 91 | self.assertEqual(str(attributes), "EFI_VARIABLE_NON_VOLATILE") 92 | self.assertEqual(attributes.get_short_string(), "NV") 93 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE) 94 | 95 | attributes.update("BS,NV") 96 | self.assertEqual(str(attributes), "EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 97 | self.assertEqual(attributes.get_short_string(), "BS,NV") 98 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 99 | 100 | attributes.update("AT,BS,NV") 101 | self.assertEqual( 102 | str(attributes), 103 | "EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS,EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE", 104 | ) # noqa 105 | self.assertEqual(attributes.get_short_string(), "AT,BS,NV") 106 | self.assertEqual( 107 | int(attributes), 108 | EFI_VARIABLE_TIME_BASED_AUTHENTICATED_WRITE_ACCESS 109 | | EFI_VARIABLE_NON_VOLATILE 110 | | EFI_VARIABLE_BOOTSERVICE_ACCESS, 111 | ) 112 | 113 | def test_with_spaces_to_alternate(self): 114 | attributes = EfiVariableAttributes("BS, NV") 115 | self.assertEqual(str(attributes), "EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 116 | self.assertEqual(attributes.get_short_string(), "BS,NV") 117 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 118 | 119 | attributes.update("EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 120 | self.assertEqual(str(attributes), "EFI_VARIABLE_BOOTSERVICE_ACCESS,EFI_VARIABLE_NON_VOLATILE") 121 | self.assertEqual(attributes.get_short_string(), "BS,NV") 122 | self.assertEqual(int(attributes), EFI_VARIABLE_NON_VOLATILE | EFI_VARIABLE_BOOTSERVICE_ACCESS) 123 | -------------------------------------------------------------------------------- /tests.unit/test_variable_format.py: 
-------------------------------------------------------------------------------- 1 | # @file variable_format_Test.py 2 | # Unit test harness for the VariableFormat module/classes. 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # 6 | # SPDX-License-Identifier: BSD-2-Clause-Patent 7 | ## 8 | 9 | 10 | import unittest 11 | import edk2toollib.uefi.edk2.variable_format as VF 12 | 13 | 14 | class TestVariableHeader(unittest.TestCase): 15 | def test_set_name(self): 16 | var = VF.VariableHeader() 17 | 18 | test_name = "MyNewName" 19 | var.set_name(test_name) 20 | 21 | self.assertEqual(var.Name, test_name) 22 | 23 | def test_get_packed_name(self): 24 | var = VF.VariableHeader() 25 | 26 | test_name = "MyNewName" 27 | var.set_name(test_name) 28 | 29 | test_name_packed = bytes.fromhex("4D0079004E00650077004E0061006D0065000000") 30 | self.assertEqual(var.get_packed_name(), test_name_packed) 31 | -------------------------------------------------------------------------------- /tests.unit/test_variable_policy.py: -------------------------------------------------------------------------------- 1 | # @file 2 | # Unit test harness for the VariablePolicy module/classes. 3 | # 4 | # Copyright (c) Microsoft Corporation 5 | # SPDX-License-Identifier: BSD-2-Clause-Patent 6 | ## 7 | 8 | 9 | import unittest 10 | import uuid 11 | from edk2toollib.uefi.edk2.variable_policy import VariableLockOnVarStatePolicy, VariablePolicyEntry 12 | 13 | 14 | TEST_GUID_1 = uuid.UUID("48B5F961-3F7D-4B88-9BEE-D305ED8256DA") 15 | TEST_GUID_2 = uuid.UUID("65D16747-FCBC-4FAE-A727-7B679A7B23F9") 16 | 17 | TEST_POLICY_ENTRY = b"".fromhex( 18 | "000001006A004600E222FFB0EA4A2547A6E55317FB8FD39C00000000FFFFFFFF000000000000000003AFAFAFC690F5ECF9F887438422486E3CCD8B2001AF45004F00440000004C0061007300740041007400740065006D00700074005300740061007400750073000000" 19 | ) # noqa 20 | TEST_POLICY_ENTRY_BAD_VERSION = b"".fromhex( 21 | "010001006A004600E222FFB0EA4A2547A6E55317FB8FD39C00000000FFFFFFFF000000000000000003AFAFAFC690F5ECF9F887438422486E3CCD8B2001AF45004F00440000004C0061007300740041007400740065006D00700074005300740061007400750073000000" 22 | ) # noqa 23 | TEST_POLICY_ENTRY_BAD_LOCK_TYPE = b"".fromhex( 24 | "000001006A004600E222FFB0EA4A2547A6E55317FB8FD39C00000000FFFFFFFF000000000000000004AFAFAFC690F5ECF9F887438422486E3CCD8B2001AF45004F00440000004C0061007300740041007400740065006D00700074005300740061007400750073000000" 25 | ) # noqa 26 | TEST_POLICY_ENTRY_GUID = uuid.UUID("B0FF22E2-4AEA-4725-A6E5-5317FB8FD39C") 27 | 28 | 29 | class TestVariableLockOnVarStatePolicy(unittest.TestCase): 30 | def test_remaining_buffer(self): 31 | test_vpl = VariableLockOnVarStatePolicy() 32 | test_remainder = b"123" 33 | test_buffer = TEST_GUID_2.bytes_le + b"\x00\x00" + b"\x00A\x00\x00" + test_remainder 34 | 35 | self.assertEqual(test_remainder, test_vpl.decode(test_buffer)) 36 | 37 | def test_missing_name(self): 38 | test_vpl = VariableLockOnVarStatePolicy() 39 | 40 | # Test with no Name field at all. 41 | test1 = TEST_GUID_1.bytes_le + b"\x00\x00" 42 | with self.assertRaises(Exception): 43 | test_vpl.decode(test1) 44 | 45 | # Test with an empty string. 46 | test2 = test1 + b"\x00\x00" 47 | with self.assertRaises(Exception): 48 | test_vpl.decode(test2) 49 | 50 | # Test successful. 51 | test3 = test1 + b"\x00A\x00\x00" 52 | _ = test_vpl.decode(test3) 53 | 54 | def test_malformed_name(self): 55 | test_vpl = VariableLockOnVarStatePolicy() 56 | 57 | # Test with no termination. 
58 | test1 = TEST_GUID_1.bytes_le + b"\x00\x00" + b"\x00A\x00B" 59 | with self.assertRaises(Exception): 60 | test_vpl.decode(test1) 61 | 62 | # Test with an unaligned termination. 63 | test2 = TEST_GUID_1.bytes_le + b"\x00\x00" + b"A\x00B\x00" + b"C" + b"\x00\x00" 64 | with self.assertRaises(Exception): 65 | test_vpl.decode(test2) 66 | 67 | def test_to_string(self): 68 | test_vpl = VariableLockOnVarStatePolicy() 69 | test_buffer = TEST_GUID_2.bytes_le + b"\x00\x00" + b"A\x00B\x00C\x00\x00\x00" 70 | 71 | test_vpl.decode(test_buffer) 72 | 73 | self.assertEqual(test_vpl.Name, "ABC") 74 | 75 | 76 | class TestVariablePolicyEntry(unittest.TestCase): 77 | def test_create_and_to_string(self): 78 | test_vp = VariablePolicyEntry() 79 | to_string = str(test_vp) 80 | 81 | # Check for the LockType string. 82 | self.assertIn("NONE", to_string) 83 | 84 | test_vp.LockPolicyType = VariablePolicyEntry.TYPE_LOCK_ON_CREATE 85 | to_string = str(test_vp) 86 | 87 | # Check for the new LockType string. 88 | self.assertIn("CREATE", to_string) 89 | 90 | def test_csv_formatting(self): 91 | header_row = VariablePolicyEntry.csv_header() 92 | self.assertIn("Namespace", header_row) 93 | self.assertIn("LockPolicyType", header_row) 94 | 95 | test_vp = VariablePolicyEntry() 96 | test_vp.LockPolicyType = VariablePolicyEntry.TYPE_LOCK_ON_CREATE 97 | csv_row = test_vp.csv_row() 98 | self.assertEqual(len(header_row), len(csv_row)) 99 | self.assertIn("ON_CREATE", csv_row) 100 | 101 | def test_decoding(self): 102 | test_vp = VariablePolicyEntry() 103 | test_vp.decode(TEST_POLICY_ENTRY) 104 | 105 | self.assertEqual(test_vp.Namespace, TEST_POLICY_ENTRY_GUID) 106 | self.assertEqual(test_vp.LockPolicyType, VariablePolicyEntry.TYPE_LOCK_ON_VAR_STATE) 107 | self.assertEqual(test_vp.Name, "LastAttemptStatus") 108 | self.assertEqual(test_vp.LockPolicy.Name, "EOD") 109 | 110 | to_string = str(test_vp) 111 | self.assertIn("VAR_STATE", to_string) 112 | self.assertIn("EOD", to_string) 113 | self.assertIn("LastAttemptStatus", to_string) 114 | 115 | def test_decoding_errors(self): 116 | test_vp = VariablePolicyEntry() 117 | 118 | with self.assertRaises(ValueError): 119 | test_vp.decode(TEST_POLICY_ENTRY_BAD_VERSION) 120 | with self.assertRaises(ValueError): 121 | test_vp.decode(TEST_POLICY_ENTRY_BAD_LOCK_TYPE) 122 | -------------------------------------------------------------------------------- /tests.unit/testdata/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tianocore/edk2-pytool-library/aa9b59a747be7fec815fa0e8496679de7bc1f754/tests.unit/testdata/__init__.py --------------------------------------------------------------------------------