├── .github └── dependabot.yml ├── .gitignore ├── README.md ├── build_kernel ├── __init__.py ├── __main__.py ├── builder.py ├── main.py └── utils │ ├── ak3.py │ ├── arch.py │ ├── config.py │ ├── device.py │ ├── dumpvars.py │ ├── logging.py │ ├── make.py │ ├── mkdtboimg.py │ └── toolchain.py ├── config.py ├── device ├── asus │ ├── X00H.py │ ├── X00I.py │ ├── X00P.py │ ├── X00R.py │ └── msm8937.py ├── examples │ └── example.py ├── google │ └── mainline_gki_x86_64.py ├── lenovo │ ├── jd2019.py │ ├── kunlun2.py │ └── sdm710.py ├── msft │ └── talkman.py ├── qcom │ ├── msm8916.py │ ├── msm8996.py │ ├── msm8998.py │ ├── sdm845.py │ ├── sm8150.py │ └── sm8250.py └── xiaomi │ ├── alioth.py │ ├── andromeda.py │ ├── apollo.py │ ├── cas.py │ ├── cepheus.py │ ├── cmi.py │ ├── crux.py │ ├── dagu.py │ ├── elish.py │ ├── enuma.py │ ├── grus.py │ ├── lavender.py │ ├── lmi.py │ ├── munch.py │ ├── nabu.py │ ├── platina.py │ ├── psyche.py │ ├── pyxis.py │ ├── raphael.py │ ├── sdm660.py │ ├── sdm710.py │ ├── sirius.py │ ├── sm8150.py │ ├── sm8250.py │ ├── thyme.py │ ├── umi.py │ ├── vayu.py │ ├── vela.py │ └── whyred.py ├── poetry.lock └── pyproject.toml /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 3 | # Please see the documentation for all configuration options: 4 | # https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "pip" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "daily" 12 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # Distribution / packaging 7 | .Python 8 | build/ 9 | develop-eggs/ 10 | dist/ 11 | downloads/ 12 | eggs/ 13 | .eggs/ 14 | lib/ 15 | lib64/ 16 | parts/ 17 | sdist/ 18 | var/ 19 | wheels/ 20 | pip-wheel-metadata/ 21 | share/python-wheels/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | MANIFEST 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .nox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | *.py,cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Sphinx documentation 52 | docs/_build/ 53 | 54 | # PyBuilder 55 | target/ 56 | 57 | # pyenv 58 | .python-version 59 | 60 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 61 | __pypackages__/ 62 | 63 | # Environments 64 | .env 65 | .venv 66 | env/ 67 | venv/ 68 | ENV/ 69 | env.bak/ 70 | venv.bak/ 71 | roject 72 | 73 | # mkdocs documentation 74 | /site 75 | 76 | # mypy 77 | .mypy_cache/ 78 | .dmypy.json 79 | dmypy.json 80 | 81 | # Pyre type checker 82 | .pyre/ 83 | 84 | # editors 85 | .idea/ 86 | .vscode/ 87 | 88 | # Kernel folder 89 | kernel/ 90 | 91 | # Build artifacts 92 | out/ 93 | 94 | # Toolchains 95 | toolchains/ 96 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # build_kernel 2 | 3 | [![PyPi version](https://img.shields.io/pypi/v/build_kernel)](https://pypi.org/project/build_kernel/) 4 | [![Codacy Badge](https://app.codacy.com/project/badge/Grade/83567b747b614dc7892e1c2c1bf8cbd9)](https://www.codacy.com/gh/SebaUbuntu/android-kernel-builder/dashboard?utm_source=github.com&utm_medium=referral&utm_content=SebaUbuntu/android-kernel-builder&utm_campaign=Badge_Grade) 5 | 6 | Little build system to build Linux 7 | 8 | - Clones AOSP Clang automatically 9 | - Managed with config files 10 | - Automatic AnyKernel3 packing 11 | 12 | Requires Python 3.8 or greater 13 | 14 | ## Installation 15 | 16 | ```sh 17 | pip3 install build_kernel 18 | ``` 19 | 20 | ## Sources preparation 21 | 22 | - Add a config for your kernel in `device/vendor/codename.py` (check the example config in `device/examples/example.py`) 23 | - Add kernel sources in `kernel/` (in the path that you specified in the config) 24 | 25 | ## Instructions 26 | 27 | ```sh 28 | python3 -m build_kernel 29 | ``` 30 | 31 | ## License 32 | 33 | ``` 34 | # 35 | # Copyright (C) 2022 Sebastiano Barezzi 36 | # 37 | # SPDX-License-Identifier: LGPL-3.0-or-later 38 | # 39 | ``` 40 | -------------------------------------------------------------------------------- /build_kernel/__init__.py: -------------------------------------------------------------------------------- 1 | """Android kernel builder module.""" 2 | 3 | from build_kernel.utils.device import register_devices 4 | from pathlib import Path 5 | 6 | # I'm sorry 7 | try: 8 | from config import config 9 | except ModuleNotFoundError: 10 | config = {} 11 | 12 | __version__ = "1.2.0" 13 | 14 | module_path = Path(__file__).parent 15 | current_path = Path.cwd() 16 | 17 | device_path = current_path / "device" 18 | kernel_path = current_path / "kernel" 19 | out_path = current_path / "out" 20 | toolchains_path = current_path / "toolchains" 21 | 22 | register_devices(device_path) 23 | -------------------------------------------------------------------------------- /build_kernel/__main__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from build_kernel.main import main 4 | 5 | if __name__ == '__main__': 6 | main() 7 | -------------------------------------------------------------------------------- /build_kernel/builder.py: -------------------------------------------------------------------------------- 1 | from build_kernel import out_path 2 | from build_kernel.utils.ak3 import AK3Manager 3 | from build_kernel.utils.device import Device, devices 4 | from build_kernel.utils.dumpvars import dumpvars 5 | from build_kernel.utils.logging import LOGI 6 | from build_kernel.utils.make import Make 7 | from build_kernel.utils.mkdtboimg import Dtbo, parse_create_args, parse_dt_entries 8 | from typing import List, Optional, Union 9 | 10 | class Builder: 11 | """Class representing a build instance.""" 12 | def __init__(self, device: Device): 13 | """Initialize the builder.""" 14 | self.device = device 15 | 16 | self.out_path = out_path / self.device.PRODUCT_DEVICE 17 | 18 | self.dtb_out = self.out_path / "dtb.img" 19 | self.dtbo_out = self.out_path / "dtbo.img" 20 | 21 | self.kernel_obj_path = self.out_path / "KERNEL_OBJ" 22 | self.kernel_obj_boot_path = self.kernel_obj_path / "arch" / self.device.TARGET_ARCH / "boot" 23 | self.dtbs_path = self.kernel_obj_boot_path / "dts" 24 | 25 | self.make = Make(self.device) 26 | self.ak3manager = AK3Manager(self.device) 27 | 28 | @classmethod 29 | def from_codename(cls, codename: str): 30 | """Get a Builder object for the given device codename.""" 31 | if not codename in devices: 32 | return None 33 | 34 | return cls(devices[codename]) 35 | 36 | def dumpvars(self): 37 | """Dump the kernel variables to stdout.""" 38 | return dumpvars(self.device) 39 | 40 | def build(self, target: Union[str, Optional[List[str]]] = None): 41 | """Build the kernel and create an AnyKernel3 flashable zip.""" 42 | LOGI("Building defconfig") 43 | self.make.run([self.device.TARGET_KERNEL_CONFIG] + self.device.TARGET_KERNEL_FRAGMENTS) 44 | 45 | LOGI("Building kernel") 46 | self.make.run(target) 47 | 48 | if target: 49 | return 50 | 51 | artifacts = [self.kernel_obj_boot_path / self.device.BOARD_KERNEL_IMAGE_NAME] 52 | 53 | if self.device.BOARD_INCLUDE_DTB_IN_BOOTIMG: 54 | dtb_files = [file for file in self.dtbs_path.rglob("*.dtb")] 55 | assert dtb_files, "No dtb files found" 56 | with self.dtb_out.open("wb") as f: 57 | for dtb_file in dtb_files: 58 | f.write(dtb_file.read_bytes()) 59 | 60 | artifacts.append(self.dtb_out) 61 | 62 | if self.device.BOARD_KERNEL_SEPARATED_DTBO: 63 | dtbo_files = [str(file) for file in self.dtbs_path.rglob("*.dtbo")] 64 | assert dtbo_files, "No dtbo files found" 65 | global_args, _ = parse_create_args([]) 66 | dt_entries = parse_dt_entries(global_args, dtbo_files) 67 | with self.dtbo_out.open("wb") as f: 68 | dtbo = Dtbo(f, page_size=self.device.BOARD_KERNEL_PAGESIZE) 69 | dt_entry_buf = dtbo.add_dt_entries(dt_entries) 70 | dtbo.commit(dt_entry_buf) 71 | 72 | artifacts.append(self.dtbo_out) 73 | 74 | if not artifacts: 75 | LOGI("No artifact found, skipping AK3 zip creation") 76 | return 77 | 78 | LOGI("Creating AnyKernel3 zip") 79 | zip_filename = self.ak3manager.create_ak3_zip(artifacts) 80 | 81 | return zip_filename 82 | 83 | def clean(self): 84 | """Clean output folder.""" 85 | self.make.run("clean") 86 | self.make.run("mrproper") 87 | -------------------------------------------------------------------------------- /build_kernel/main.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser 2 | from build_kernel.builder import Builder 3 | from build_kernel.utils.device import devices 4 | from build_kernel.utils.logging import LOGE, LOGI, setup_logging 5 | 6 | def main(): 7 | setup_logging() 8 | 9 | parser = ArgumentParser(prog='python3 -m kernel_build') 10 | 11 | # Positional arguments 12 | parser.add_argument("device", type=str, help="device codename") 13 | 14 | # Build tasks 15 | parser.add_argument("-c", "--clean", action='store_true', help="cleanup out dir") 16 | 17 | # Build options 18 | parser.add_argument("-v", "--verbose", action='store_true', help="verbose logging") 19 | 20 | args, build_target = parser.parse_known_args() 21 | 22 | builder = Builder.from_codename(args.device) 23 | if not builder: 24 | LOGE(f"Device {args.device} not found") 25 | LOGI("Available devices:\n" + "\n".join(devices.keys())) 26 | return 27 | 28 | builder.dumpvars() 29 | 30 | if args.clean: 31 | builder.clean() 32 | return 33 | 34 | zip_filename = builder.build(build_target) 35 | 36 | LOGI(f"Build completed successfully: {zip_filename}") 37 | -------------------------------------------------------------------------------- /build_kernel/utils/ak3.py: -------------------------------------------------------------------------------- 1 | from build_kernel import out_path 2 | from build_kernel.utils.config import get_config 3 | from build_kernel.utils.device import Device 4 | from build_kernel.utils.make import KERNEL_NAME, KERNEL_VERSION 5 | from datetime import date 6 | from git.repo import Repo 7 | from pathlib import Path 8 | from shutil import copyfile, rmtree, make_archive 9 | from stat import S_IWRITE 10 | from typing import List 11 | 12 | ANYKERNEL3_REMOTE = "https://github.com/osm0sis/AnyKernel3" 13 | 14 | AK3_CONFIG = """\ 15 | # AnyKernel3 Ramdisk Mod Script 16 | # osm0sis @ xda-developers 17 | ## AnyKernel setup 18 | # begin properties 19 | properties() {{ ' 20 | kernel.string={kernel_name} 21 | do.devicecheck=1 22 | do.modules=0 23 | do.systemless=1 24 | do.cleanup=1 25 | do.cleanuponabort=0 26 | device.name1={device.PRODUCT_DEVICE} 27 | '; }} 28 | # shell variables 29 | block={device.TARGET_BLOCK_DEVICE}; 30 | is_slot_device={is_ab}; 31 | ramdisk_compression=auto; 32 | ## AnyKernel methods (DO NOT CHANGE) 33 | # import patching functions/variables - see for reference 34 | . tools/ak3-core.sh; 35 | ## AnyKernel install 36 | {flash_procedure} 37 | ## end install 38 | """ 39 | 40 | FLASH_PROCEDURE_RAMDISK = """\ 41 | dump_boot; 42 | 43 | write_boot; 44 | """ 45 | 46 | FLASH_PROCEDURE_NO_RAMDISK = """\ 47 | split_boot; 48 | 49 | flash_boot; 50 | flash_dtbo; 51 | """ 52 | 53 | INCLUDE_DATE_IN_ZIP_FILENAME = get_config("ak3.include_date_in_zip_filename", False) 54 | 55 | def handle_remove_readonly(func, path, _): 56 | Path(path).chmod(S_IWRITE) 57 | func(path) 58 | 59 | class AK3Manager: 60 | def __init__(self, device: Device): 61 | self.device = device 62 | 63 | self.device_out_path = out_path / self.device.PRODUCT_DEVICE 64 | self.path = self.device_out_path / "ANYKERNEL_OBJ" 65 | 66 | if self.path.is_dir(): 67 | rmtree(self.path, ignore_errors=False, onerror=handle_remove_readonly) 68 | 69 | Repo.clone_from(ANYKERNEL3_REMOTE, self.path, single_branch=True, depth=1) 70 | 71 | def create_ak3_zip(self, artifacts: List[Path]): 72 | assert artifacts, "No artifacts to create zip file from" 73 | 74 | for artifact in artifacts: 75 | assert artifact.is_file(), f"{artifact} is not a file" 76 | 77 | copyfile(artifact, self.path / artifact.name) 78 | 79 | (self.path / "anykernel.sh").write_text(self.get_ak3_config()) 80 | 81 | zip_filename = self.device_out_path / self.get_ak3_zip_filename() 82 | 83 | make_archive(zip_filename, 'zip', self.path) 84 | 85 | return f"{zip_filename}.zip" 86 | 87 | def get_ak3_config(self): 88 | is_ab = '1' if self.device.AB_OTA_UPDATER else '0' 89 | flash_procedure = (FLASH_PROCEDURE_RAMDISK 90 | if not self.device.BOARD_BUILD_SYSTEM_ROOT_IMAGE 91 | else FLASH_PROCEDURE_NO_RAMDISK) 92 | 93 | text = AK3_CONFIG.format(device=self.device, kernel_name=KERNEL_NAME, 94 | is_ab=is_ab, flash_procedure=flash_procedure) 95 | return text 96 | 97 | def get_ak3_zip_filename(self): 98 | filename = [KERNEL_NAME if KERNEL_NAME else "kernel"] 99 | filename.append(self.device.PRODUCT_DEVICE) 100 | if KERNEL_VERSION: 101 | filename.append(f"v{KERNEL_VERSION}") 102 | if INCLUDE_DATE_IN_ZIP_FILENAME: 103 | filename += [date.today().strftime('%Y%m%d')] 104 | 105 | return "-".join(filename) 106 | -------------------------------------------------------------------------------- /build_kernel/utils/arch.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from typing import List 3 | 4 | class _Arch: 5 | _ALL: List[_Arch] = [] 6 | 7 | def __init__(self, name: str, clang_triple_prefix: str): 8 | self.name = name 9 | self.clang_triple_prefix = clang_triple_prefix 10 | 11 | self._ALL.append(self) 12 | 13 | @classmethod 14 | def from_name(cls, name: str): 15 | for arch in cls._ALL: 16 | if name == arch.name: 17 | return arch 18 | 19 | raise ValueError(f"No arch with name: {name}") 20 | 21 | class Arch(_Arch): 22 | ARM = _Arch("arm", "arm-linux-gnu-") 23 | ARM64 = _Arch("arm64", "aarch64-linux-gnu-") 24 | X86 = _Arch("x86", "x86_64-linux-gnu-") 25 | X86_64 = _Arch("x86_64", "x86_64-linux-gnu-") 26 | -------------------------------------------------------------------------------- /build_kernel/utils/config.py: -------------------------------------------------------------------------------- 1 | from build_kernel import config 2 | 3 | def get_config(name: str, default=None): 4 | """Get a property from config.py.""" 5 | if not '.' in name: 6 | if name in config: 7 | value = config[name] 8 | else: 9 | value = default 10 | else: 11 | value = config 12 | for key in name.split('.'): 13 | if not key in value: 14 | value = default 15 | break 16 | value = value[key] 17 | 18 | if value == "": 19 | return default 20 | 21 | return value 22 | -------------------------------------------------------------------------------- /build_kernel/utils/device.py: -------------------------------------------------------------------------------- 1 | from build_kernel.utils.logging import LOGE, format_exception 2 | from importlib import import_module 3 | from pathlib import Path 4 | from pkgutil import iter_modules 5 | from typing import Dict, List, Optional 6 | 7 | class Device: 8 | PRODUCT_DEVICE: str 9 | 10 | TARGET_ARCH: str 11 | TARGET_KERNEL_CONFIG: str 12 | TARGET_KERNEL_FRAGMENTS: List[str] = [] 13 | TARGET_KERNEL_SOURCE: Path 14 | 15 | AB_OTA_UPDATER: bool = False 16 | BOARD_BUILD_SYSTEM_ROOT_IMAGE: bool = False 17 | TARGET_BLOCK_DEVICE: str = "/dev/block/bootdevice/by-name/boot" 18 | 19 | BOARD_KERNEL_IMAGE_NAME: str 20 | BOARD_INCLUDE_DTB_IN_BOOTIMG: bool = False 21 | BOARD_KERNEL_SEPARATED_DTBO: bool = False 22 | BOARD_KERNEL_PAGESIZE: int = 2048 23 | 24 | TARGET_ADDITIONAL_MAKE_FLAGS: List[str] = [] 25 | TARGET_KERNEL_USE_HOST_COMPILER: bool = False 26 | TARGET_KERNEL_CROSS_COMPILE_PREFIX: Optional[str] = None 27 | TARGET_KERNEL_CLANG_COMPILE: bool = True 28 | TARGET_KERNEL_GCC_VERSION: Optional[str] = None 29 | TARGET_KERNEL_CLANG_VERSION: Optional[str] = None 30 | 31 | devices: Dict[str, Device] = {} 32 | 33 | def register_device(device: Device): 34 | devices[device.PRODUCT_DEVICE] = device 35 | 36 | def register_devices(device_path: Path): 37 | """Import all the sections and let them execute register_section().""" 38 | for vendor_name in [folder.name for folder in device_path.iterdir() if folder.is_dir()]: 39 | for device_name in [name for _, name, _ in iter_modules([str(device_path / vendor_name)])]: 40 | try: 41 | import_module(f'device.{vendor_name}.{device_name}') 42 | except Exception as e: 43 | LOGE(f"Error importing device {vendor_name}/{device_name}:\n" 44 | f"{format_exception(e)}") 45 | -------------------------------------------------------------------------------- /build_kernel/utils/dumpvars.py: -------------------------------------------------------------------------------- 1 | from build_kernel import current_path, out_path 2 | from build_kernel.utils.device import Device 3 | from build_kernel.utils.make import KBUILD_BUILD_USER, KBUILD_BUILD_HOST, ENABLE_CCACHE 4 | 5 | def dumpvars(device: Device): 6 | variables = { 7 | "PRODUCT_DEVICE": device.PRODUCT_DEVICE, 8 | "TARGET_ARCH": device.TARGET_ARCH, 9 | "TARGET_KERNEL_SOURCE": current_path / device.TARGET_KERNEL_SOURCE, 10 | "OUT_DIR": out_path / device.PRODUCT_DEVICE, 11 | "BUILD_USER": KBUILD_BUILD_USER, 12 | "BUILD_HOST": KBUILD_BUILD_HOST, 13 | "CCACHE": ENABLE_CCACHE, 14 | } 15 | 16 | print("\n".join([ 17 | "============================================", 18 | *[f"{k}={v}" for k, v in variables.items()], 19 | "============================================", 20 | ])) 21 | -------------------------------------------------------------------------------- /build_kernel/utils/logging.py: -------------------------------------------------------------------------------- 1 | from logging import basicConfig, INFO, debug, error, info, warning 2 | import traceback 3 | 4 | def setup_logging(level = INFO): 5 | basicConfig(format='[%(filename)s:%(lineno)s %(levelname)s] %(funcName)s: %(message)s', 6 | level=level) 7 | 8 | LOGD = debug 9 | LOGE = error 10 | LOGI = info 11 | LOGW = warning 12 | 13 | def format_exception(exception): 14 | return ''.join(traceback.format_exception(type(exception), exception, 15 | exception.__traceback__, 16 | limit=None, chain=True)) 17 | -------------------------------------------------------------------------------- /build_kernel/utils/make.py: -------------------------------------------------------------------------------- 1 | from build_kernel import current_path, out_path 2 | from build_kernel.utils.arch import Arch 3 | from build_kernel.utils.config import get_config 4 | from build_kernel.utils.device import Device 5 | from build_kernel.utils.toolchain import ClangToolchain, GccToolchain 6 | from multiprocessing import cpu_count 7 | import os 8 | from pathlib import Path 9 | import platform 10 | from subprocess import Popen, PIPE, STDOUT 11 | from typing import List, Tuple, Union 12 | 13 | ENABLE_CCACHE = get_config("build.enable_ccache", False) 14 | KERNEL_NAME = get_config("build.kernel_name") 15 | KERNEL_VERSION = get_config("build.kernel_version") 16 | KBUILD_BUILD_USER = get_config("build.kbuild_build_user") 17 | KBUILD_BUILD_HOST = get_config("build.kbuild_build_host") 18 | 19 | SUPPORTED_ENVIRONMENTS: List[Tuple[str, str]] = [ 20 | ("64bit", "ELF"), 21 | ] 22 | """List of supported arch/system combos.""" 23 | 24 | class Make: 25 | """Class representing a make instance.""" 26 | def __init__(self, device: Device): 27 | """Initialize a Make instance.""" 28 | self.device = device 29 | 30 | host_architecture = platform.architecture() 31 | if host_architecture not in SUPPORTED_ENVIRONMENTS: 32 | raise RuntimeError(f"Unsupported environment: {host_architecture}") 33 | 34 | self.kernel_source = current_path / self.device.TARGET_KERNEL_SOURCE 35 | self.out_path = out_path / device.PRODUCT_DEVICE / "KERNEL_OBJ" 36 | self.out_path.mkdir(exist_ok=True, parents=True) 37 | 38 | self.arch = Arch.from_name(self.device.TARGET_ARCH) 39 | 40 | if not device.TARGET_KERNEL_USE_HOST_COMPILER: 41 | if device.TARGET_KERNEL_CLANG_COMPILE: 42 | self.toolchain = (ClangToolchain.from_version(device.TARGET_KERNEL_CLANG_VERSION) 43 | if device.TARGET_KERNEL_CLANG_VERSION is not None 44 | else ClangToolchain.DEFAULT) 45 | else: 46 | self.toolchain = (GccToolchain.from_version(device.TARGET_KERNEL_GCC_VERSION) 47 | if device.TARGET_KERNEL_GCC_VERSION is not None 48 | else GccToolchain.get_default(self.arch)) 49 | elif device.TARGET_KERNEL_CROSS_COMPILE_PREFIX: 50 | self.toolchain = GccToolchain(device.TARGET_KERNEL_CROSS_COMPILE_PREFIX, 51 | device.TARGET_KERNEL_CROSS_COMPILE_PREFIX, None) 52 | else: 53 | self.toolchain = None 54 | 55 | if self.toolchain: 56 | self.toolchain.prepare(self.arch) 57 | 58 | self.path_dirs: List[Path] = [] 59 | if self.toolchain: 60 | self.path_dirs.extend(self.toolchain.get_path_dirs(self.arch)) 61 | 62 | # Create environment variables 63 | self.env_vars = os.environ.copy() 64 | self.env_vars['PATH'] = f"{':'.join([str(path) for path in self.path_dirs])}:{self.env_vars['PATH']}" 65 | 66 | # Create Make flags 67 | self.make_flags = [ 68 | f"O={self.out_path}", 69 | f"ARCH={self.arch.name}", 70 | f"SUBARCH={self.arch.name}", 71 | f"-j{cpu_count()}", 72 | ] 73 | 74 | if self.toolchain: 75 | self.make_flags.extend(self.toolchain.get_make_flags(self.arch)) 76 | 77 | if ENABLE_CCACHE: 78 | self.make_flags.append(f"CC=ccache {self.toolchain.cc}") 79 | else: 80 | self.make_flags.append(f"CC={self.toolchain.cc}") 81 | 82 | localversion = "" 83 | if KERNEL_NAME: 84 | localversion += f"-{KERNEL_NAME}" 85 | if KERNEL_VERSION: 86 | localversion += f"-{KERNEL_VERSION}" 87 | 88 | if localversion: 89 | self.make_flags.append(f"LOCALVERSION={localversion}") 90 | 91 | if KBUILD_BUILD_USER: 92 | self.make_flags.append(f"KBUILD_BUILD_USER={KBUILD_BUILD_USER}") 93 | if KBUILD_BUILD_HOST: 94 | self.make_flags.append(f"KBUILD_BUILD_HOST={KBUILD_BUILD_HOST}") 95 | 96 | self.make_flags += device.TARGET_ADDITIONAL_MAKE_FLAGS 97 | 98 | def run(self, target: Union[str, List[str], None] = None): 99 | command = ["make"] 100 | command.extend(self.make_flags) 101 | if target is not None: 102 | if isinstance(target, str): 103 | command.append(target) 104 | else: 105 | command.extend(target) 106 | 107 | process = Popen(command, env=self.env_vars, stdout=PIPE, stderr=STDOUT, 108 | cwd=self.kernel_source, encoding="UTF-8") 109 | while True: 110 | output = process.stdout.readline() 111 | if output == '' and process.poll() is not None: 112 | break 113 | if output: 114 | print(output.strip()) 115 | rc = process.poll() 116 | if rc != 0: 117 | make_command = "make" 118 | if target is not None: 119 | make_command += f" {target}" 120 | 121 | raise RuntimeError(f"{make_command} failed, return code {rc}") 122 | 123 | return rc 124 | -------------------------------------------------------------------------------- /build_kernel/utils/mkdtboimg.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env python 2 | # Copyright 2017, The Android Open Source Project 3 | # 4 | # Licensed under the Apache License, Version 2.0 (the "License"); 5 | # you may not use this file except in compliance with the License. 6 | # You may obtain a copy of the License at 7 | # 8 | # http://www.apache.org/licenses/LICENSE-2.0 9 | # 10 | # Unless required by applicable law or agreed to in writing, software 11 | # distributed under the License is distributed on an "AS IS" BASIS, 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | # See the License for the specific language governing permissions and 14 | # limitations under the License. 15 | 16 | from __future__ import print_function 17 | 18 | """Tool for packing multiple DTB/DTBO files into a single image""" 19 | 20 | import argparse 21 | import fnmatch 22 | import os 23 | import struct 24 | import zlib 25 | from array import array 26 | from collections import namedtuple 27 | from sys import stdout 28 | 29 | class CompressionFormat(object): 30 | """Enum representing DT compression format for a DT entry. 31 | """ 32 | NO_COMPRESSION = 0x00 33 | ZLIB_COMPRESSION = 0x01 34 | GZIP_COMPRESSION = 0x02 35 | 36 | class DtEntry(object): 37 | """Provides individual DT image file arguments to be added to a DTBO. 38 | 39 | Attributes: 40 | REQUIRED_KEYS_V0: 'keys' needed to be present in the dictionary passed to instantiate 41 | an object of this class when a DTBO header of version 0 is used. 42 | REQUIRED_KEYS_V1: 'keys' needed to be present in the dictionary passed to instantiate 43 | an object of this class when a DTBO header of version 1 is used. 44 | COMPRESSION_FORMAT_MASK: Mask to retrieve compression info for DT entry from flags field 45 | when a DTBO header of version 1 is used. 46 | """ 47 | COMPRESSION_FORMAT_MASK = 0x0f 48 | REQUIRED_KEYS_V0 = ('dt_file', 'dt_size', 'dt_offset', 'id', 'rev', 49 | 'custom0', 'custom1', 'custom2', 'custom3') 50 | REQUIRED_KEYS_V1 = ('dt_file', 'dt_size', 'dt_offset', 'id', 'rev', 51 | 'flags', 'custom0', 'custom1', 'custom2') 52 | 53 | @staticmethod 54 | def __get_number_or_prop(arg): 55 | """Converts string to integer or reads the property from DT image. 56 | 57 | Args: 58 | arg: String containing the argument provided on the command line. 59 | 60 | Returns: 61 | An integer property read from DT file or argument string 62 | converted to integer 63 | """ 64 | 65 | if not arg or arg[0] == '+' or arg[0] == '-': 66 | raise ValueError('Invalid argument passed to DTImage') 67 | if arg[0] == '/': 68 | # TODO(b/XXX): Use pylibfdt to get property value from DT 69 | raise ValueError('Invalid argument passed to DTImage') 70 | else: 71 | base = 10 72 | if arg.startswith('0x') or arg.startswith('0X'): 73 | base = 16 74 | elif arg.startswith('0'): 75 | base = 8 76 | return int(arg, base) 77 | 78 | def __init__(self, **kwargs): 79 | """Constructor for DtEntry object. 80 | 81 | Initializes attributes from dictionary object that contains 82 | values keyed with names equivalent to the class's attributes. 83 | 84 | Args: 85 | kwargs: Dictionary object containing values to instantiate 86 | class members with. Expected keys in dictionary are from 87 | the tuple (_REQUIRED_KEYS) 88 | """ 89 | 90 | self.__version = kwargs['version'] 91 | required_keys = None 92 | if self.__version == 0: 93 | required_keys = self.REQUIRED_KEYS_V0 94 | elif self.__version == 1: 95 | required_keys = self.REQUIRED_KEYS_V1 96 | 97 | missing_keys = set(required_keys) - set(kwargs) 98 | if missing_keys: 99 | raise ValueError('Missing keys in DtEntry constructor: %r' % 100 | sorted(missing_keys)) 101 | 102 | self.__dt_file = kwargs['dt_file'] 103 | self.__dt_offset = kwargs['dt_offset'] 104 | self.__dt_size = kwargs['dt_size'] 105 | self.__id = self.__get_number_or_prop(kwargs['id']) 106 | self.__rev = self.__get_number_or_prop(kwargs['rev']) 107 | if self.__version == 1: 108 | self.__flags = self.__get_number_or_prop(kwargs['flags']) 109 | self.__custom0 = self.__get_number_or_prop(kwargs['custom0']) 110 | self.__custom1 = self.__get_number_or_prop(kwargs['custom1']) 111 | self.__custom2 = self.__get_number_or_prop(kwargs['custom2']) 112 | if self.__version == 0: 113 | self.__custom3 = self.__get_number_or_prop(kwargs['custom3']) 114 | 115 | def __str__(self): 116 | sb = [] 117 | sb.append('{key:>20} = {value:d}'.format(key='dt_size', 118 | value=self.__dt_size)) 119 | sb.append('{key:>20} = {value:d}'.format(key='dt_offset', 120 | value=self.__dt_offset)) 121 | sb.append('{key:>20} = {value:08x}'.format(key='id', 122 | value=self.__id)) 123 | sb.append('{key:>20} = {value:08x}'.format(key='rev', 124 | value=self.__rev)) 125 | if self.__version == 1: 126 | sb.append('{key:>20} = {value:08x}'.format(key='flags', 127 | value=self.__flags)) 128 | sb.append('{key:>20} = {value:08x}'.format(key='custom[0]', 129 | value=self.__custom0)) 130 | sb.append('{key:>20} = {value:08x}'.format(key='custom[1]', 131 | value=self.__custom1)) 132 | sb.append('{key:>20} = {value:08x}'.format(key='custom[2]', 133 | value=self.__custom2)) 134 | if self.__version == 0: 135 | sb.append('{key:>20} = {value:08x}'.format(key='custom[3]', 136 | value=self.__custom3)) 137 | return '\n'.join(sb) 138 | 139 | def compression_info(self): 140 | """CompressionFormat: compression format for DT image file. 141 | 142 | Args: 143 | version: Version of DTBO header, compression is only 144 | supported from version 1. 145 | """ 146 | if self.__version == 0: 147 | return CompressionFormat.NO_COMPRESSION 148 | return self.flags & self.COMPRESSION_FORMAT_MASK 149 | 150 | @property 151 | def dt_file(self): 152 | """file: File handle to the DT image file.""" 153 | return self.__dt_file 154 | 155 | @property 156 | def size(self): 157 | """int: size in bytes of the DT image file.""" 158 | return self.__dt_size 159 | 160 | @size.setter 161 | def size(self, value): 162 | self.__dt_size = value 163 | 164 | @property 165 | def dt_offset(self): 166 | """int: offset in DTBO file for this DT image.""" 167 | return self.__dt_offset 168 | 169 | @dt_offset.setter 170 | def dt_offset(self, value): 171 | self.__dt_offset = value 172 | 173 | @property 174 | def image_id(self): 175 | """int: DT entry _id for this DT image.""" 176 | return self.__id 177 | 178 | @property 179 | def rev(self): 180 | """int: DT entry _rev for this DT image.""" 181 | return self.__rev 182 | 183 | @property 184 | def flags(self): 185 | """int: DT entry _flags for this DT image.""" 186 | return self.__flags 187 | 188 | @property 189 | def custom0(self): 190 | """int: DT entry _custom0 for this DT image.""" 191 | return self.__custom0 192 | 193 | @property 194 | def custom1(self): 195 | """int: DT entry _custom1 for this DT image.""" 196 | return self.__custom1 197 | 198 | @property 199 | def custom2(self): 200 | """int: DT entry custom2 for this DT image.""" 201 | return self.__custom2 202 | 203 | @property 204 | def custom3(self): 205 | """int: DT entry custom3 for this DT image.""" 206 | return self.__custom3 207 | 208 | class Dtbo(object): 209 | """ 210 | Provides parser, reader, writer for dumping and creating Device Tree Blob 211 | Overlay (DTBO) images. 212 | 213 | Attributes: 214 | _DTBO_MAGIC: Device tree table header magic. 215 | _ACPIO_MAGIC: Advanced Configuration and Power Interface table header 216 | magic. 217 | _DT_TABLE_HEADER_SIZE: Size of Device tree table header. 218 | _DT_TABLE_HEADER_INTS: Number of integers in DT table header. 219 | _DT_ENTRY_HEADER_SIZE: Size of Device tree entry header within a DTBO. 220 | _DT_ENTRY_HEADER_INTS: Number of integers in DT entry header. 221 | _GZIP_COMPRESSION_WBITS: Argument 'wbits' for gzip compression 222 | _ZLIB_DECOMPRESSION_WBITS: Argument 'wbits' for zlib/gzip compression 223 | """ 224 | 225 | _DTBO_MAGIC = 0xd7b7ab1e 226 | _ACPIO_MAGIC = 0x41435049 227 | _DT_TABLE_HEADER_SIZE = struct.calcsize('>8I') 228 | _DT_TABLE_HEADER_INTS = 8 229 | _DT_ENTRY_HEADER_SIZE = struct.calcsize('>8I') 230 | _DT_ENTRY_HEADER_INTS = 8 231 | _GZIP_COMPRESSION_WBITS = 31 232 | _ZLIB_DECOMPRESSION_WBITS = 47 233 | 234 | def _update_dt_table_header(self): 235 | """Converts header entries into binary data for DTBO header. 236 | 237 | Packs the current Device tree table header attribute values in 238 | metadata buffer. 239 | """ 240 | struct.pack_into('>8I', self.__metadata, 0, self.magic, 241 | self.total_size, self.header_size, 242 | self.dt_entry_size, self.dt_entry_count, 243 | self.dt_entries_offset, self.page_size, 244 | self.version) 245 | 246 | def _update_dt_entry_header(self, dt_entry, metadata_offset): 247 | """Converts each DT entry header entry into binary data for DTBO file. 248 | 249 | Packs the current device tree table entry attribute into 250 | metadata buffer as device tree entry header. 251 | 252 | Args: 253 | dt_entry: DtEntry object for the header to be packed. 254 | metadata_offset: Offset into metadata buffer to begin writing. 255 | dtbo_offset: Offset where the DT image file for this dt_entry can 256 | be found in the resulting DTBO image. 257 | """ 258 | if self.version == 0: 259 | struct.pack_into('>8I', self.__metadata, metadata_offset, dt_entry.size, 260 | dt_entry.dt_offset, dt_entry.image_id, dt_entry.rev, 261 | dt_entry.custom0, dt_entry.custom1, dt_entry.custom2, 262 | dt_entry.custom3) 263 | elif self.version == 1: 264 | struct.pack_into('>8I', self.__metadata, metadata_offset, dt_entry.size, 265 | dt_entry.dt_offset, dt_entry.image_id, dt_entry.rev, 266 | dt_entry.flags, dt_entry.custom0, dt_entry.custom1, 267 | dt_entry.custom2) 268 | 269 | 270 | def _update_metadata(self): 271 | """Updates the DTBO metadata. 272 | 273 | Initialize the internal metadata buffer and fill it with all Device 274 | Tree table entries and update the DTBO header. 275 | """ 276 | 277 | self.__metadata = array('b', b' ' * self.__metadata_size) 278 | metadata_offset = self.header_size 279 | for dt_entry in self.__dt_entries: 280 | self._update_dt_entry_header(dt_entry, metadata_offset) 281 | metadata_offset += self.dt_entry_size 282 | self._update_dt_table_header() 283 | 284 | def _read_dtbo_header(self, buf): 285 | """Reads DTBO file header into metadata buffer. 286 | 287 | Unpack and read the DTBO table header from given buffer. The 288 | buffer size must exactly be equal to _DT_TABLE_HEADER_SIZE. 289 | 290 | Args: 291 | buf: Bytebuffer read directly from the file of size 292 | _DT_TABLE_HEADER_SIZE. 293 | """ 294 | (self.magic, self.total_size, self.header_size, 295 | self.dt_entry_size, self.dt_entry_count, self.dt_entries_offset, 296 | self.page_size, self.version) = struct.unpack_from('>8I', buf, 0) 297 | 298 | # verify the header 299 | if self.magic != self._DTBO_MAGIC and self.magic != self._ACPIO_MAGIC: 300 | raise ValueError('Invalid magic number 0x%x in DTBO/ACPIO file' % 301 | (self.magic)) 302 | 303 | if self.header_size != self._DT_TABLE_HEADER_SIZE: 304 | raise ValueError('Invalid header size (%d) in DTBO/ACPIO file' % 305 | (self.header_size)) 306 | 307 | if self.dt_entry_size != self._DT_ENTRY_HEADER_SIZE: 308 | raise ValueError('Invalid DT entry header size (%d) in DTBO/ACPIO file' % 309 | (self.dt_entry_size)) 310 | 311 | def _read_dt_entries_from_metadata(self): 312 | """Reads individual DT entry headers from metadata buffer. 313 | 314 | Unpack and read the DTBO DT entry headers from the internal buffer. 315 | The buffer size must exactly be equal to _DT_TABLE_HEADER_SIZE + 316 | (_DT_ENTRY_HEADER_SIZE * dt_entry_count). The method raises exception 317 | if DT entries have already been set for this object. 318 | """ 319 | 320 | if self.__dt_entries: 321 | raise ValueError('DTBO DT entries can be added only once') 322 | 323 | offset = self.dt_entries_offset // 4 324 | params = {} 325 | params['version'] = self.version 326 | params['dt_file'] = None 327 | for i in range(0, self.dt_entry_count): 328 | dt_table_entry = self.__metadata[offset:offset + self._DT_ENTRY_HEADER_INTS] 329 | params['dt_size'] = dt_table_entry[0] 330 | params['dt_offset'] = dt_table_entry[1] 331 | for j in range(2, self._DT_ENTRY_HEADER_INTS): 332 | required_keys = None 333 | if self.version == 0: 334 | required_keys = DtEntry.REQUIRED_KEYS_V0 335 | elif self.version == 1: 336 | required_keys = DtEntry.REQUIRED_KEYS_V1 337 | params[required_keys[j + 1]] = str(dt_table_entry[j]) 338 | dt_entry = DtEntry(**params) 339 | self.__dt_entries.append(dt_entry) 340 | offset += self._DT_ENTRY_HEADER_INTS 341 | 342 | def _read_dtbo_image(self): 343 | """Parse the input file and instantiate this object.""" 344 | 345 | # First check if we have enough to read the header 346 | file_size = os.fstat(self.__file.fileno()).st_size 347 | if file_size < self._DT_TABLE_HEADER_SIZE: 348 | raise ValueError('Invalid DTBO file') 349 | 350 | self.__file.seek(0) 351 | buf = self.__file.read(self._DT_TABLE_HEADER_SIZE) 352 | self._read_dtbo_header(buf) 353 | 354 | self.__metadata_size = (self.header_size + 355 | self.dt_entry_count * self.dt_entry_size) 356 | if file_size < self.__metadata_size: 357 | raise ValueError('Invalid or truncated DTBO file of size %d expected %d' % 358 | file_size, self.__metadata_size) 359 | 360 | num_ints = (self._DT_TABLE_HEADER_INTS + 361 | self.dt_entry_count * self._DT_ENTRY_HEADER_INTS) 362 | if self.dt_entries_offset > self._DT_TABLE_HEADER_SIZE: 363 | num_ints += (self.dt_entries_offset - self._DT_TABLE_HEADER_SIZE) / 4 364 | format_str = '>' + str(num_ints) + 'I' 365 | self.__file.seek(0) 366 | self.__metadata = struct.unpack(format_str, 367 | self.__file.read(self.__metadata_size)) 368 | self._read_dt_entries_from_metadata() 369 | 370 | def _find_dt_entry_with_same_file(self, dt_entry): 371 | """Finds DT Entry that has identical backing DT file. 372 | 373 | Args: 374 | dt_entry: DtEntry object whose 'dtfile' we find for existence in the 375 | current 'dt_entries'. 376 | Returns: 377 | If a match by file path is found, the corresponding DtEntry object 378 | from internal list is returned. If not, 'None' is returned. 379 | """ 380 | 381 | dt_entry_path = os.path.realpath(dt_entry.dt_file.name) 382 | for entry in self.__dt_entries: 383 | entry_path = os.path.realpath(entry.dt_file.name) 384 | if entry_path == dt_entry_path: 385 | return entry 386 | return None 387 | 388 | def __init__(self, file_handle, dt_type='dtb', page_size=None, version=0): 389 | """Constructor for Dtbo Object 390 | 391 | Args: 392 | file_handle: The Dtbo File handle corresponding to this object. 393 | The file handle can be used to write to (in case of 'create') 394 | or read from (in case of 'dump') 395 | """ 396 | 397 | self.__file = file_handle 398 | self.__dt_entries = [] 399 | self.__metadata = None 400 | self.__metadata_size = 0 401 | 402 | # if page_size is given, assume the object is being instantiated to 403 | # create a DTBO file 404 | if page_size: 405 | if dt_type == 'acpi': 406 | self.magic = self._ACPIO_MAGIC 407 | else: 408 | self.magic = self._DTBO_MAGIC 409 | self.total_size = self._DT_TABLE_HEADER_SIZE 410 | self.header_size = self._DT_TABLE_HEADER_SIZE 411 | self.dt_entry_size = self._DT_ENTRY_HEADER_SIZE 412 | self.dt_entry_count = 0 413 | self.dt_entries_offset = self._DT_TABLE_HEADER_SIZE 414 | self.page_size = page_size 415 | self.version = version 416 | self.__metadata_size = self._DT_TABLE_HEADER_SIZE 417 | else: 418 | self._read_dtbo_image() 419 | 420 | def __str__(self): 421 | sb = [] 422 | sb.append('dt_table_header:') 423 | _keys = ('magic', 'total_size', 'header_size', 'dt_entry_size', 424 | 'dt_entry_count', 'dt_entries_offset', 'page_size', 'version') 425 | for key in _keys: 426 | if key == 'magic': 427 | sb.append('{key:>20} = {value:08x}'.format(key=key, 428 | value=self.__dict__[key])) 429 | else: 430 | sb.append('{key:>20} = {value:d}'.format(key=key, 431 | value=self.__dict__[key])) 432 | count = 0 433 | for dt_entry in self.__dt_entries: 434 | sb.append('dt_table_entry[{0:d}]:'.format(count)) 435 | sb.append(str(dt_entry)) 436 | count = count + 1 437 | return '\n'.join(sb) 438 | 439 | @property 440 | def dt_entries(self): 441 | """Returns a list of DtEntry objects found in DTBO file.""" 442 | return self.__dt_entries 443 | 444 | def compress_dt_entry(self, compression_format, dt_entry_file): 445 | """Compresses a DT entry. 446 | 447 | Args: 448 | compression_format: Compression format for DT Entry 449 | dt_entry_file: File handle to read DT entry from. 450 | 451 | Returns: 452 | Compressed DT entry and its length. 453 | 454 | Raises: 455 | ValueError if unrecognized compression format is found. 456 | """ 457 | compress_zlib = zlib.compressobj() # zlib 458 | compress_gzip = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, 459 | zlib.DEFLATED, self._GZIP_COMPRESSION_WBITS) # gzip 460 | compression_obj_dict = { 461 | CompressionFormat.NO_COMPRESSION: None, 462 | CompressionFormat.ZLIB_COMPRESSION: compress_zlib, 463 | CompressionFormat.GZIP_COMPRESSION: compress_gzip, 464 | } 465 | 466 | if compression_format not in compression_obj_dict: 467 | ValueError("Bad compression format %d" % compression_format) 468 | 469 | if compression_format is CompressionFormat.NO_COMPRESSION: 470 | dt_entry = dt_entry_file.read() 471 | else: 472 | compression_object = compression_obj_dict[compression_format] 473 | dt_entry_file.seek(0) 474 | dt_entry = compression_object.compress(dt_entry_file.read()) 475 | dt_entry += compression_object.flush() 476 | return dt_entry, len(dt_entry) 477 | 478 | def add_dt_entries(self, dt_entries): 479 | """Adds DT image files to the DTBO object. 480 | 481 | Adds a list of Dtentry Objects to the DTBO image. The changes are not 482 | committed to the output file until commit() is called. 483 | 484 | Args: 485 | dt_entries: List of DtEntry object to be added. 486 | 487 | Returns: 488 | A buffer containing all DT entries. 489 | 490 | Raises: 491 | ValueError: if the list of DT entries is empty or if a list of DT entries 492 | has already been added to the DTBO. 493 | """ 494 | if not dt_entries: 495 | raise ValueError('Attempted to add empty list of DT entries') 496 | 497 | if self.__dt_entries: 498 | raise ValueError('DTBO DT entries can be added only once') 499 | 500 | dt_entry_count = len(dt_entries) 501 | dt_offset = (self.header_size + 502 | dt_entry_count * self.dt_entry_size) 503 | 504 | dt_entry_buf = b"" 505 | for dt_entry in dt_entries: 506 | if not isinstance(dt_entry, DtEntry): 507 | raise ValueError('Adding invalid DT entry object to DTBO') 508 | entry = self._find_dt_entry_with_same_file(dt_entry) 509 | dt_entry_compression_info = dt_entry.compression_info() 510 | if entry and (entry.compression_info() == dt_entry_compression_info): 511 | dt_entry.dt_offset = entry.dt_offset 512 | dt_entry.size = entry.size 513 | else: 514 | dt_entry.dt_offset = dt_offset 515 | compressed_entry, dt_entry.size = self.compress_dt_entry(dt_entry_compression_info, 516 | dt_entry.dt_file) 517 | dt_entry_buf += compressed_entry 518 | dt_offset += dt_entry.size 519 | self.total_size += dt_entry.size 520 | self.__dt_entries.append(dt_entry) 521 | self.dt_entry_count += 1 522 | self.__metadata_size += self.dt_entry_size 523 | self.total_size += self.dt_entry_size 524 | 525 | return dt_entry_buf 526 | 527 | def extract_dt_file(self, idx, fout, decompress): 528 | """Extract DT Image files embedded in the DTBO file. 529 | 530 | Extracts Device Tree blob image file at given index into a file handle. 531 | 532 | Args: 533 | idx: Index of the DT entry in the DTBO file. 534 | fout: File handle where the DTB at index idx to be extracted into. 535 | decompress: If a DT entry is compressed, decompress it before writing 536 | it to the file handle. 537 | 538 | Raises: 539 | ValueError: if invalid DT entry index or compression format is detected. 540 | """ 541 | if idx > self.dt_entry_count: 542 | raise ValueError('Invalid index %d of DtEntry' % idx) 543 | 544 | size = self.dt_entries[idx].size 545 | offset = self.dt_entries[idx].dt_offset 546 | self.__file.seek(offset, 0) 547 | fout.seek(0) 548 | compression_format = self.dt_entries[idx].compression_info() 549 | if decompress and compression_format: 550 | if (compression_format == CompressionFormat.ZLIB_COMPRESSION or 551 | compression_format == CompressionFormat.GZIP_COMPRESSION): 552 | fout.write(zlib.decompress(self.__file.read(size), self._ZLIB_DECOMPRESSION_WBITS)) 553 | else: 554 | raise ValueError("Unknown compression format detected") 555 | else: 556 | fout.write(self.__file.read(size)) 557 | 558 | def commit(self, dt_entry_buf): 559 | """Write out staged changes to the DTBO object to create a DTBO file. 560 | 561 | Writes a fully instantiated Dtbo Object into the output file using the 562 | file handle present in '_file'. No checks are performed on the object 563 | except for existence of output file handle on the object before writing 564 | out the file. 565 | 566 | Args: 567 | dt_entry_buf: Buffer containing all DT entries. 568 | """ 569 | if not self.__file: 570 | raise ValueError('No file given to write to.') 571 | 572 | if not self.__dt_entries: 573 | raise ValueError('No DT image files to embed into DTBO image given.') 574 | 575 | self._update_metadata() 576 | 577 | self.__file.seek(0) 578 | self.__file.write(self.__metadata) 579 | self.__file.write(dt_entry_buf) 580 | self.__file.flush() 581 | 582 | 583 | def parse_dt_entry(global_args, arglist): 584 | """Parse arguments for single DT entry file. 585 | 586 | Parses command line arguments for single DT image file while 587 | creating a Device tree blob overlay (DTBO). 588 | 589 | Args: 590 | global_args: Dtbo object containing global default values 591 | for DtEntry attributes. 592 | arglist: Command line argument list for this DtEntry. 593 | 594 | Returns: 595 | A Namespace object containing all values to instantiate DtEntry object. 596 | """ 597 | 598 | parser = argparse.ArgumentParser(add_help=False) 599 | parser.add_argument('dt_file', nargs='?', 600 | type=argparse.FileType('rb'), 601 | default=None) 602 | parser.add_argument('--id', type=str, dest='id', action='store', 603 | default=global_args.global_id) 604 | parser.add_argument('--rev', type=str, dest='rev', 605 | action='store', default=global_args.global_rev) 606 | parser.add_argument('--flags', type=str, dest='flags', 607 | action='store', 608 | default=global_args.global_flags) 609 | parser.add_argument('--custom0', type=str, dest='custom0', 610 | action='store', 611 | default=global_args.global_custom0) 612 | parser.add_argument('--custom1', type=str, dest='custom1', 613 | action='store', 614 | default=global_args.global_custom1) 615 | parser.add_argument('--custom2', type=str, dest='custom2', 616 | action='store', 617 | default=global_args.global_custom2) 618 | parser.add_argument('--custom3', type=str, dest='custom3', 619 | action='store', 620 | default=global_args.global_custom3) 621 | return parser.parse_args(arglist) 622 | 623 | 624 | def parse_dt_entries(global_args, arg_list): 625 | """Parse all DT entries from command line. 626 | 627 | Parse all DT image files and their corresponding attribute from 628 | command line 629 | 630 | Args: 631 | global_args: Argument containing default global values for _id, 632 | _rev and customX. 633 | arg_list: The remainder of the command line after global options 634 | DTBO creation have been parsed. 635 | 636 | Returns: 637 | A List of DtEntry objects created after parsing the command line 638 | given in argument. 639 | """ 640 | dt_entries = [] 641 | img_file_idx = [] 642 | idx = 0 643 | # find all positional arguments (i.e. DT image file paths) 644 | for arg in arg_list: 645 | if not arg.startswith("--"): 646 | img_file_idx.append(idx) 647 | idx = idx + 1 648 | 649 | if not img_file_idx: 650 | raise ValueError('Input DT images must be provided') 651 | 652 | total_images = len(img_file_idx) 653 | for idx in range(total_images): 654 | start_idx = img_file_idx[idx] 655 | if idx == total_images - 1: 656 | argv = arg_list[start_idx:] 657 | else: 658 | end_idx = img_file_idx[idx + 1] 659 | argv = arg_list[start_idx:end_idx] 660 | args = parse_dt_entry(global_args, argv) 661 | params = vars(args) 662 | params['version'] = global_args.version 663 | params['dt_offset'] = 0 664 | params['dt_size'] = os.fstat(params['dt_file'].fileno()).st_size 665 | dt_entries.append(DtEntry(**params)) 666 | 667 | return dt_entries 668 | 669 | def parse_config_option(line, is_global, dt_keys, global_key_types): 670 | """Parses a single line from the configuration file. 671 | 672 | Args: 673 | line: String containing the key=value line from the file. 674 | is_global: Boolean indicating if we should parse global or DT entry 675 | specific option. 676 | dt_keys: Tuple containing all valid DT entry and global option strings 677 | in configuration file. 678 | global_key_types: A dict of global options and their corresponding types. It 679 | contains all exclusive valid global option strings in configuration 680 | file that are not repeated in dt entry options. 681 | 682 | Returns: 683 | Returns a tuple for parsed key and value for the option. Also, checks 684 | the key to make sure its valid. 685 | """ 686 | 687 | if line.find('=') == -1: 688 | raise ValueError('Invalid line (%s) in configuration file' % line) 689 | 690 | key, value = (x.strip() for x in line.split('=')) 691 | if is_global and key in global_key_types: 692 | if global_key_types[key] is int: 693 | value = int(value) 694 | elif key not in dt_keys: 695 | raise ValueError('Invalid option (%s) in configuration file' % key) 696 | 697 | return key, value 698 | 699 | def parse_config_file(fin, dt_keys, global_key_types): 700 | """Parses the configuration file for creating DTBO image. 701 | 702 | Args: 703 | fin: File handle for configuration file 704 | is_global: Boolean indicating if we should parse global or DT entry 705 | specific option. 706 | dt_keys: Tuple containing all valid DT entry and global option strings 707 | in configuration file. 708 | global_key_types: A dict of global options and their corresponding types. It 709 | contains all exclusive valid global option strings in configuration 710 | file that are not repeated in dt entry options. 711 | 712 | Returns: 713 | global_args, dt_args: Tuple of a dictionary with global arguments 714 | and a list of dictionaries for all DT entry specific arguments the 715 | following format. 716 | global_args: 717 | {'id' : , 'rev' : ...} 718 | dt_args: 719 | [{'filename' : 'dt_file_name', 'id' : , 720 | 'rev' : ...}, 721 | {'filename' : 'dt_file_name2', 'id' : , 722 | 'rev' : ...}, ... 723 | ] 724 | """ 725 | 726 | # set all global defaults 727 | global_args = dict((k, '0') for k in dt_keys) 728 | global_args['dt_type'] = 'dtb' 729 | global_args['page_size'] = 2048 730 | global_args['version'] = 0 731 | 732 | dt_args = [] 733 | found_dt_entry = False 734 | count = -1 735 | for line in fin: 736 | line = line.rstrip() 737 | if line.lstrip().startswith('#'): 738 | continue 739 | comment_idx = line.find('#') 740 | line = line if comment_idx == -1 else line[0:comment_idx] 741 | if not line or line.isspace(): 742 | continue 743 | if line.startswith((' ', '\t')) and not found_dt_entry: 744 | # This is a global argument 745 | key, value = parse_config_option(line, True, dt_keys, global_key_types) 746 | global_args[key] = value 747 | elif line.find('=') != -1: 748 | key, value = parse_config_option(line, False, dt_keys, global_key_types) 749 | dt_args[-1][key] = value 750 | else: 751 | found_dt_entry = True 752 | count += 1 753 | dt_args.append({}) 754 | dt_args[-1]['filename'] = line.strip() 755 | return global_args, dt_args 756 | 757 | def parse_create_args(arg_list): 758 | """Parse command line arguments for 'create' sub-command. 759 | 760 | Args: 761 | arg_list: All command line arguments except the outfile file name. 762 | 763 | Returns: 764 | The list of remainder of the command line arguments after parsing 765 | for 'create'. 766 | """ 767 | 768 | image_arg_index = 0 769 | for arg in arg_list: 770 | if not arg.startswith("--"): 771 | break 772 | image_arg_index = image_arg_index + 1 773 | 774 | argv = arg_list[0:image_arg_index] 775 | remainder = arg_list[image_arg_index:] 776 | parser = argparse.ArgumentParser(prog='create', add_help=False) 777 | parser.add_argument('--dt_type', type=str, dest='dt_type', 778 | action='store', default='dtb') 779 | parser.add_argument('--page_size', type=int, dest='page_size', 780 | action='store', default=2048) 781 | parser.add_argument('--version', type=int, dest='version', 782 | action='store', default=0) 783 | parser.add_argument('--id', type=str, dest='global_id', 784 | action='store', default='0') 785 | parser.add_argument('--rev', type=str, dest='global_rev', 786 | action='store', default='0') 787 | parser.add_argument('--flags', type=str, dest='global_flags', 788 | action='store', default='0') 789 | parser.add_argument('--custom0', type=str, dest='global_custom0', 790 | action='store', default='0') 791 | parser.add_argument('--custom1', type=str, dest='global_custom1', 792 | action='store', default='0') 793 | parser.add_argument('--custom2', type=str, dest='global_custom2', 794 | action='store', default='0') 795 | parser.add_argument('--custom3', type=str, dest='global_custom3', 796 | action='store', default='0') 797 | args = parser.parse_args(argv) 798 | return args, remainder 799 | 800 | def parse_dump_cmd_args(arglist): 801 | """Parse command line arguments for 'dump' sub-command. 802 | 803 | Args: 804 | arglist: List of all command line arguments including the outfile 805 | file name if exists. 806 | 807 | Returns: 808 | A namespace object of parsed arguments. 809 | """ 810 | 811 | parser = argparse.ArgumentParser(prog='dump') 812 | parser.add_argument('--output', '-o', nargs='?', 813 | type=argparse.FileType('w'), 814 | dest='outfile', 815 | default=stdout) 816 | parser.add_argument('--dtb', '-b', nargs='?', type=str, 817 | dest='dtfilename') 818 | parser.add_argument('--decompress', action='store_true', dest='decompress') 819 | return parser.parse_args(arglist) 820 | 821 | def parse_config_create_cmd_args(arglist): 822 | """Parse command line arguments for 'cfg_create subcommand. 823 | 824 | Args: 825 | arglist: A list of all command line arguments including the 826 | mandatory input configuration file name. 827 | 828 | Returns: 829 | A Namespace object of parsed arguments. 830 | """ 831 | parser = argparse.ArgumentParser(prog='cfg_create') 832 | parser.add_argument('conf_file', nargs='?', 833 | type=argparse.FileType('r'), 834 | default=None) 835 | cwd = os.getcwd() 836 | parser.add_argument('--dtb-dir', '-d', nargs='?', type=str, 837 | dest='dtbdir', default=cwd) 838 | return parser.parse_args(arglist) 839 | 840 | def create_dtbo_image(fout, argv): 841 | """Create Device Tree Blob Overlay image using provided arguments. 842 | 843 | Args: 844 | fout: Output file handle to write to. 845 | argv: list of command line arguments. 846 | """ 847 | 848 | global_args, remainder = parse_create_args(argv) 849 | if not remainder: 850 | raise ValueError('List of dtimages to add to DTBO not provided') 851 | dt_entries = parse_dt_entries(global_args, remainder) 852 | dtbo = Dtbo(fout, global_args.dt_type, global_args.page_size, global_args.version) 853 | dt_entry_buf = dtbo.add_dt_entries(dt_entries) 854 | dtbo.commit(dt_entry_buf) 855 | fout.close() 856 | 857 | def dump_dtbo_image(fin, argv): 858 | """Dump DTBO file. 859 | 860 | Dump Device Tree Blob Overlay metadata as output and the device 861 | tree image files embedded in the DTBO image into file(s) provided 862 | as arguments 863 | 864 | Args: 865 | fin: Input DTBO image files. 866 | argv: list of command line arguments. 867 | """ 868 | dtbo = Dtbo(fin) 869 | args = parse_dump_cmd_args(argv) 870 | if args.dtfilename: 871 | num_entries = len(dtbo.dt_entries) 872 | for idx in range(0, num_entries): 873 | with open(args.dtfilename + '.{:d}'.format(idx), 'wb') as fout: 874 | dtbo.extract_dt_file(idx, fout, args.decompress) 875 | args.outfile.write(str(dtbo) + '\n') 876 | args.outfile.close() 877 | 878 | def create_dtbo_image_from_config(fout, argv): 879 | """Create DTBO file from a configuration file. 880 | 881 | Args: 882 | fout: Output file handle to write to. 883 | argv: list of command line arguments. 884 | """ 885 | args = parse_config_create_cmd_args(argv) 886 | if not args.conf_file: 887 | raise ValueError('Configuration file must be provided') 888 | 889 | _DT_KEYS = ('id', 'rev', 'flags', 'custom0', 'custom1', 'custom2', 'custom3') 890 | _GLOBAL_KEY_TYPES = {'dt_type': str, 'page_size': int, 'version': int} 891 | 892 | global_args, dt_args = parse_config_file(args.conf_file, 893 | _DT_KEYS, _GLOBAL_KEY_TYPES) 894 | version = global_args['version'] 895 | 896 | params = {} 897 | params['version'] = version 898 | dt_entries = [] 899 | for dt_arg in dt_args: 900 | filepath = dt_arg['filename'] 901 | if not os.path.isabs(filepath): 902 | for root, dirnames, filenames in os.walk(args.dtbdir): 903 | for filename in fnmatch.filter(filenames, os.path.basename(filepath)): 904 | filepath = os.path.join(root, filename) 905 | params['dt_file'] = open(filepath, 'rb') 906 | params['dt_offset'] = 0 907 | params['dt_size'] = os.fstat(params['dt_file'].fileno()).st_size 908 | for key in _DT_KEYS: 909 | if key not in dt_arg: 910 | params[key] = global_args[key] 911 | else: 912 | params[key] = dt_arg[key] 913 | dt_entries.append(DtEntry(**params)) 914 | 915 | # Create and write DTBO file 916 | dtbo = Dtbo(fout, global_args['dt_type'], global_args['page_size'], version) 917 | dt_entry_buf = dtbo.add_dt_entries(dt_entries) 918 | dtbo.commit(dt_entry_buf) 919 | fout.close() 920 | 921 | def print_default_usage(progname): 922 | """Prints program's default help string. 923 | 924 | Args: 925 | progname: This program's name. 926 | """ 927 | sb = [] 928 | sb.append(' ' + progname + ' help all') 929 | sb.append(' ' + progname + ' help \n') 930 | sb.append(' commands:') 931 | sb.append(' help, dump, create, cfg_create') 932 | print('\n'.join(sb)) 933 | 934 | def print_dump_usage(progname): 935 | """Prints usage for 'dump' sub-command. 936 | 937 | Args: 938 | progname: This program's name. 939 | """ 940 | sb = [] 941 | sb.append(' ' + progname + ' dump (