├── .gitignore ├── bazel_parser ├── consts.py ├── kbuild_writer.py ├── parse.py ├── parser_impl.py ├── pylintrc.toml ├── ruff.toml ├── rules.py └── utils.py ├── blacklisted_files.txt ├── blobs ├── blob_list.py ├── blobs.py └── utils.py ├── fdt_extra.py ├── find_closest_commit.sh ├── flash_fastboot.sh ├── fod_dim_lut_calibration.py ├── log.sh ├── manifest_to_modules_data.py ├── motorola-merge-kernel-modules.sh ├── motorola-split-kernel-modules.sh ├── prepare-kernel-tree.sh ├── requirements.txt ├── sepolicy ├── .gitignore ├── cil_rule.py ├── class_set.py ├── classmap.py ├── classmap_generator.c ├── conditional_type.py ├── config.py ├── decompile_cil.py ├── macro.py ├── match.py ├── match_extract.py ├── match_replace.py ├── mld.py ├── output.py ├── pylintrc.toml ├── ruff.toml ├── rule.py ├── source_rule.py └── utils.py ├── sort_dts.py ├── source_available_files.txt └── subtree_modules_data.sh /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled 2 | __pycache__/ 3 | 4 | # Virtual env 5 | venv/ 6 | -------------------------------------------------------------------------------- /bazel_parser/consts.py: -------------------------------------------------------------------------------- 1 | _common_outs = [ 2 | "System.map", 3 | "modules.builtin", 4 | "modules.builtin.modinfo", 5 | "vmlinux", 6 | "vmlinux.symvers", 7 | ] 8 | 9 | DEFAULT_IMAGES = [ 10 | "Image", 11 | "Image.lz4", 12 | "Image.gz", 13 | ] 14 | 15 | DEFAULT_GKI_OUTS = _common_outs + DEFAULT_IMAGES 16 | X86_64_OUTS = _common_outs + ["bzImage"] 17 | aarch64_outs = DEFAULT_GKI_OUTS 18 | x86_64_outs = X86_64_OUTS 19 | -------------------------------------------------------------------------------- /bazel_parser/kbuild_writer.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TextIO 4 | 5 | from parser_impl import BazelParser, DdkModule, DdkSubModule, GenRule 6 | 7 | 8 | def write_submodule_kbuild( 9 | o: TextIO, 10 | submodule: DdkSubModule | DdkModule, 11 | bazel_parser: BazelParser, 12 | ): 13 | if submodule.out is None: 14 | return 15 | 16 | module_obj_name = submodule.out 17 | assert module_obj_name.endswith('.ko') 18 | module_obj_name = module_obj_name[: -len('.ko')] 19 | 20 | o.write(f'obj-m += {module_obj_name}.o\n') 21 | 22 | for src in submodule.srcs: 23 | if src.startswith(':'): 24 | target = bazel_parser.lookup_target(src) 25 | assert isinstance(target, GenRule) 26 | elif src.endswith('.c'): 27 | src = src[: -len('.c')] + '.o' 28 | o.write(f'{module_obj_name}-y += {src}\n') 29 | elif src.endswith('.h'): 30 | continue 31 | else: 32 | assert False, src 33 | 34 | if ( 35 | hasattr(submodule, 'conditional_srcs') 36 | and submodule.conditional_srcs is not None 37 | ): 38 | # TODO 39 | pass 40 | 41 | cflags = [] 42 | if submodule.local_defines is not None: 43 | for define in submodule.local_defines: 44 | cflags.append(f'-D{define}') 45 | if hasattr(submodule, 'includes') and submodule.includes is not None: 46 | for include in submodule.includes: 47 | cflags.append(f'-I$(src)/{include}') 48 | if hasattr(submodule, 'copts') and submodule.copts is not None: 49 | cflags += submodule.copts 50 | 51 | if cflags: 52 | o.write(f'CFLAGS_{module_obj_name}.o := \\\n') 53 | for cflag in cflags: 54 | o.write(f'\t{cflag}') 55 | if cflag is not cflags[-1]: 56 | o.write(' \\\n') 57 | else: 58 | o.write('\n') 59 | 60 | o.write('\n') 61 | 62 | 63 | def _write_kbuild(o: TextIO, bazel_parser: BazelParser): 64 | ddk_modules = bazel_parser.lookup_targets(DdkModule) 65 | 66 | for ddk_module in ddk_modules: 67 | soc, variant, _ = ddk_module.name.split('_', 2) 68 | 69 | o.write( 70 | f""" 71 | ifeq ($(CONFIG_ARCH_{soc.upper()}), y) 72 | ifeq ($(CONFIG_LOCALVERSION, -{variant})) 73 | """.lstrip() 74 | ) 75 | 76 | write_submodule_kbuild(o, ddk_module, bazel_parser) 77 | 78 | for dep in ddk_module.deps: 79 | submodule = bazel_parser.lookup_target(dep) 80 | 81 | if isinstance(submodule, DdkSubModule): 82 | write_submodule_kbuild(o, submodule, bazel_parser) 83 | else: 84 | print(submodule) 85 | 86 | o.write( 87 | """ 88 | endif 89 | endif 90 | """.lstrip() 91 | ) 92 | 93 | if ddk_module != ddk_modules[-1]: 94 | o.write('\n') 95 | 96 | 97 | def write_kbuild(kbuild_path: str, bazel_parser: BazelParser): 98 | with open(kbuild_path, 'w', encoding='utf-8') as o: 99 | _write_kbuild(o, bazel_parser) 100 | -------------------------------------------------------------------------------- /bazel_parser/parse.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import argparse 4 | 5 | from kbuild_writer import write_kbuild 6 | from parser_impl import BazelParser 7 | 8 | args_parser = argparse.ArgumentParser('Parse bazel scripts') 9 | args_parser.add_argument('script', help='Script to parse') 10 | args_parser.add_argument( 11 | 'kbuild', 12 | help='Kbuild to output', 13 | nargs='?', 14 | ) 15 | args_parser.add_argument( 16 | '-r', 17 | '--root', 18 | action='store', 19 | help='Root of android tree', 20 | ) 21 | args_parser.add_argument( 22 | '-d', 23 | '--debug', 24 | action='store_true', 25 | help='Debug', 26 | ) 27 | args_parser.add_argument( 28 | '-b', 29 | '--print-bazel-output', 30 | action='store_true', 31 | help='Print output of bazel scripts', 32 | ) 33 | args_parser.add_argument( 34 | '-m', 35 | '--module-path', 36 | action='append', 37 | help='Maps from one path to another', 38 | ) 39 | args_parser.add_argument( 40 | '-f', 41 | '--flag', 42 | action='append', 43 | help='Set flag', 44 | ) 45 | args = args_parser.parse_args() 46 | 47 | module_paths_map = {} 48 | if args.module_path: 49 | for module_path in args.module_path: 50 | src_module_path, dst_module_path = module_path.split(':') 51 | module_paths_map[src_module_path] = dst_module_path 52 | 53 | flags_map = {} 54 | if args.flag: 55 | for flag in args.flag: 56 | flag_key, flag_value = flag.split(':') 57 | flags_map[flag_key] = flag_value 58 | 59 | 60 | bazel_parser = BazelParser( 61 | module_paths_map=module_paths_map, 62 | flags_map=flags_map, 63 | debug=args.debug, 64 | print_bazel_output=args.print_bazel_output, 65 | ) 66 | 67 | bazel_parser.parse(args.script) 68 | 69 | if args.kbuild: 70 | write_kbuild(args.kbuild, bazel_parser) 71 | -------------------------------------------------------------------------------- /bazel_parser/parser_impl.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import builtins 4 | import re 5 | from glob import glob 6 | from io import TextIOWrapper 7 | from os import path 8 | from types import SimpleNamespace 9 | from typing import Dict, List, Optional, Set, TypeVar 10 | 11 | from consts import X86_64_OUTS, aarch64_outs 12 | from rules import ( 13 | Alias, 14 | BoolFlag, 15 | ConfigSetting, 16 | DdkHeaders, 17 | DdkModule, 18 | DdkSubModule, 19 | GenRule, 20 | Rule, 21 | ) 22 | from utils import ( 23 | Color, 24 | TemporaryWorkingDirectory, 25 | builtin_print, 26 | color_print, 27 | import_module, 28 | ) 29 | 30 | BUILD_BAZEL_FILE_NAME = 'BUILD.bazel' 31 | 32 | 33 | class depset_impl(set): 34 | def to_list(self): 35 | return list(self) 36 | 37 | 38 | def native_glob_impl( 39 | include: Optional[List[str]] = None, 40 | exclude: Optional[List[str]] = None, 41 | exclude_directories: Optional[bool] = True, 42 | allow_empty: Optional[bool] = True, 43 | ): 44 | if include is None: 45 | include = [] 46 | 47 | if exclude is None: 48 | exclude = [] 49 | 50 | found_files = set() 51 | for g in include: 52 | g_files = glob(g, recursive=True) 53 | if not g_files and not allow_empty: 54 | assert False 55 | found_files.update(g_files) 56 | 57 | excluded_files = set() 58 | for g in exclude: 59 | g_files = glob(g, recursive=True) 60 | excluded_files.update(g_files) 61 | 62 | if not found_files and not allow_empty: 63 | assert False 64 | 65 | found_directories = set() 66 | if exclude_directories: 67 | for found_file in found_files: 68 | if path.isdir(found_file): 69 | found_directories.add(found_file) 70 | 71 | files = list(found_files - excluded_files - found_directories) 72 | files.sort() 73 | 74 | return files 75 | 76 | 77 | T = TypeVar('T', bound=Rule) 78 | 79 | 80 | class BazelParser: 81 | failed_imports: Set[str] = set() 82 | mapped_imports: Set[str] = set() 83 | missing_targets: Set[str] = set() 84 | 85 | def __init__( 86 | self, 87 | module_paths_map: Dict[str, str], 88 | flags_map: Dict[str, str], 89 | debug: Optional[bool] = None, 90 | print_bazel_output: Optional[bool] = None, 91 | ): 92 | self.module_paths_map = module_paths_map 93 | self.flags_map = flags_map 94 | self.debug = debug 95 | self.print_bazel_output = print_bazel_output 96 | self.targets: Dict[str, Rule] = {} 97 | 98 | self.overriden_rules = { 99 | 'attr': SimpleNamespace( 100 | { 101 | 'label': self.dummy('attr.label'), 102 | 'int': self.dummy('attr.int'), 103 | 'string_list': self.dummy('attr.string_list'), 104 | } 105 | ), 106 | 'hermetic_toolchain': SimpleNamespace( 107 | { 108 | 'type': self.dummy('hermetic_toolchain.type'), 109 | } 110 | ), 111 | 'native': SimpleNamespace( 112 | { 113 | 'alias': self.add_target_impl(Alias), 114 | 'genrule': self.add_target_impl(GenRule), 115 | 'glob': self.native_glob_wrapper, 116 | 'config_setting': self.dummy('native.config_setting'), 117 | 'filegroup': self.dummy('native.filegroup'), 118 | } 119 | ), 120 | 'checkpatch': self.dummy('checkpatch'), 121 | 'write_file': self.dummy('write_file'), 122 | 'filegroup': self.dummy('filegroup'), 123 | 'rule': self.rule_impl(), 124 | 'define_common_kernels': self.dummy('define_common_kernels'), 125 | 'kernel_abi': self.dummy('kernel_abi'), 126 | 'kernel_abi_dist': self.dummy('kernel_abi_dist'), 127 | 'kernel_build': self.dummy('kernel_build'), 128 | 'kernel_build_config': self.dummy('kernel_build_config'), 129 | 'kernel_modules_install': self.dummy('kernel_modules_install'), 130 | 'kernel_images': self.dummy('kernel_images'), 131 | 'merged_kernel_uapi_headers': self.dummy( 132 | 'merged_kernel_uapi_headers' 133 | ), 134 | 'kernel_uapi_headers_cc_library': self.dummy( 135 | 'kernel_uapi_headers_cc_library' 136 | ), 137 | 'kernel_compile_commands': self.dummy('kernel_compile_commands'), 138 | 'copy_to_dist_dir': self.dummy('copy_to_dist_dir'), 139 | 'super_image': self.dummy('super_image'), 140 | 'unsparsed_image': self.dummy('unsparsed_image'), 141 | 'hermetic_genrule': self.dummy('hermetic_genrule'), 142 | 'X86_64_OUTS': X86_64_OUTS, 143 | 'aarch64_outs': aarch64_outs, 144 | 'bool_flag': self.add_target_impl(BoolFlag), 145 | 'config_setting': self.add_target_impl(ConfigSetting), 146 | 'alias': self.add_target_impl(Alias), 147 | 'genrule': self.add_target_impl(GenRule), 148 | 'ddk_headers': self.add_target_impl(DdkHeaders), 149 | 'ddk_submodule': self.add_target_impl(DdkSubModule), 150 | 'ddk_module': self.add_target_impl(DdkModule), 151 | 'depset': self.depset_wrapper, 152 | 'select': self.select_impl, 153 | 'struct': self.struct_impl, 154 | 'package': self.package_impl, 155 | 'load': self.load_impl, 156 | 'glob': self.glob_impl, 157 | 'fail': self.fail_impl, 158 | } 159 | 160 | if not self.print_bazel_output: 161 | self.overriden_rules['print'] = self.print_impl 162 | 163 | def map_module_to_path(self, module_name: str): 164 | mapped_module_path = self.module_paths_map.get(module_name) 165 | if mapped_module_path is None: 166 | for module_name_repl, module_path in self.module_paths_map.items(): 167 | if ( 168 | module_name.startswith(module_name_repl) 169 | and module_name[len(module_name_repl)] == '/' 170 | ): 171 | trimmed_module_name = module_name[ 172 | len(module_name_repl) + 1 : 173 | ] 174 | mapped_module_path = path.join( 175 | module_path, trimmed_module_name 176 | ) 177 | break 178 | 179 | if mapped_module_path is None: 180 | return None 181 | 182 | if module_name not in self.mapped_imports: 183 | color_print( 184 | f'Mapped {module_name} to {mapped_module_path}', 185 | color=Color.GREEN, 186 | ) 187 | self.mapped_imports.add(module_name) 188 | 189 | return mapped_module_path 190 | 191 | def spec_resolution(self, spec: str, target_is_file=False): 192 | # There are multiple spec types 193 | # target 194 | # :file.bzl 195 | # :target 196 | # //path/to/module/root:file.bzl 197 | # //path/to/module/root:target 198 | 199 | if ':' not in spec: 200 | return None, None 201 | 202 | module_name, target_name = spec.split(':', 1) 203 | if not module_name and target_is_file: 204 | return '', target_name 205 | 206 | module_path = self.map_module_to_path(module_name) 207 | 208 | if module_path is not None: 209 | if target_is_file: 210 | module_path = path.join(module_path, target_name) 211 | else: 212 | module_path = path.join(module_path, BUILD_BAZEL_FILE_NAME) 213 | 214 | if module_path is None or not path.exists(module_path): 215 | if spec not in self.failed_imports: 216 | color_print(f'Failed to load {spec}', color=Color.YELLOW) 217 | self.failed_imports.add(spec) 218 | 219 | return None, None 220 | 221 | return module_name, module_path 222 | 223 | def parse_module(self, module_name: str, module_path: str): 224 | dir_path = path.dirname(module_path) 225 | module_path = path.basename(module_path) 226 | 227 | module_paths_map = {} 228 | for src_module_path, dst_module_path in self.module_paths_map.items(): 229 | dst_module_path = path.relpath(dst_module_path, dir_path) 230 | module_paths_map[src_module_path] = dst_module_path 231 | 232 | with TemporaryWorkingDirectory(dir_path): 233 | bazel_parser = BazelParser( 234 | module_paths_map, 235 | self.flags_map, 236 | debug=self.debug, 237 | print_bazel_output=self.print_bazel_output, 238 | ) 239 | 240 | bazel_parser.parse(module_path) 241 | 242 | self.init() 243 | 244 | for target in bazel_parser.targets.values(): 245 | self.add_target(target, module_name) 246 | 247 | def import_module(self, module_path: str): 248 | module = import_module(module_path) 249 | assert module is not None 250 | return module 251 | 252 | def _lookup_target(self, spec: str): 253 | target = self.targets.get(spec) 254 | 255 | if not isinstance(target, Alias): 256 | return target 257 | 258 | module_name, _ = self.spec_resolution(spec) 259 | assert module_name is not None 260 | 261 | return self.targets[f'{module_name}:{target.actual}'] 262 | 263 | def lookup_target(self, spec: str) -> Rule: 264 | target = self._lookup_target(spec) 265 | if target is not None: 266 | return target 267 | 268 | module_name, module_path = self.spec_resolution(spec) 269 | if module_path is not None: 270 | self.parse_module(module_name, module_path) 271 | 272 | target = self._lookup_target(spec) 273 | if target is None and spec not in self.missing_targets: 274 | if spec not in self.missing_targets: 275 | color_print(f'Failed to find target {spec}', color=Color.RED) 276 | self.missing_targets.add(spec) 277 | 278 | return target 279 | 280 | def lookup_targets(self, t: type[T]) -> List[T]: 281 | found_targets = set() 282 | for target in self.targets.values(): 283 | if isinstance(target, t): 284 | found_targets.add(target) 285 | return list(found_targets) 286 | 287 | def print_impl( 288 | self, 289 | *args, 290 | file: Optional[TextIOWrapper] = None, 291 | **kwargs, 292 | ): 293 | if file is not None and file.name == '': 294 | builtin_print(*args, file=file, **kwargs) 295 | 296 | def select_impl(self, d: Dict[str, any]): 297 | if self.debug: 298 | builtin_print('select', d) 299 | 300 | default_value = None 301 | for cond_spec, cond_value in d.items(): 302 | if cond_spec == '//conditions:default': 303 | default_value = cond_value 304 | continue 305 | 306 | config_setting = self.lookup_target(cond_spec) 307 | if config_setting is None: 308 | continue 309 | 310 | assert isinstance(config_setting, ConfigSetting) 311 | 312 | assert len(config_setting.flag_values) == 1 313 | for ( 314 | flag_spec, 315 | flag_expected_value, 316 | ) in config_setting.flag_values.items(): 317 | if flag_spec not in self.flags_map: 318 | continue 319 | 320 | flag = self.lookup_target(flag_spec) 321 | if flag is None: 322 | continue 323 | 324 | assert isinstance(flag, BoolFlag) 325 | 326 | flag_expected_value = BoolFlag.parse_value(flag_expected_value) 327 | 328 | flag_value = self.flags_map[flag.name] 329 | flag_value = BoolFlag.parse_value(flag_value) 330 | 331 | if flag_value == flag_expected_value: 332 | return cond_value 333 | 334 | return default_value 335 | 336 | def struct_impl(self, **kwargs): 337 | if self.debug: 338 | builtin_print('struct', kwargs) 339 | 340 | s = SimpleNamespace() 341 | for k, v in kwargs.items(): 342 | setattr(s, k, v) 343 | 344 | return s 345 | 346 | def package_impl(self, *args, **kwargs): 347 | if self.debug: 348 | builtin_print('package', args, kwargs) 349 | 350 | def load_impl( 351 | self, 352 | file_spec: str, 353 | *names: List[str], 354 | **mapped_names: Dict[str, str], 355 | ): 356 | if self.debug: 357 | builtin_print('load', file_spec, names, mapped_names) 358 | 359 | _, module_path = self.spec_resolution(file_spec, target_is_file=True) 360 | if not module_path: 361 | return 362 | 363 | module = self.import_module(module_path) 364 | 365 | mapped_names.update({x: x for x in names}) 366 | 367 | for name, src_name in mapped_names.items(): 368 | if name in self.overriden_rules: 369 | builtin_print(f'Skipped overriden rule {name}') 370 | continue 371 | 372 | value = getattr(module, src_name) 373 | setattr(builtins, name, value) 374 | 375 | def add_target(self, target: Rule, module_name=''): 376 | assert target.name not in self.targets, ( 377 | f'Target {target.name} already exists' 378 | ) 379 | 380 | self.targets[f'{module_name}:{target.name}'] = target 381 | if not module_name: 382 | self.targets[target.name] = target 383 | 384 | def add_target_impl(self, t: type[T]): 385 | def _add_target_impl(**data): 386 | target = t(**data) 387 | self.add_target(target) 388 | 389 | return _add_target_impl 390 | 391 | def dummy(self, name: str): 392 | def dummy_impl(**data): 393 | if self.debug: 394 | builtin_print(name) 395 | builtin_print(data) 396 | builtin_print() 397 | 398 | return dummy_impl 399 | 400 | def rule_impl(self, **data): 401 | if self.debug: 402 | builtin_print('rule') 403 | builtin_print(data) 404 | builtin_print() 405 | 406 | def rule_callable(**data): 407 | if self.debug: 408 | builtin_print('rule_callable') 409 | builtin_print(data) 410 | builtin_print() 411 | 412 | def rule_inner_callable(**data): 413 | if self.debug: 414 | builtin_print('rule_inner_callable') 415 | builtin_print(data) 416 | builtin_print() 417 | 418 | return rule_inner_callable 419 | 420 | return rule_callable 421 | 422 | def fail_impl(self, s: str): 423 | raise ValueError(s) 424 | 425 | def glob_impl(self, globs: List[str]): 426 | if self.debug: 427 | builtin_print('glob', globs) 428 | 429 | found_files = [] 430 | for g in globs: 431 | g_files = glob(g, recursive=True) 432 | found_files.extend(g_files) 433 | 434 | return found_files 435 | 436 | def depset_wrapper(self, data: List[str]): 437 | if self.debug: 438 | builtin_print('depset', data) 439 | 440 | data.sort() 441 | 442 | return depset_impl(data) 443 | 444 | def native_glob_wrapper(self, *args, **kwargs): 445 | if self.debug: 446 | builtin_print('native_glob', args, kwargs) 447 | 448 | return native_glob_impl(*args, **kwargs) 449 | 450 | def init(self): 451 | for key, value in self.overriden_rules.items(): 452 | setattr(builtins, key, value) 453 | 454 | def evaluate_genrule(self, genrule: GenRule): 455 | def replace(match: re.Match[str]): 456 | spec = match.group(1) 457 | _, module_path = self.spec_resolution(spec, target_is_file=True) 458 | if module_path is None: 459 | if spec not in self.missing_targets: 460 | color_print( 461 | f'Failed to find target {spec}', 462 | color=Color.RED, 463 | ) 464 | self.missing_targets.add(spec) 465 | 466 | return match.group(0) 467 | 468 | color_print( 469 | f'Replaced {match.group(0)} with {module_path}', 470 | color=Color.GREEN, 471 | ) 472 | 473 | return module_path 474 | 475 | if genrule.cmd_bash is not None: 476 | assert genrule.cmd is None 477 | genrule.cmd = genrule.cmd_bash 478 | 479 | location_pattern = r'\$\((?:location|locations) ([^\)]+)\)' 480 | genrule.cmd = re.sub(location_pattern, replace, genrule.cmd) 481 | 482 | def evaluate_genrules(self): 483 | genrules = self.lookup_targets(GenRule) 484 | for genrule in genrules: 485 | self.evaluate_genrule(genrule) 486 | 487 | def parse(self, file_path: str): 488 | self.init() 489 | import_module(file_path) 490 | self.evaluate_genrules() 491 | -------------------------------------------------------------------------------- /bazel_parser/pylintrc.toml: -------------------------------------------------------------------------------- 1 | [tool.pylint.'messages control'] 2 | disable = [ 3 | # Defaults 4 | 'raw-checker-failed', 5 | 'bad-inline-option', 6 | 'locally-disabled', 7 | 'file-ignored', 8 | 'suppressed-message', 9 | 'useless-suppression', 10 | 'deprecated-pragma', 11 | 'use-symbolic-message-instead', 12 | 'use-implicit-booleaness-not-comparison-to-string', 13 | 'use-implicit-booleaness-not-comparison-to-zero', 14 | 15 | 'invalid-name', 16 | 'missing-class-docstring', 17 | 'missing-function-docstring', 18 | 'missing-module-docstring', 19 | 'too-few-public-methods', 20 | 'too-many-arguments', 21 | 'too-many-boolean-expressions', 22 | 'too-many-instance-attributes', 23 | 'too-many-lines', 24 | 'too-many-locals', 25 | 'too-many-positional-arguments', 26 | 'too-many-public-methods', 27 | 'too-many-return-statements', 28 | 'unused-argument', 29 | ] 30 | -------------------------------------------------------------------------------- /bazel_parser/ruff.toml: -------------------------------------------------------------------------------- 1 | line-length = 80 2 | 3 | [lint] 4 | extend-select = ['A', 'FA100', 'FA102', 'I'] 5 | 6 | [format] 7 | quote-style = 'single' 8 | -------------------------------------------------------------------------------- /bazel_parser/rules.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Dict, List, Optional 4 | 5 | 6 | class Rule: 7 | def __init__(self, name: str): 8 | self.name = name 9 | 10 | def __str__(self): 11 | s = f'{self.__class__.__name__}:\n' 12 | s += f'\tname: {self.name}\n' 13 | return s 14 | 15 | 16 | class DdkHeaders(Rule): 17 | def __init__( 18 | self, 19 | *, 20 | name: str, 21 | hdrs: List[str], 22 | includes: Optional[List[str]] = None, 23 | visibility: Optional[List[str]] = None, 24 | linux_includes: Optional[List[str]] = None, 25 | ): 26 | super().__init__(name) 27 | 28 | self.hdrs = hdrs 29 | self.includes = includes 30 | self.visibility = visibility 31 | self.linux_includes = linux_includes 32 | 33 | def __str__(self): 34 | s = super().__str__() 35 | s += f'\thdrs: {self.hdrs}\n' 36 | s += f'\tincludes: {self.includes}\n' 37 | s += f'\tvisibility: {self.visibility}\n' 38 | s += f'\tlinux_includes: {self.linux_includes}\n' 39 | return s 40 | 41 | 42 | class DdkModule(Rule): 43 | def __init__( 44 | self, 45 | *, 46 | name: str, 47 | deps: List[str], 48 | hdrs: Optional[List[str]] = None, 49 | srcs: Optional[List[str]] = None, 50 | conditional_srcs: Optional[List[str]] = None, 51 | local_defines: Optional[List[str]] = None, 52 | includes: Optional[List[str]] = None, 53 | kconfig: Optional[str] = None, 54 | defconfig: Optional[str] = None, 55 | kernel_build: Optional[str] = None, 56 | copts: Optional[List[str]] = None, 57 | out: Optional[List[str]] = None, 58 | ): 59 | super().__init__(name) 60 | 61 | self.srcs = srcs 62 | self.conditional_srcs = conditional_srcs 63 | self.local_defines = local_defines 64 | self.deps = deps 65 | self.hdrs = hdrs 66 | self.includes = includes 67 | self.kconfig = kconfig 68 | self.defconfig = defconfig 69 | self.kernel_build = kernel_build 70 | self.copts = copts 71 | self.out = out 72 | 73 | def __str__(self): 74 | s = super().__str__() 75 | s += f'\tsrcs: {self.srcs}\n' 76 | s += f'\tconditional_srcs: {self.conditional_srcs}\n' 77 | s += f'\tlocal_defines: {self.local_defines}\n' 78 | s += f'\tdeps: {self.deps}\n' 79 | s += f'\thdrs: {self.hdrs}\n' 80 | s += f'\tincludes: {self.includes}\n' 81 | s += f'\tkconfig: {self.kconfig}\n' 82 | s += f'\tdefconfig: {self.defconfig}\n' 83 | s += f'\tkernel_build: {self.kernel_build}\n' 84 | s += f'\tcopts: {self.copts}\n' 85 | s += f'\tout: {self.out}\n' 86 | return s 87 | 88 | 89 | class DdkSubModule(Rule): 90 | def __init__( 91 | self, 92 | *, 93 | name: str, 94 | srcs: List[str], 95 | out: str, 96 | deps: List[str], 97 | local_defines: List[str], 98 | kernel_build: Optional[str] = None, 99 | ): 100 | super().__init__(name) 101 | 102 | self.srcs = srcs 103 | self.out = out 104 | self.deps = deps 105 | self.local_defines = local_defines 106 | self.kernel_build = kernel_build 107 | 108 | def __str__(self): 109 | s = super().__str__() 110 | s += f'\tsrcs: {self.srcs}\n' 111 | s += f'\tout: {self.out}\n' 112 | s += f'\tdeps: {self.deps}\n' 113 | s += f'\tlocal_defines: {self.local_defines}\n' 114 | s += f'\tkernel_build: {self.kernel_build}\n' 115 | return s 116 | 117 | 118 | class GenRule(Rule): 119 | def __init__( 120 | self, 121 | *, 122 | name: str, 123 | srcs: List[str], 124 | outs: List[str], 125 | cmd: Optional[str] = None, 126 | cmd_bash: Optional[str] = None, 127 | tools: Optional[str] = None, 128 | ): 129 | super().__init__(name) 130 | 131 | self.cmd = cmd 132 | self.cmd_bash = cmd_bash 133 | self.tools = tools 134 | self.srcs = srcs 135 | self.outs = outs 136 | self.evaluated = False 137 | 138 | def __str__(self): 139 | s = super().__str__() 140 | s += f'\tcmd: {self.cmd}\n' 141 | s += f'\tcmd_bash: {self.cmd_bash}\n' 142 | s += f'\ttools: {self.tools}\n' 143 | s += f'\tsrcs: {self.srcs}\n' 144 | s += f'\touts: {self.outs}\n' 145 | return s 146 | 147 | 148 | class Alias(Rule): 149 | def __init__( 150 | self, 151 | *, 152 | name: str, 153 | actual: str, 154 | deprecation: Optional[str] = None, 155 | visibility: Optional[List[str]] = None, 156 | ): 157 | super().__init__(name) 158 | 159 | self.actual = actual 160 | self.deprecation = deprecation 161 | self.visibility = visibility 162 | 163 | def __str__(self): 164 | s = super().__str__() 165 | s += f'\tactual: {self.actual}\n' 166 | s += f'\tdeprecation: {self.deprecation}\n' 167 | s += f'\tvisibility: {self.visibility}\n' 168 | return s 169 | 170 | 171 | class BoolFlag(Rule): 172 | def __init__( 173 | self, 174 | *, 175 | name: str, 176 | build_setting_default: bool, 177 | visibility: Optional[List[str]] = None, 178 | ): 179 | super().__init__(name) 180 | 181 | self.build_setting_default = build_setting_default 182 | self.visibility = visibility 183 | 184 | def __str__(self): 185 | s = super().__str__() 186 | s += f'\tbuild_setting_default: {self.build_setting_default}\n' 187 | s += f'\tvisibility: {self.visibility}\n' 188 | return s 189 | 190 | @staticmethod 191 | def parse_value(value: str | bool) -> bool: 192 | if isinstance(value, bool): 193 | return value 194 | 195 | if value in ['0', 'disabled', 'false', 'False']: 196 | return False 197 | 198 | if value in ['1', 'enabled', 'true', 'True']: 199 | return True 200 | 201 | raise ValueError(f'Unknown bool {value}') 202 | 203 | 204 | class ConfigSetting(Rule): 205 | def __init__( 206 | self, 207 | *, 208 | name: str, 209 | flag_values: Dict[str, str], 210 | visibility: List[str], 211 | ): 212 | super().__init__(name) 213 | 214 | self.flag_values = flag_values 215 | self.visibility = visibility 216 | 217 | def __str__(self): 218 | s = super().__str__() 219 | s += f'\tflag_values: {self.flag_values}\n' 220 | s += f'\tvisibility: {self.visibility}\n' 221 | return s 222 | -------------------------------------------------------------------------------- /bazel_parser/utils.py: -------------------------------------------------------------------------------- 1 | import builtins 2 | from contextlib import contextmanager 3 | from enum import Enum 4 | import importlib 5 | from importlib.machinery import SourceFileLoader 6 | import os 7 | from typing import Generator 8 | 9 | 10 | def import_module(module_path: str): 11 | module_name = module_path.strip("./").replace("/", "__").replace(".", "_") 12 | loader = SourceFileLoader(module_name, module_path) 13 | spec = importlib.util.spec_from_file_location(module_name, loader=loader) 14 | if spec is None: 15 | return None 16 | 17 | module = importlib.util.module_from_spec(spec) 18 | 19 | loader = spec.loader 20 | if loader is None: 21 | return None 22 | 23 | loader.exec_module(module) 24 | 25 | return module 26 | 27 | 28 | @contextmanager 29 | def TemporaryWorkingDirectory(dir_path: str) -> Generator[None, None, None]: 30 | cwd = os.getcwd() 31 | 32 | os.chdir(dir_path) 33 | 34 | try: 35 | yield 36 | finally: 37 | os.chdir(cwd) 38 | 39 | 40 | class Color(str, Enum): 41 | RED = '\033[0;31m' 42 | GREEN = '\033[0;32m' 43 | YELLOW = '\033[1;33m' 44 | END = '\033[0m' 45 | 46 | 47 | builtin_print = builtins.print 48 | 49 | def color_print(*args, color: Color, **kwargs): 50 | args_str = ' '.join(str(arg) for arg in args) 51 | args_str = color.value + args_str + Color.END.value 52 | builtin_print(args_str, **kwargs) 53 | -------------------------------------------------------------------------------- /blacklisted_files.txt: -------------------------------------------------------------------------------- 1 | group 2 | -------------------------------------------------------------------------------- /blobs/blob_list.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | import sys 4 | 5 | from blobs import * 6 | 7 | 8 | class BlobList: 9 | def __init__(self, dir_path): 10 | self.__dir_path = dir_path 11 | self.__modules = self._read_modules() 12 | self.__blacklisted = self._read_blacklisted() 13 | 14 | all_file_paths = self._get_dir_file_paths(self.__dir_path) 15 | 16 | executable_blobs = self._extract_elf_blobs(all_file_paths, ["bin/"]) 17 | lib_groups = self._extract_elf_groups(all_file_paths, ["lib/", "lib64/"]) 18 | other_blobs = self._extract_blobs(all_file_paths, []) 19 | 20 | blobs = executable_blobs + lib_groups + other_blobs 21 | adopted_blobs = [] 22 | 23 | # Figure out non-elf dependencies 24 | current_adopted_blobs = self._adopt_blobs(blobs, other_blobs) 25 | adopted_blobs.extend(current_adopted_blobs) 26 | 27 | # Figure out elf dependencies 28 | current_adopted_blobs = self._adopt_blobs(blobs, lib_groups) 29 | adopted_blobs.extend(current_adopted_blobs) 30 | 31 | self._remove_adopted_blobs(blobs, adopted_blobs) 32 | self._blobs = blobs 33 | 34 | @staticmethod 35 | def _read_modules(): 36 | with open("source_available_files.txt", "r") as source_available_files_data: 37 | modules = source_available_files_data.read().splitlines() 38 | 39 | return modules 40 | 41 | @staticmethod 42 | def _read_blacklisted(): 43 | with open("blacklisted_files.txt", "r") as blacklisted_files_data: 44 | blacklisted = blacklisted_files_data.read().splitlines() 45 | 46 | return blacklisted 47 | 48 | @staticmethod 49 | def _get_dir_file_paths(dir_path): 50 | """ 51 | Get a list of file paths found inside `dir_path`. 52 | 53 | Args: 54 | dir_path (str): A path to a directory to look inside. 55 | 56 | Returns: 57 | list: A list of all the file paths. 58 | """ 59 | 60 | file_paths = [] 61 | for root, _, files in os.walk(dir_path): 62 | for file in files: 63 | absolute_file_path = os.path.join(root, file) 64 | if os.path.islink(absolute_file_path): 65 | continue 66 | 67 | relative_file_path = os.path.relpath(absolute_file_path, dir_path) 68 | file_paths.append(relative_file_path) 69 | 70 | return file_paths 71 | 72 | @staticmethod 73 | def _extract_subdir_file_paths(file_paths, sub_directories): 74 | """ 75 | Extract file paths that are found under `subdir` from a list of file paths. 76 | 77 | Args: 78 | file_paths (list): A list of file paths to extract from. 79 | sub_directories (list): A list of sub directories to match. 80 | 81 | Returns: 82 | list: A list of all the extracted file paths. 83 | """ 84 | 85 | subdir_file_paths = [] 86 | 87 | for file_path in file_paths: 88 | if sub_directories: 89 | found_in_sub_directories = False 90 | 91 | for subdir in sub_directories: 92 | if file_path.startswith(subdir): 93 | found_in_sub_directories = True 94 | break 95 | 96 | if not found_in_sub_directories: 97 | continue 98 | 99 | subdir_file_paths.append(file_path) 100 | 101 | for file_path in subdir_file_paths: 102 | file_paths.remove(file_path) 103 | 104 | return subdir_file_paths 105 | 106 | def _extract_blobs(self, all_file_paths, sub_directories): 107 | """ 108 | Extract a list of simple blobs from the file paths that are 109 | found under `subdir` from a list of file paths. 110 | 111 | Args: 112 | all_file_paths (list): A list of file paths to extract from. 113 | sub_directories (list): A list of sub directories to match. 114 | 115 | Returns: 116 | list: A list of all the extracted simple blobs. 117 | """ 118 | 119 | blobs = [] 120 | 121 | file_paths = self._extract_subdir_file_paths(all_file_paths, sub_directories) 122 | for file_path in file_paths: 123 | try: 124 | blob = Blob(self.__dir_path, file_path) 125 | if blob.get_name() in self.__blacklisted: 126 | continue 127 | 128 | blobs.append(blob) 129 | except ValueError: 130 | pass 131 | 132 | return blobs 133 | 134 | def _extract_elf_blobs(self, all_file_paths, sub_directories): 135 | """ 136 | Extract a list of elf blobs from the file paths that are 137 | found under `subdir` from a list of file paths. 138 | 139 | Args: 140 | all_file_paths (list): A list of file paths to extract from. 141 | sub_directories (list): A list of sub directories to match. 142 | 143 | Returns: 144 | list: A list of all the extracted elf blobs. 145 | """ 146 | 147 | elf_blobs = [] 148 | non_elf_file_paths = [] 149 | 150 | file_paths = self._extract_subdir_file_paths(all_file_paths, sub_directories) 151 | for file_path in file_paths: 152 | try: 153 | elf_blob = ELFBlob(self.__dir_path, file_path) 154 | elf_blobs.append(elf_blob) 155 | except ValueError: 156 | non_elf_file_paths.append(file_path) 157 | 158 | # Add back non-ELF files 159 | for file_path in non_elf_file_paths: 160 | all_file_paths.append(file_path) 161 | 162 | return elf_blobs 163 | 164 | def _extract_elf_groups(self, all_file_paths, sub_directories): 165 | """ 166 | Extract a list of elf groups from the file paths that are 167 | found under `subdir` from a list of file paths. 168 | 169 | Args: 170 | all_file_paths (list): A list of file paths to extract from. 171 | sub_directories (list): A list of sub directories to match. 172 | 173 | Returns: 174 | list: A list of all the extracted elf groups. 175 | """ 176 | 177 | non_elf_file_paths = [] 178 | 179 | name_blobs = {} 180 | file_paths = self._extract_subdir_file_paths(all_file_paths, sub_directories) 181 | for file_path in file_paths: 182 | try: 183 | elf_blob = ELFBlob(self.__dir_path, file_path) 184 | name_blobs.setdefault(elf_blob.get_name(), []).append(elf_blob) 185 | except ValueError: 186 | non_elf_file_paths.append(file_path) 187 | 188 | # Add back non-ELF files 189 | for file_path in non_elf_file_paths: 190 | all_file_paths.append(file_path) 191 | 192 | elf_groups = [] 193 | for name, blobs in name_blobs.items(): 194 | elf_group = ELFGroup(self.__dir_path, blobs) 195 | elf_groups.append(elf_group) 196 | 197 | return elf_groups 198 | 199 | @staticmethod 200 | def _adopt_blobs(target_blobs, source_blobs): 201 | """ 202 | Adopt needed blobs from a list of blobs. 203 | 204 | Args: 205 | target_blobs (list): The list of blobs to adopt into. 206 | source_blobs (list): The list of blobs to adopt from. 207 | 208 | Returns: 209 | list: A list of all the blobs that were adopted by at least 210 | one other blob. 211 | """ 212 | adopted_blobs = [] 213 | 214 | for source_blob in source_blobs: 215 | solved_any = False 216 | 217 | for target_blob in target_blobs: 218 | if source_blob == target_blob: 219 | continue 220 | 221 | solved_one = target_blob.try_needed_blob(source_blob) 222 | if solved_one: 223 | solved_any = True 224 | 225 | if solved_any: 226 | adopted_blobs.append(source_blob) 227 | 228 | return adopted_blobs 229 | 230 | @staticmethod 231 | def _remove_adopted_blobs(target_blobs, adopted_blobs): 232 | """ 233 | Remove adopted blobs from a list of blobs. 234 | 235 | Args: 236 | :param target_blobs (list): The list of blobs to remove from. 237 | :param adopted_blobs (list): The list of blobs to remove. 238 | """ 239 | 240 | for adopted_blob in adopted_blobs: 241 | try: 242 | target_blobs.remove(adopted_blob) 243 | except ValueError: 244 | pass 245 | 246 | def _print_blob(self, blob, visited_blobs, depth, output_data): 247 | blob_path = blob.get_path() 248 | 249 | visited_blobs.append(blob) 250 | 251 | indent = "\t" * depth 252 | output_data.write(indent) 253 | output_data.write(blob_path) 254 | 255 | source_available = blob_path in self.__modules 256 | if source_available: 257 | output_data.write(" # source available") 258 | 259 | output_data.write("\n") 260 | 261 | blob_items = blob.get_blob_list() 262 | for blob_item in blob_items: 263 | if blob_item not in visited_blobs: 264 | self._print_blob(blob_item, visited_blobs, depth + 1, output_data) 265 | 266 | def print_blob(self, blob, output_data): 267 | blob_module_name = blob.get_module_name() 268 | output_data.write("# ") 269 | output_data.write(blob_module_name) 270 | output_data.write("\n") 271 | 272 | blob_items = blob.get_contained_blobs() 273 | for blob_item in blob_items: 274 | visited_blobs = [] 275 | self._print_blob(blob_item, visited_blobs, 0, output_data) 276 | 277 | output_data.write("\n") 278 | 279 | def print_blobs(self, output_data): 280 | for blob in self._blobs: 281 | self.print_blob(blob, output_data) 282 | 283 | 284 | if len(sys.argv) < 3: 285 | print("not enough arguments!") 286 | print("usage: blob_list.py ") 287 | exit() 288 | 289 | vendor_path = sys.argv[1] 290 | target_path = sys.argv[2] 291 | 292 | target_proprietary_files_path = os.path.join(target_path, "proprietary_files.txt") 293 | target_modules_path = os.path.join(target_path, "modules.mk") 294 | 295 | blob_list = BlobList(vendor_path) 296 | 297 | if not os.path.exists(target_path): 298 | os.makedirs(target_path) 299 | 300 | with open(target_proprietary_files_path, "w") as file: 301 | blob_list.print_blobs(file) 302 | -------------------------------------------------------------------------------- /blobs/blobs.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from utils import * 4 | 5 | 6 | class GenericBlob: 7 | def __init__(self): 8 | self._blobs = [] 9 | 10 | def get_name(self): 11 | raise NotImplementedError() 12 | 13 | def get_absolute_path(self): 14 | raise NotImplementedError() 15 | 16 | def get_arches(self): 17 | raise NotImplementedError() 18 | 19 | def is_init_file(self): 20 | return self.get_name().endswith(".rc") 21 | 22 | def is_service_file(self): 23 | return "/bin/" in self.get_absolute_path() 24 | 25 | def is_other_name_inside(self, other): 26 | path = self.get_absolute_path() 27 | other_name = other.get_name() 28 | 29 | if path_contains_string(path, other_name): 30 | return True 31 | 32 | return False 33 | 34 | def _is_needed_blob(self, other): 35 | # No .rc file should be the head of the hierarchy 36 | if self.is_init_file(): 37 | return False 38 | 39 | # .rc files should be marked as dependencies if they contain this name 40 | if self.is_service_file() and other.is_init_file() and \ 41 | other.is_other_name_inside(self): 42 | return True 43 | 44 | if self.is_other_name_inside(other): 45 | return True 46 | 47 | return False 48 | 49 | def try_needed_blob(self, other): 50 | if not self._is_needed_blob(other): 51 | return False 52 | 53 | self._blobs.append(other) 54 | 55 | return True 56 | 57 | def get_blob_list(self): 58 | # Unpack ELFGroups 59 | unpacked_blobs = [] 60 | for blob in self._blobs: 61 | contained_blobs = blob.get_contained_blobs() 62 | unpacked_blobs.extend(contained_blobs) 63 | 64 | # Get the target arches of top-most blob 65 | target_arches = self.get_arches() 66 | if not target_arches: 67 | return unpacked_blobs 68 | 69 | final_blobs = [] 70 | for blob in unpacked_blobs: 71 | if not blob.is_matching_arch(target_arches): 72 | continue 73 | 74 | final_blobs.append(blob) 75 | 76 | return final_blobs 77 | 78 | 79 | class Blob(GenericBlob): 80 | def __init__(self, dir_path, path): 81 | super().__init__() 82 | 83 | absolute_path = os.path.join(dir_path, path) 84 | name = os.path.basename(path) 85 | module_name = os.path.splitext(name)[0] 86 | 87 | self._absolute_path = absolute_path 88 | self._path = path 89 | self._name = name 90 | self._module_name = module_name 91 | 92 | def get_name(self): 93 | return self._name 94 | 95 | def get_module_name(self): 96 | return self._module_name 97 | 98 | def get_path(self): 99 | return self._path 100 | 101 | def get_absolute_path(self): 102 | return self._absolute_path 103 | 104 | def get_contained_blobs(self): 105 | return [self] 106 | 107 | def get_arches(self): 108 | return [] 109 | 110 | def is_matching_arch(self, arches): 111 | return True 112 | 113 | def set_blobs(self, blobs): 114 | self._blobs = blobs 115 | 116 | 117 | class ELFBlob(Blob): 118 | def __init__(self, dir_path, path): 119 | super().__init__(dir_path, path) 120 | 121 | self._arch = get_arch(self._absolute_path) 122 | 123 | def get_arches(self): 124 | return [self._arch] 125 | 126 | def is_matching_arch(self, arches): 127 | return self._arch in arches 128 | 129 | 130 | class ELFGroup(GenericBlob): 131 | def __init__(self, _, blobs): 132 | super().__init__() 133 | 134 | self._contained_blobs = blobs 135 | self._arches = [] 136 | for blob in self._contained_blobs: 137 | blob_arches = blob.get_arches() 138 | self._arches.extend(blob_arches) 139 | 140 | def get_arches(self): 141 | return self._arches 142 | 143 | def get_contained_blobs(self): 144 | for blob in self._contained_blobs: 145 | blob.set_blobs(self._blobs) 146 | 147 | return self._contained_blobs 148 | 149 | def get_blob(self): 150 | return self._contained_blobs[0] 151 | 152 | def get_name(self): 153 | return self.get_blob().get_name() 154 | 155 | def get_module_name(self): 156 | return self.get_blob().get_module_name() 157 | 158 | def get_absolute_path(self): 159 | return self.get_blob().get_absolute_path() 160 | -------------------------------------------------------------------------------- /blobs/utils.py: -------------------------------------------------------------------------------- 1 | data_map = {} 2 | 3 | 4 | def get_arch(path): 5 | elf_magic32 = b'\x7fELF\x01' 6 | elf_magic64 = b'\x7fELF\x02' 7 | 8 | with open(path, "rb") as file: 9 | file_head = file.read(5) 10 | 11 | if file_head == elf_magic32: 12 | return "32" 13 | 14 | if file_head == elf_magic64: 15 | return "64" 16 | 17 | raise ValueError() 18 | 19 | 20 | def path_contains_string(path, string): 21 | binary_string = bytes(string, 'UTF-8') 22 | 23 | if path in data_map: 24 | binary_data = data_map[path] 25 | else: 26 | with open(path, "rb") as file: 27 | binary_data = file.read() 28 | data_map[path] = binary_data 29 | 30 | position = binary_data.find(binary_string) 31 | if position == -1: 32 | return False 33 | else: 34 | return True 35 | -------------------------------------------------------------------------------- /fdt_extra.py: -------------------------------------------------------------------------------- 1 | import fdt 2 | 3 | def line_offset(tabsize, offset, string): 4 | offset = ' ' * (tabsize * offset) 5 | return offset + string 6 | 7 | class PropWordsWithPhandles(fdt.PropWords): 8 | def __init__(self, name, *args, phandle_names=None): 9 | super().__init__(name, *args) 10 | 11 | if phandle_names is None: 12 | phandle_names = {} 13 | 14 | self.__phandle_names = phandle_names 15 | 16 | def set_phandle_name(self, i, name): 17 | if i in self.__phandle_names: 18 | raise ValueError() 19 | 20 | self.__phandle_names[i] = name 21 | 22 | def get_phandle_name(self, i): 23 | return self.__phandle_names[i] 24 | 25 | def get_phandle_names(self): 26 | return self.__phandle_names 27 | 28 | def get_dts_value(self, i, word): 29 | if i in self.__phandle_names: 30 | return f'&{self.__phandle_names[i]}' 31 | else: 32 | return '0x{:X}'.format(word) 33 | 34 | def to_dts(self, tabsize: int = 4, depth: int = 0): 35 | result = line_offset(tabsize, depth, self.name) 36 | result += ' = <' 37 | result += ' '.join([self.get_dts_value(i, word) for i, word in enumerate(self.data)]) 38 | result += ">;\n" 39 | return result 40 | 41 | def copy(self): 42 | return PropWordsWithPhandles(self.name, *self.data, 43 | phandle_names=self.get_phandle_names()) 44 | -------------------------------------------------------------------------------- /find_closest_commit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | STARTING_COMMIT="$1" 4 | TARGET_COMMIT="$2" 5 | 6 | PARENTS=$(git rev-list --parents "$STARTING_COMMIT" | cut -d ' ' -f2-) 7 | 8 | echo "$PARENTS" 9 | 10 | if [ -z "$PARENTS" ]; then 11 | echo "No parent commits found." 12 | exit 1 13 | fi 14 | 15 | BEST_PARENT="" 16 | MIN_DIFF="" 17 | 18 | for PARENT in $PARENTS; do 19 | DIFF_SIZE=$(git diff --numstat "$PARENT" "$TARGET_COMMIT" | awk '{s+=$1+$2} END {print s}') 20 | 21 | if [ -z "$MIN_DIFF" ] || [ "$DIFF_SIZE" -lt "$MIN_DIFF" ]; then 22 | MIN_DIFF=$DIFF_SIZE 23 | BEST_PARENT=$PARENT 24 | echo "Best parent commit: $BEST_PARENT with diff size: $MIN_DIFF" 25 | fi 26 | done 27 | -------------------------------------------------------------------------------- /flash_fastboot.sh: -------------------------------------------------------------------------------- 1 | ADB_STASTE=$(adb get-state) 2 | 3 | get_part_i() { 4 | name="$1" 5 | index="$2" 6 | parts=(${name//:/ }) 7 | echo "${parts[$index]}" 8 | } 9 | 10 | get_real_part() { 11 | get_part_i "$1" 0 12 | } 13 | 14 | get_img_part() { 15 | get_part_i "$1" 1 16 | } 17 | 18 | if [ "$ADB_STASTE" = "device" ]; then 19 | BOOTLOADER_PARTS="boot vendor_boot dtbo recovery" 20 | CAN_REBOOT_TO_BL=1 21 | 22 | for part in "$@" 23 | do 24 | real_part=$(get_real_part "$part") 25 | echo "$BOOTLOADER_PARTS" | grep -w -q "$real_part" 26 | if [ $? -ne 0 ]; then 27 | CAN_REBOOT_TO_BL=0 28 | fi 29 | done 30 | 31 | if [ "$CAN_REBOOT_TO_BL" = "1" ]; then 32 | adb reboot bootloader 33 | else 34 | adb reboot fastboot 35 | fi 36 | fi 37 | 38 | for part in "$@" 39 | do 40 | real_part=$(get_real_part "$part") 41 | img_part=$(get_img_part "$part") 42 | if [ -z "$img_part" ]; then 43 | img_part="$real_part.img" 44 | fi 45 | fastboot flash "$real_part" "$img_part" 46 | done 47 | 48 | fastboot reboot 49 | -------------------------------------------------------------------------------- /fod_dim_lut_calibration.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import bisect 4 | import subprocess 5 | import time 6 | 7 | from threading import Thread 8 | 9 | 10 | def run_shell_cmd(*args): 11 | return subprocess.check_output(["adb", "shell"] + [*args]) 12 | 13 | 14 | def get_shell_cmd_int(*args): 15 | output = run_shell_cmd(*args) 16 | return int(output) 17 | 18 | 19 | def set_shell_cmd_int(value, *args): 20 | run_shell_cmd("echo", str(value), " > ", *args) 21 | 22 | 23 | PANEL0_BACKLIGHT_PATH = "/sys/class/backlight/panel0-backlight/" 24 | MAX_BRIGHTNESS_PATH = PANEL0_BACKLIGHT_PATH + "max_brightness" 25 | BRIGHTNESS_PATH = PANEL0_BACKLIGHT_PATH + "brightness" 26 | 27 | DISPLAY_PATH = "/sys/devices/platform/soc/soc:qcom,dsi-display-primary/" 28 | DIM_ALPHA_PATH = DISPLAY_PATH + "fod_dim_alpha" 29 | FORCE_FOD_UI_PATH = DISPLAY_PATH + "force_fod_ui" 30 | 31 | 32 | def get_max_brightness(): 33 | return get_shell_cmd_int("cat", MAX_BRIGHTNESS_PATH) 34 | 35 | 36 | def get_min_brightness(): 37 | return 1 38 | 39 | 40 | def set_brightness(brightness): 41 | set_shell_cmd_int(brightness, BRIGHTNESS_PATH) 42 | 43 | 44 | def set_dim_alpha(alpha): 45 | set_shell_cmd_int(alpha, DIM_ALPHA_PATH) 46 | 47 | 48 | def get_dim_alpha(): 49 | return get_shell_cmd_int("cat", DIM_ALPHA_PATH) 50 | 51 | 52 | def set_force_fod_ui(status): 53 | set_shell_cmd_int(status, FORCE_FOD_UI_PATH) 54 | 55 | 56 | FORCE_FOD_UI_SWITCH_TIME = 1 / 10 57 | 58 | exit_worker_thread = False 59 | switch_fod_ui = False 60 | dim_alpha = 0 61 | 62 | 63 | def worker_thread_target(): 64 | global exit_worker_thread 65 | global switch_fod_ui 66 | global dim_alpha 67 | 68 | while not exit_worker_thread: 69 | if switch_fod_ui: 70 | set_dim_alpha(dim_alpha) 71 | set_force_fod_ui(1) 72 | 73 | time.sleep(FORCE_FOD_UI_SWITCH_TIME) 74 | 75 | if switch_fod_ui: 76 | set_force_fod_ui(0) 77 | set_dim_alpha(-1) 78 | 79 | time.sleep(FORCE_FOD_UI_SWITCH_TIME) 80 | 81 | 82 | worker_thread = Thread(target=worker_thread_target) 83 | worker_thread.start() 84 | 85 | brightness_alpha_pairs = [] 86 | 87 | 88 | def print_help(): 89 | print("""Commands: 90 | quit, q: Quit 91 | print, p: Print LUT 92 | fill, f: Create a LUT based on current values 93 | calibrate, c: Calibrate LUT 94 | """) 95 | 96 | 97 | def print_pair(pair): 98 | print("{}: {}".format(pair[0], pair[1])) 99 | 100 | 101 | def print_pairs(): 102 | print("{} pairs".format(len(brightness_alpha_pairs))) 103 | for pair in brightness_alpha_pairs: 104 | print_pair(pair) 105 | 106 | 107 | def add_pair(pair): 108 | bisect.bisect(brightness_alpha_pairs, pair, key=lambda p: p[0]) 109 | 110 | 111 | def fill_pairs(length): 112 | global brightness_alpha_pairs 113 | 114 | brightness_alpha_pairs = [] 115 | 116 | max_brightness = get_max_brightness() 117 | min_brightness = get_min_brightness() 118 | 119 | print("Max brightness: {}".format(max_brightness)) 120 | 121 | set_force_fod_ui(0) 122 | set_dim_alpha(-1) 123 | 124 | for i in range(length): 125 | brightness = ( 126 | i * (max_brightness - min_brightness) // (length - 1) + min_brightness 127 | ) 128 | set_brightness(brightness) 129 | alpha = get_dim_alpha() 130 | pair = [brightness, alpha] 131 | brightness_alpha_pairs.append(pair) 132 | print_pair(pair) 133 | 134 | 135 | def calibrate_one(pair): 136 | global dim_alpha 137 | global switch_fod_ui 138 | 139 | switch_fod_ui = True 140 | exit = False 141 | 142 | set_brightness(pair[0]) 143 | 144 | print("""Commands: 145 | q: Quit 146 | n: Next LUT value 147 | +: Add 1 to the dimming alpha 148 | +x: Add x to the dimming alpha 149 | -: Substract one from the dimming alpha 150 | -x: Substract x from the dimming alpha 151 | =x: Set the dimming value to x 152 | """) 153 | 154 | while True: 155 | print_pair(pair) 156 | 157 | dim_alpha = pair[1] 158 | mod = input("Modifier: ") 159 | new_dim_alpha = dim_alpha 160 | mod_int = 0 161 | 162 | if len(mod) == 0: 163 | continue 164 | 165 | if mod == "q": 166 | exit = True 167 | break 168 | elif mod == "n": 169 | break 170 | elif mod[0] == "+": 171 | try: 172 | mod_int = int(mod[1:]) 173 | except: 174 | mod_int = +1 175 | 176 | new_dim_alpha = new_dim_alpha + mod_int 177 | elif mod[0] == "-": 178 | try: 179 | mod_int = int(mod[1:]) 180 | except: 181 | mod_int = 1 182 | 183 | new_dim_alpha = new_dim_alpha - mod_int 184 | elif mod[0] == "=": 185 | try: 186 | new_dim_alpha = int(mod[1:]) 187 | except: 188 | pass 189 | 190 | if new_dim_alpha > 255: 191 | new_dim_alpha = 255 192 | 193 | if new_dim_alpha < 0: 194 | new_dim_alpha = 0 195 | 196 | pair[1] = dim_alpha = new_dim_alpha 197 | 198 | switch_fod_ui = False 199 | 200 | return exit 201 | 202 | 203 | def calibrate(): 204 | for pair in brightness_alpha_pairs: 205 | exit = calibrate_one(pair) 206 | if exit: 207 | break 208 | 209 | 210 | while True: 211 | print_help() 212 | 213 | command = input("Command: ") 214 | 215 | if command == "quit" or command == "q": 216 | break 217 | elif command == "print" or command == "p": 218 | print_pairs() 219 | elif command == "fill" or command == "f": 220 | try: 221 | length = int(input("Length: ")) 222 | fill_pairs(length) 223 | except: 224 | pass 225 | elif command == "calibrate" or command == "c": 226 | calibrate() 227 | else: 228 | print("Invalid command") 229 | print_help() 230 | 231 | exit_worker_thread = True 232 | worker_thread.join() 233 | -------------------------------------------------------------------------------- /log.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | adb wait-for-device root 4 | 5 | FILE="$1" 6 | 7 | if [[ -n "$FILE" ]] && [[ -f "$FILE" ]]; then 8 | rm "$FILE" 9 | fi 10 | 11 | while true; do 12 | if [[ -n "$FILE" ]]; then 13 | adb logcat -b all >> "$FILE" 14 | else 15 | adb logcat -b all 16 | fi 17 | done 18 | -------------------------------------------------------------------------------- /manifest_to_modules_data.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from os import path 4 | import sys 5 | import requests 6 | import xml.etree.ElementTree as ET 7 | import argparse 8 | 9 | 10 | def parse_manifest(server, repo, tag, projects): 11 | url = f"{server}/{repo}/-/raw/release/{tag}.xml" 12 | print(f'Downloading {url}', file=sys.stderr) 13 | response = requests.get(url) 14 | response.raise_for_status() 15 | root = ET.fromstring(response.text) 16 | 17 | for image in root.findall(".//refs/image"): 18 | server = image.get("server") 19 | project = image.get("project") 20 | tag = image.get("tag") 21 | parse_manifest(server, project, tag, projects) 22 | 23 | remote_mapping = { 24 | remote.get("name"): remote.get("fetch") for remote in root.findall(".//remote") 25 | } 26 | 27 | default_remote = root.find(".//default") 28 | default_remote_name = None 29 | if default_remote is not None: 30 | default_remote_name = default_remote.get("remote") 31 | 32 | for proj in root.findall(".//project"): 33 | name = proj.get("name") 34 | remote = proj.get("remote") or default_remote_name 35 | revision = proj.get("revision") 36 | full_url = f"{remote_mapping.get(remote)}/{name}" 37 | projects.append((name, full_url, revision)) 38 | 39 | 40 | def print_index(projects, index, repos): 41 | for values in sorted(projects): 42 | project = values[0] 43 | 44 | found = False 45 | for repo in repos: 46 | repo_source = repo_target = repo 47 | 48 | if ':' in repo: 49 | repo_source, repo_target = repo.split(':') 50 | 51 | if project.endswith(f'/{repo_source}'): 52 | project = repo_target 53 | found = True 54 | break 55 | 56 | entry = f'["{project}"]="{values[index]}"' 57 | 58 | if found: 59 | print(f"\t{entry}") 60 | else: 61 | print(f"\t# ignored: {entry}") 62 | 63 | 64 | def parse_and_print(base, repo, tag, repos): 65 | projects = [] 66 | parse_manifest(base, repo, tag, projects) 67 | 68 | print("declare -A REPOS_TO_URL=(") 69 | print_index(projects, 1, repos) 70 | print(")") 71 | 72 | print() 73 | 74 | print("declare -A REPOS_TO_REF=(") 75 | print_index(projects, 2, repos) 76 | print(")") 77 | 78 | 79 | def main(): 80 | base = "https://git.codelinaro.org/clo" 81 | repo = "la/la/vendor/manifest" 82 | 83 | parser = argparse.ArgumentParser("Parse manifest and output modules data") 84 | parser.add_argument("tag", help="Tag") 85 | parser.add_argument("repos", nargs="+", help="Repos to process") 86 | 87 | args = parser.parse_args() 88 | 89 | parse_and_print(base, repo, args.tag, args.repos) 90 | 91 | if __name__ == "__main__": 92 | main() 93 | -------------------------------------------------------------------------------- /motorola-merge-kernel-modules.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | if [ $# -lt 2 ]; then 4 | echo "usage: cat modules-subdirs | $0 " 5 | exit 1 6 | fi 7 | 8 | MODULES_REPO_NAME="$1" 9 | BASE_NAME="$2" 10 | 11 | base_name="" 12 | 13 | git fetch "$MODULES_REPO_NAME" 14 | 15 | merge_repo() { 16 | if [ -z "$base_name" ]; then 17 | echo "No paths found" 18 | exit 1 19 | fi 20 | 21 | branch_name="$BASE_NAME-$base_name" 22 | 23 | echo "Merging branch with name $branch_name" 24 | 25 | git merge --allow-unrelated-histories "$MODULES_REPO_NAME/$branch_name" 26 | 27 | base_name="" 28 | } 29 | 30 | while IFS='$\n' read -r subdir; do 31 | 32 | if [ -z "$base_name" ]; then 33 | base_name=$(basename "${subdir}" ".${subdir##*.}") 34 | fi 35 | 36 | if [ -n "$subdir" ]; then 37 | continue 38 | fi 39 | 40 | merge_repo 41 | done 42 | 43 | merge_repo 44 | -------------------------------------------------------------------------------- /motorola-split-kernel-modules.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | if [ $# -lt 2 ]; then 4 | echo "usage: cat modules-subdirs | $0 " 5 | exit 1 6 | fi 7 | 8 | BASE_NAME="$1" 9 | REPO_NAME="$2" 10 | 11 | paths="" 12 | path_params="" 13 | base_name="" 14 | 15 | base_commit=$(git rev-parse HEAD) 16 | 17 | filter_repo() { 18 | if [ -z "$base_name" ] || [ -z "$path_params" ] || [ -z "$paths" ]; then 19 | echo "No paths found" 20 | exit 1 21 | fi 22 | 23 | echo -e "Creating subtree with name $base_name$paths\n" 24 | 25 | git filter-repo $path_params --refs HEAD 26 | 27 | branch_name="$BASE_NAME-$base_name" 28 | git branch -D "$branch_name" 29 | git checkout -b "$branch_name" 30 | git push -f --set-upstream "$REPO_NAME" "$branch_name" 31 | git checkout "$base_commit" 32 | paths="" 33 | path_params="" 34 | base_name="" 35 | } 36 | 37 | while IFS='$\n' read -r subdir; do 38 | if [ -z "$base_name" ]; then 39 | base_name=$(basename "${subdir}" ".${subdir##*.}") 40 | fi 41 | 42 | if [ -n "$subdir" ]; then 43 | paths="$paths\n$subdir" 44 | path_params="$path_params --path $subdir" 45 | continue 46 | fi 47 | 48 | filter_repo 49 | done 50 | 51 | filter_repo 52 | -------------------------------------------------------------------------------- /prepare-kernel-tree.sh: -------------------------------------------------------------------------------- 1 | cut_line() { 2 | line=$1 3 | n=$2 4 | 5 | echo "$line" | tr -s $'\t' | cut -d $'\t' -f "$n" 6 | } 7 | 8 | while IFS='$\n' read -r line; do 9 | if [ -z "$line" ]; then 10 | continue 11 | fi 12 | 13 | if [ "$line" = "#*" ]; then 14 | continue 15 | fi 16 | 17 | url=$(cut_line "$line" 1) 18 | repo=$(cut_line "$line" 2) 19 | path=$(cut_line "$line" 3) 20 | commit=$(cut_line "$line" 4) 21 | 22 | git remote remove "$repo" &> /dev/null 23 | git remote add "$repo" "$url" 24 | git fetch -q "$repo" "$commit" 25 | 26 | if [ "$path" = "/" ]; then 27 | echo "Creating tree with $repo $commit" 28 | git reset --hard FETCH_HEAD 29 | else 30 | echo "Creating subtree at $path with $repo $commit" 31 | git subtree add -P "$path" FETCH_HEAD 32 | fi 33 | done 34 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | parse 2 | fdt 3 | -------------------------------------------------------------------------------- /sepolicy/.gitignore: -------------------------------------------------------------------------------- 1 | expanded_macros.if 2 | -------------------------------------------------------------------------------- /sepolicy/cil_rule.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from enum import Enum 7 | from typing import Dict, List, Optional, Set 8 | 9 | from conditional_type import ConditionalType, ConditionalTypeRedirect 10 | from rule import ( 11 | Rule, 12 | RuleType, 13 | is_type_generated, 14 | raw_part, 15 | raw_parts_list, 16 | unpack_line, 17 | ) 18 | from utils import Color, color_print 19 | 20 | 21 | def remove_type_suffix(suffix: Optional[str], t: str): 22 | if suffix is None: 23 | return t 24 | 25 | if t.endswith(suffix): 26 | return t[: -len(suffix)] 27 | 28 | return t 29 | 30 | 31 | def is_conditional_typeattr(part: raw_part): 32 | if isinstance(part[0], list): 33 | part = part[0][0] 34 | else: 35 | part = part[0] 36 | 37 | return part in ['and', 'not', 'all'] 38 | 39 | 40 | def create_conditional_type( 41 | version_suffix: Optional[str], 42 | parts: raw_parts_list, 43 | ): 44 | # ((and (...) ((not (...))))) -> (and (...) ((not (...)))) 45 | # ((not (...))) -> (not (...)) 46 | 47 | if len(parts) == 1 and isinstance(parts[0], list): 48 | parts = parts[0] 49 | assert parts[0] in ['and', 'not', 'all'], parts 50 | 51 | # (and (...) ((not (...)))) -> (and (...) (not (...))) 52 | if ( 53 | len(parts) == 3 54 | and isinstance(parts[2], list) 55 | and len(parts[2]) == 1 56 | and parts[2][0][0] == 'not' 57 | ): 58 | parts[2] = parts[2][0] 59 | 60 | # (and (...) (not (...))) -> (and (...) not (...)) 61 | if ( 62 | len(parts) == 3 63 | and parts[0] == 'and' 64 | and isinstance(parts[2], list) 65 | and parts[2][0] == 'not' 66 | ): 67 | assert isinstance(parts[2], list) 68 | parts.append(parts[2][1]) 69 | parts[2] = parts[2][0] 70 | 71 | # (all) 72 | if parts == ['all']: 73 | return ConditionalType([], [], True) 74 | 75 | # Split in groups of two 76 | if len(parts) not in [2, 4]: 77 | color_print('Ignored conditional type: ', parts, color=Color.YELLOW) 78 | return None 79 | 80 | positive: List[str] = [] 81 | negative: List[str] = [] 82 | 83 | for i in range(0, len(parts), 2): 84 | group = parts[i : i + 2] 85 | assert len(group) == 2, parts 86 | assert isinstance(group[0], str) 87 | assert group[0] in ['and', 'not'], parts 88 | assert isinstance(group[1], list), parts 89 | 90 | # Type narrowing 91 | new_group: List[str] = [] 92 | for t in group[1]: 93 | if isinstance(t, str): 94 | new_group.append(t) 95 | continue 96 | 97 | color_print('Ignored conditional type: ', parts, color=Color.YELLOW) 98 | return None 99 | 100 | new_types = map( 101 | lambda t: remove_type_suffix(version_suffix, t), 102 | new_group, 103 | ) 104 | if group[0] == 'and': 105 | positive.extend(new_types) 106 | elif group[0] == 'not': 107 | negative.extend(new_types) 108 | 109 | return ConditionalType(positive, negative, False) 110 | 111 | 112 | def is_valid_cil_line(line: str): 113 | line = line.strip() 114 | 115 | if not line: 116 | return False 117 | 118 | if line.startswith('#'): 119 | return False 120 | 121 | if line.startswith(';'): 122 | return False 123 | 124 | return True 125 | 126 | 127 | def is_allow_process_sigchld(parts: raw_parts_list): 128 | return ( 129 | parts[0] == RuleType.ALLOW 130 | and len(parts) == 4 131 | and parts[3] == ['process', ['sigchld']] 132 | ) 133 | 134 | 135 | def unpack_ioctls(parts: raw_parts_list): 136 | # (. (range . .) ((range . .))) 137 | 138 | for part in parts: 139 | if isinstance(part, str): 140 | yield part 141 | continue 142 | 143 | assert isinstance(part, list) 144 | 145 | if isinstance(part[0], list): 146 | part = part[0] 147 | 148 | assert part[0] == 'range' 149 | 150 | assert isinstance(part[1], str) 151 | start_ioctl = int(part[1], base=16) 152 | 153 | assert isinstance(part[2], str) 154 | end_ioctl = int(part[2], base=16) 155 | 156 | for n in range(start_ioctl, end_ioctl + 1): 157 | yield hex(n) 158 | 159 | 160 | class CilRuleType(str, Enum): 161 | ALLOWX = 'allowx' 162 | NEVERALLOWX = 'neverallowx' 163 | DONTAUDITX = 'dontauditx' 164 | EXPANDTYPEATTRIBUTE = 'expandtypeattribute' 165 | TYPEATTRIBUTE = 'typeattribute' 166 | TYPEATTRIBUTESET = 'typeattributeset' 167 | TYPETRANSITION = 'typetransition' 168 | 169 | 170 | unknown_rule_types: Set[str] = set( 171 | [ 172 | 'category', 173 | 'categoryorder', 174 | 'class', 175 | 'classcommon', 176 | 'classorder', 177 | 'handleunknown', 178 | 'mls', 179 | 'mlsconstrain', 180 | 'policycap', 181 | 'role', 182 | 'roleattribute', 183 | 'roletype', 184 | 'sensitivity', 185 | 'sensitivitycategory', 186 | 'sensitivityorder', 187 | 'sid', 188 | 'sidcontext', 189 | 'sidorder', 190 | 'fsuse', 191 | 'common', 192 | 'type', 193 | 'typealias', 194 | 'typealiasactual', 195 | 'user', 196 | 'userlevel', 197 | 'userrange', 198 | 'userrole', 199 | ] 200 | ) 201 | 202 | 203 | class CilRule(Rule): 204 | @classmethod 205 | def from_line( 206 | cls, 207 | line: str, 208 | conditional_types_map: Dict[str, ConditionalType], 209 | missing_generated_types: Set[str], 210 | genfs_rules: List[Rule], 211 | version: Optional[str], 212 | ) -> List[Rule]: 213 | def type_redirect(t: str): 214 | return ConditionalTypeRedirect( 215 | t, 216 | conditional_types_map, 217 | missing_generated_types, 218 | ) 219 | 220 | version_suffix = None 221 | if version is not None: 222 | version = version.replace('.', '_') 223 | version_suffix = f'_{version}' 224 | 225 | # Skip comments and empty lines 226 | if not is_valid_cil_line(line): 227 | return [] 228 | 229 | parts = unpack_line(line, '(', ')', ' ') 230 | if not parts: 231 | return [] 232 | 233 | assert isinstance(parts[0], str), line 234 | 235 | # Remove rules that don't have a meaningful source mapping 236 | if parts[0] in unknown_rule_types: 237 | return [] 238 | 239 | # Remove allow $3 $1:process sigchld as it is part of an ifelse 240 | # statement based on one of the parameters and it is not possible 241 | # to generate the checks for it as part of macro expansion 242 | if is_allow_process_sigchld(parts): 243 | return [] 244 | 245 | varargs: List[str] = [] 246 | 247 | match parts[0]: 248 | case ( 249 | RuleType.ALLOW.value 250 | | RuleType.NEVERALLOW.value 251 | | RuleType.AUDITALLOW.value 252 | | RuleType.DONTAUDIT.value 253 | ): 254 | # (allow a b (c (...))) 255 | assert len(parts) == 4, line 256 | assert len(parts[3]) == 2, line 257 | assert isinstance(parts[1], str), line 258 | assert isinstance(parts[2], str), line 259 | assert isinstance(parts[3][0], str), line 260 | assert isinstance(parts[3][1], list), line 261 | 262 | for part in parts[3][1]: 263 | assert isinstance(part, str) 264 | varargs.append(part) 265 | 266 | src = remove_type_suffix(version_suffix, parts[1]) 267 | if is_type_generated(src): 268 | src = type_redirect(src) 269 | 270 | dst = remove_type_suffix(version_suffix, parts[2]) 271 | if is_type_generated(dst): 272 | dst = type_redirect(dst) 273 | 274 | rule = Rule( 275 | parts[0], 276 | (src, dst, parts[3][0]), 277 | tuple(varargs), 278 | ) 279 | return [rule] 280 | case ( 281 | CilRuleType.ALLOWX.value 282 | | CilRuleType.NEVERALLOWX.value 283 | | CilRuleType.DONTAUDITX.value 284 | ): 285 | # (allowx a b (ioctl c (... (range . .) ((range . .))))) 286 | assert len(parts) == 4, line 287 | assert len(parts[3]) == 3, line 288 | assert isinstance(parts[1], str), line 289 | assert isinstance(parts[2], str), line 290 | assert isinstance(parts[3], list), line 291 | assert isinstance(parts[3][0], str), line 292 | assert parts[3][0] == 'ioctl', line 293 | assert isinstance(parts[3][1], str), line 294 | assert isinstance(parts[3][2], list), line 295 | 296 | for ioctl in unpack_ioctls(parts[3][2]): 297 | varargs.append(ioctl) 298 | 299 | src = remove_type_suffix(version_suffix, parts[1]) 300 | if is_type_generated(src): 301 | src = type_redirect(src) 302 | 303 | dst = remove_type_suffix(version_suffix, parts[2]) 304 | if is_type_generated(dst): 305 | dst = type_redirect(dst) 306 | 307 | if parts[0] == CilRuleType.ALLOWX.value: 308 | rule_type = RuleType.ALLOWXPERM.value 309 | elif parts[0] == CilRuleType.NEVERALLOWX.value: 310 | rule_type = RuleType.NEVERALLOWXPERM.value 311 | elif parts[0] == CilRuleType.DONTAUDITX.value: 312 | rule_type = RuleType.DONTAUDITXPERM.value 313 | else: 314 | assert False, line 315 | 316 | rule = Rule( 317 | rule_type, 318 | (src, dst, parts[3][1]), 319 | tuple(varargs), 320 | ) 321 | return [rule] 322 | case CilRuleType.TYPEATTRIBUTE.value: 323 | # (typeattribute a) 324 | assert len(parts) == 2, line 325 | assert isinstance(parts[1], str), line 326 | 327 | # Remove generated typeattribute as it does not map to a source rule 328 | if is_type_generated(parts[1]): 329 | return [] 330 | 331 | t = remove_type_suffix(version_suffix, parts[1]) 332 | 333 | # Rename typeattribute to attribute to match source 334 | # typeattribute rules in source expand to typeattributeset, 335 | # while attribute rules expand to typeattribute 336 | rule = Rule( 337 | RuleType.ATTRIBUTE.value, 338 | (t,), 339 | (), 340 | ) 341 | return [rule] 342 | case CilRuleType.TYPEATTRIBUTESET.value: 343 | assert isinstance(parts[1], str), line 344 | v = remove_type_suffix(version_suffix, parts[1]) 345 | 346 | # Process conditional types and add them to a map to be replaced 347 | # into the other rules later 348 | if is_conditional_typeattr(parts[2]): 349 | assert isinstance(parts[2], list) 350 | 351 | conditional_type = create_conditional_type( 352 | version_suffix, 353 | parts[2], 354 | ) 355 | if conditional_type is None: 356 | return [] 357 | 358 | assert v not in conditional_types_map 359 | conditional_types_map[v] = conditional_type 360 | return [] 361 | 362 | # Expand typeattributeset into multiple typeattribute rules 363 | expanded_rules: List[Rule] = [] 364 | 365 | for t in parts[2]: 366 | assert isinstance(t, str) 367 | t = remove_type_suffix(version_suffix, t) 368 | 369 | rule = Rule( 370 | RuleType.TYPEATTRIBUTE.value, 371 | (t, v), 372 | (), 373 | ) 374 | expanded_rules.append(rule) 375 | 376 | return expanded_rules 377 | case RuleType.GENFSCON.value: 378 | # (genfscon sysfs /kernel/aov (u object_r sysfs_adspd ((s0) (s0)))) 379 | assert len(parts) == 4, line 380 | assert len(parts[3]) == 4, line 381 | assert len(parts[3][3]) == 2, line 382 | assert len(parts[3][3][0]) == 1, line 383 | assert len(parts[3][3][1]) == 1, line 384 | assert isinstance(parts[1], str), line 385 | assert isinstance(parts[2], str), line 386 | assert isinstance(parts[3][2], str), line 387 | 388 | rule = Rule( 389 | parts[0], 390 | (parts[1], parts[2], parts[3][2]), 391 | (), 392 | ) 393 | genfs_rules.append(rule) 394 | return [] 395 | case CilRuleType.TYPETRANSITION.value: 396 | # (typetransition a b c d) 397 | # (typetransition a b c "[userfaultfd]" d) 398 | assert len(parts) in [5, 6], line 399 | assert isinstance(parts[1], str), line 400 | assert isinstance(parts[2], str), line 401 | assert isinstance(parts[3], str), line 402 | assert isinstance(parts[-1], str), line 403 | 404 | if len(parts) == 6: 405 | assert isinstance(parts[4], str), line 406 | # assert parts[4] == '"[userfaultfd]"', line 407 | varargs = [parts[4]] 408 | else: 409 | varargs = [] 410 | 411 | src = remove_type_suffix(version_suffix, parts[1]) 412 | if is_type_generated(src): 413 | src = type_redirect(src) 414 | 415 | dst = remove_type_suffix(version_suffix, parts[2]) 416 | if is_type_generated(dst): 417 | src = type_redirect(dst) 418 | 419 | rule = Rule( 420 | RuleType.TYPE_TRANSITION.value, 421 | (src, dst, parts[3], parts[-1]), 422 | tuple(varargs), 423 | ) 424 | return [rule] 425 | case CilRuleType.EXPANDTYPEATTRIBUTE.value: 426 | # (expandtypeattribute (a) true) 427 | assert len(parts) == 3, line 428 | assert isinstance(parts[1], list), line 429 | assert len(parts[1]) == 1, line 430 | assert isinstance(parts[1][0], str), line 431 | assert isinstance(parts[2], str), line 432 | 433 | rule = Rule( 434 | RuleType.EXPANDATTRIBUTE.value, 435 | (parts[1][0], parts[2]), 436 | (), 437 | ) 438 | return [rule] 439 | case _: 440 | assert False, line 441 | -------------------------------------------------------------------------------- /sepolicy/class_set.py: -------------------------------------------------------------------------------- 1 | from typing import Set 2 | 3 | 4 | class ClassSet: 5 | def __init__(self, values: Set[str]): 6 | self.__values = values 7 | self.__hash_values = frozenset(values) 8 | self.__hash = hash(self.__hash_values) 9 | 10 | def __eq__(self, other: object): 11 | if not isinstance(other, ClassSet): 12 | return False 13 | 14 | if self.__hash != other.__hash: 15 | return False 16 | 17 | return self.__hash_values == other.__hash_values 18 | 19 | def __hash__(self): 20 | return self.__hash 21 | 22 | def __str__(self): 23 | sorted_values = sorted(self.__values) 24 | if len(sorted_values) == 1: 25 | return sorted_values[0] 26 | 27 | class_sets = ' '.join(sorted_values) 28 | return f'{{ {class_sets} }}' 29 | -------------------------------------------------------------------------------- /sepolicy/classmap.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import json 7 | import subprocess 8 | from pathlib import Path 9 | from tempfile import TemporaryDirectory 10 | from typing import Dict, List 11 | 12 | SELINUX_INCLUDE_PATH = 'security/selinux/include/' 13 | SCRIPT_PATH = Path(__file__).parent.resolve() 14 | CLASSMAP_GENERATOR_C_PATH = Path(SCRIPT_PATH, 'classmap_generator.c') 15 | 16 | 17 | def extract_classmap(selinux_include_path: str): 18 | with TemporaryDirectory() as tmp_path: 19 | classmap_generator_path = Path(tmp_path, 'classmap_generator') 20 | 21 | subprocess.check_call( 22 | [ 23 | 'gcc', 24 | '-I', 25 | selinux_include_path, 26 | CLASSMAP_GENERATOR_C_PATH, 27 | '-o', 28 | classmap_generator_path, 29 | ] 30 | ) 31 | 32 | program_output = subprocess.check_output( 33 | [ 34 | classmap_generator_path, 35 | ] 36 | ) 37 | return json.loads(program_output) 38 | 39 | 40 | class Classmap: 41 | def __init__(self, selinux_include_path: str): 42 | class_perms_map = extract_classmap(selinux_include_path) 43 | 44 | self.__class_index_map: Dict[str, int] = {} 45 | self.__class_perms_index_map: Dict[str, Dict[str, int]] = {} 46 | 47 | for index, class_name in enumerate(class_perms_map.keys()): 48 | self.__class_index_map[class_name] = index 49 | 50 | for perm_index, perm_name in enumerate(class_perms_map[class_name]): 51 | self.__class_perms_index_map.setdefault(class_name, {}) 52 | self.__class_perms_index_map[class_name][perm_name] = perm_index 53 | 54 | def class_types(self, t: str): 55 | for key in self.__class_index_map: 56 | if key.endswith(t): 57 | yield key 58 | 59 | def class_perms(self, class_name: str): 60 | return list(self.__class_perms_index_map[class_name].keys()) 61 | 62 | def class_index(self, class_name: str): 63 | default = len(self.__class_index_map) 64 | return self.__class_index_map.get(class_name, default) 65 | 66 | def perm_index(self, class_name: str, perm_name: str): 67 | if class_name not in self.__class_index_map: 68 | return 0 69 | 70 | perms_map = self.__class_perms_index_map[class_name] 71 | 72 | if perm_name not in perms_map: 73 | return len(perms_map) 74 | 75 | return perms_map[perm_name] 76 | 77 | def sort_classes(self, classes: List[str]): 78 | classes.sort(key=lambda c: self.class_index(c)) 79 | 80 | def sort_perms(self, class_name: str, perms: List[str]): 81 | if perms == ['*']: 82 | perms[:] = self.class_perms(class_name) 83 | return 84 | 85 | # Remove duplicates 86 | perms[:] = list(dict.fromkeys(perms)) 87 | 88 | perms.sort(key=lambda p: self.perm_index(class_name, p)) 89 | -------------------------------------------------------------------------------- /sepolicy/classmap_generator.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | struct security_class_mapping { 5 | const char *name; 6 | const char *perms[sizeof(uint32_t) * 8 + 1]; 7 | }; 8 | 9 | #include 10 | 11 | int main(void) { 12 | printf("{\n"); 13 | for (int i = 0; secclass_map[i].name != NULL; i++) { 14 | printf(" \"%s\": [", secclass_map[i].name); 15 | for (int j = 0; secclass_map[i].perms[j] != NULL; j++) { 16 | printf("\"%s\"", secclass_map[i].perms[j]); 17 | if (secclass_map[i].perms[j + 1] != NULL) 18 | printf(", "); 19 | } 20 | printf("]"); 21 | if (secclass_map[i + 1].name != NULL) 22 | printf(","); 23 | printf("\n"); 24 | } 25 | printf("}\n"); 26 | return 0; 27 | } 28 | -------------------------------------------------------------------------------- /sepolicy/conditional_type.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from abc import ABC, abstractmethod 7 | from typing import Dict, Hashable, List, Set 8 | 9 | from utils import Color, color_print 10 | 11 | 12 | class IConditionalType(ABC): 13 | @abstractmethod 14 | def __eq__(self, other: object) -> bool: ... 15 | 16 | @abstractmethod 17 | def __hash__(self) -> int: ... 18 | 19 | @abstractmethod 20 | def __str__(self) -> str: ... 21 | 22 | @property 23 | @abstractmethod 24 | def hash(self) -> int: ... 25 | 26 | @property 27 | @abstractmethod 28 | def hash_values(self) -> Hashable: ... 29 | 30 | @property 31 | @abstractmethod 32 | def positive(self) -> List[str]: ... 33 | 34 | @property 35 | @abstractmethod 36 | def negative(self) -> List[str]: ... 37 | 38 | @property 39 | @abstractmethod 40 | def is_all(self) -> bool: ... 41 | 42 | 43 | class ConditionalType(IConditionalType): 44 | def __init__(self, positive: List[str], negative: List[str], is_all: bool): 45 | self.__positive = positive 46 | self.__negative = negative 47 | self.__is_all = is_all 48 | self.__hash_values = tuple( 49 | [ 50 | frozenset(positive), 51 | frozenset(negative), 52 | is_all, 53 | ], 54 | ) 55 | self.__hash = hash(self.__hash_values) 56 | 57 | @property 58 | def hash(self): 59 | return self.__hash 60 | 61 | @property 62 | def hash_values(self) -> Hashable: 63 | return self.__hash_values 64 | 65 | @property 66 | def positive(self): 67 | return self.__positive 68 | 69 | @property 70 | def negative(self): 71 | return self.__negative 72 | 73 | @property 74 | def is_all(self): 75 | return self.__is_all 76 | 77 | def __eq__(self, other: object): 78 | if not isinstance(other, IConditionalType): 79 | return False 80 | 81 | if self.__hash != other.hash: 82 | return False 83 | 84 | return self.__hash_values == other.hash_values 85 | 86 | def __hash__(self): 87 | return self.__hash 88 | 89 | def __str__(self): 90 | if self.__is_all: 91 | return '*' 92 | 93 | s = '' 94 | if self.__positive: 95 | s += '{' 96 | for v in self.__positive: 97 | s += f' {v}' 98 | for v in self.__negative: 99 | s += f' -{v}' 100 | s += ' }' 101 | elif self.__negative: 102 | s += '~' 103 | 104 | if len(self.__negative) > 1: 105 | s += '{' 106 | for v in self.__negative: 107 | s += f' {v}' 108 | if len(self.__negative) > 1: 109 | s += ' }' 110 | 111 | return s 112 | 113 | 114 | class ConditionalTypeRedirect(IConditionalType): 115 | def __init__(self, t: str, m: Dict[str, ConditionalType], i: Set[str]): 116 | self.__t = t 117 | self.__m = m 118 | self.__i = i 119 | 120 | # TODO: is it necessary to do comparisons by actual value, or is it 121 | # enough to compare the generated type name 122 | 123 | def __get_c(self): 124 | if self.__t not in self.__m: 125 | if self.__t not in self.__i: 126 | color_print( 127 | f'Generated type {self.__t} not found', 128 | color=Color.YELLOW, 129 | ) 130 | self.__i.add(self.__t) 131 | return None 132 | 133 | return self.__m[self.__t] 134 | 135 | @property 136 | def hash(self): 137 | c = self.__get_c() 138 | if c is None: 139 | assert False 140 | return c.hash 141 | 142 | @property 143 | def hash_values(self) -> Hashable: 144 | c = self.__get_c() 145 | if c is None: 146 | assert False 147 | return c.hash_values 148 | 149 | @property 150 | def positive(self) -> List[str]: 151 | c = self.__get_c() 152 | if c is None: 153 | return [] 154 | 155 | return c.positive 156 | 157 | @property 158 | def negative(self) -> List[str]: 159 | c = self.__get_c() 160 | if c is None: 161 | return [] 162 | 163 | return c.positive 164 | 165 | @property 166 | def is_all(self): 167 | c = self.__get_c() 168 | if c is None: 169 | return False 170 | 171 | return c.is_all 172 | 173 | def __eq__(self, other: object): 174 | c = self.__get_c() 175 | if c is None: 176 | return self.__t == other 177 | 178 | return c == other 179 | 180 | def __hash__(self): 181 | c = self.__get_c() 182 | if c is None: 183 | return hash(self.__t) 184 | 185 | return hash(c) 186 | 187 | def __str__(self): 188 | c = self.__get_c() 189 | if c is None: 190 | return self.__t 191 | 192 | return str(c) 193 | -------------------------------------------------------------------------------- /sepolicy/config.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | # Variables extracted from system/sepolicy/build/soong/policy.go 5 | 6 | from __future__ import annotations 7 | 8 | from typing import Dict, List, Optional, Tuple 9 | 10 | from conditional_type import ConditionalType 11 | from mld import MultiLevelDict 12 | from rule import Rule, RuleType, rule_part_or_varargs 13 | from utils import Color, color_print 14 | 15 | default_variables = { 16 | # MlsSens = 1 17 | 'mls_num_sens': '1', 18 | # MlsCats = 1024 19 | 'mls_num_cats': '1024', 20 | # TARGET_ARCH 21 | 'target_arch': 'arm64', 22 | 'target_with_asan': 'false', 23 | # WITH_DEXPREOPT 24 | 'target_with_dexpreopt': 'false', 25 | 'target_with_native_coverage': 'false', 26 | # TARGET_BUILD_VARIANT 27 | 'target_build_variant': 'user', 28 | 'target_full_treble': 'true', 29 | 'target_compatible_property': 'true', 30 | # BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW 31 | 'target_treble_sysprop_neverallow': 'true', 32 | # BUILD_BROKEN_ENFORCE_SYSPROP_OWNER 33 | 'target_enforce_sysprop_owner': 'true', 34 | 'target_exclude_build_test': 'false', 35 | # PRODUCT_REQUIRES_INSECURE_EXECMEM_FOR_SWIFTSHADER 36 | 'target_requires_insecure_execmem_for_swiftshader': 'false', 37 | # PRODUCT_SET_DEBUGFS_RESTRICTIONS 38 | 'target_enforce_debugfs_restriction': 'true', 39 | 'target_recovery': 'false', 40 | # BOARD_API_LEVEL 41 | 'target_board_api_level': '202404', 42 | } 43 | 44 | default_variables_match_rules: Dict[ 45 | str, 46 | Tuple[List[Optional[rule_part_or_varargs]], str, str], 47 | ] = { 48 | # TODO: fix 49 | # public/file.te 50 | # type asanwrapper_exec, exec_type, file_type; 51 | # type rules are compiled into typeattributeset and then split by us into 52 | # typeattribute 53 | 'target_with_asan': ( 54 | [ 55 | RuleType.TYPEATTRIBUTE.value, 56 | 'asanwrapper_exec', 57 | 'exec_type', 58 | (), 59 | ], 60 | 'true', 61 | 'false', 62 | ), 63 | # TODO: fix 64 | # public/domain.te 65 | # allow domain method_trace_data_file:dir create_dir_perms; 66 | 'target_with_native_coverage': ( 67 | [ 68 | RuleType.ALLOW.value, 69 | 'domain', 70 | 'method_trace_data_file', 71 | 'dir', 72 | # TODO: Try to extract the value of create_dir_perms automatically. 73 | # Currently it is not possible to extract these automatically because 74 | # the variables that we're trying to autodetect here need to be passed 75 | # to m4 for variable expansion, and the create_dir_perms macro is parsed 76 | # from the m4 result 77 | ( 78 | 'open', 79 | 'getattr', 80 | 'lock', 81 | 'watch', 82 | 'write', 83 | 'watch_reads', 84 | 'rmdir', 85 | 'reparent', 86 | 'ioctl', 87 | 'remove_name', 88 | 'add_name', 89 | 'create', 90 | 'rename', 91 | 'setattr', 92 | 'read', 93 | 'search', 94 | ), 95 | ], 96 | 'true', 97 | 'false', 98 | ), 99 | # TODO: fix 100 | # public/domain.te 101 | # allow domain su:fd use; 102 | 'target_build_variant': ( 103 | [ 104 | RuleType.ALLOW.value, 105 | 'allow', 106 | 'domain', 107 | 'su', 108 | 'fd', 109 | ('use',), 110 | ], 111 | 'userdebug', 112 | 'user', 113 | ), 114 | # public/te_macros 115 | # hal_client_domain: 116 | # allow $2 vendor_file:file { read open getattr execute map }; 117 | 'target_full_treble': ( 118 | [ 119 | RuleType.ALLOW.value, 120 | None, 121 | 'vendor_file', 122 | 'file', 123 | ( 124 | 'read', 125 | 'getattr', 126 | 'map', 127 | 'execute', 128 | 'open', 129 | ), 130 | ], 131 | 'false', 132 | 'true', 133 | ), 134 | # TODO: improve 135 | # public/property.te 136 | # vendor_internal_prop: 137 | # -> 138 | # type vendor_default_prop, property_type, vendor_property_type, vendor_internal_property_type; 139 | 'target_compatible_property': ( 140 | [ 141 | RuleType.TYPEATTRIBUTE.value, 142 | 'vendor_default_prop', 143 | 'vendor_internal_property_type', 144 | (), 145 | ], 146 | 'true', 147 | 'false', 148 | ), 149 | # public/te_macros 150 | # vendor_restricted_prop(build_prop) 151 | # -> 152 | # neverallow { coredomain -init } $1:property_service set; 153 | 'target_treble_sysprop_neverallow': ( 154 | [ 155 | RuleType.NEVERALLOW.value, 156 | ConditionalType( 157 | ['coredomain'], 158 | ['init'], 159 | False, 160 | ), 161 | None, 162 | 'property_service', 163 | ('set',), 164 | ], 165 | 'true', 166 | 'false', 167 | ), 168 | } 169 | 170 | 171 | def get_default_variables(mld: MultiLevelDict[Rule]): 172 | variables: Dict[str, str] = default_variables.copy() 173 | 174 | for variable_name, data in default_variables_match_rules.items(): 175 | match_keys, match_value, pass_value = data 176 | 177 | found = False 178 | for _ in mld.match(match_keys): 179 | found = True 180 | break 181 | 182 | if found: 183 | variables[variable_name] = match_value 184 | else: 185 | variables[variable_name] = pass_value 186 | 187 | color_print( 188 | f'Found variable {variable_name}={variables[variable_name]}', 189 | color=Color.GREEN, 190 | ) 191 | 192 | return variables 193 | -------------------------------------------------------------------------------- /sepolicy/decompile_cil.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | from __future__ import annotations 6 | 7 | import os 8 | import shutil 9 | from argparse import ArgumentParser 10 | from functools import partial 11 | from itertools import chain 12 | from pathlib import Path 13 | from typing import Dict, List, Optional, Set, Tuple 14 | 15 | from cil_rule import CilRule 16 | from classmap import SELINUX_INCLUDE_PATH, Classmap 17 | from conditional_type import ConditionalType 18 | from config import get_default_variables 19 | from macro import ( 20 | categorize_macros, 21 | decompile_ioctl_defines, 22 | decompile_ioctls, 23 | decompile_macros, 24 | decompile_perms, 25 | expand_macro_bodies, 26 | macro_conditionals, 27 | macro_name, 28 | read_macros, 29 | resolve_macro_paths, 30 | split_macros_text_name_body, 31 | ) 32 | from match import ( 33 | RuleMatch, 34 | match_macro_rules, 35 | merge_class_sets, 36 | merge_ioctl_rules, 37 | merge_typeattribute_rules, 38 | replace_ioctls, 39 | replace_macro_rules, 40 | replace_perms, 41 | ) 42 | from match_extract import rule_extract_part_iter 43 | from mld import MultiLevelDict 44 | from output import ( 45 | group_rules, 46 | output_contexts, 47 | output_genfs_contexts, 48 | output_grouped_rules, 49 | ) 50 | from rule import RULE_DYNAMIC_PARTS_INDEX, Rule 51 | from utils import Color, color_print 52 | 53 | 54 | def print_macro_file_paths(macro_file_paths: List[str]): 55 | for macro_path in macro_file_paths: 56 | print(f'Loading macros from {macro_path}') 57 | 58 | 59 | def print_variable_ifelse(macros: List[str]): 60 | handled_variable_macro_ifelse = [ 61 | 'domain_trans', 62 | ] 63 | 64 | # Find conditional variables used in the input text 65 | # Conditional variables can be specified, but we need to know if the 66 | # macro arguments are used in them 67 | for macro in macros: 68 | name = macro_name(macro) 69 | if name in handled_variable_macro_ifelse: 70 | continue 71 | 72 | conditional_variables = macro_conditionals(macro) 73 | for conditional_variable in conditional_variables: 74 | if conditional_variable.startswith('$'): 75 | print( 76 | f'Macro {name} contains variable ifelse: {conditional_variable}' 77 | ) 78 | 79 | 80 | def rule_arity(rule: Rule): 81 | macro_rule_args = rule_extract_part_iter( 82 | rule.parts, 83 | rule.parts, 84 | ) 85 | assert macro_rule_args is not None 86 | return len(macro_rule_args) 87 | 88 | 89 | def sort_macros(macros: List[Tuple[str, List[Rule]]]): 90 | # Inside the macro, prefer rules with higher arity to help 91 | # the arg matching algorithm 92 | for macro in macros: 93 | rules = macro[1] 94 | rules.sort(key=rule_arity, reverse=True) 95 | 96 | 97 | def decompile_cil( 98 | cil_path: str, 99 | conditional_types_map: Dict[str, ConditionalType], 100 | missing_generated_types: Set[str], 101 | version: Optional[str], 102 | ): 103 | cil_data = Path(cil_path).read_text() 104 | cil_lines = cil_data.splitlines() 105 | 106 | genfs_rules: List[Rule] = [] 107 | 108 | # Convert lines to rules 109 | fn = partial( 110 | CilRule.from_line, 111 | conditional_types_map=conditional_types_map, 112 | missing_generated_types=missing_generated_types, 113 | genfs_rules=genfs_rules, 114 | version=version, 115 | ) 116 | rules = list(chain.from_iterable(map(fn, cil_lines))) 117 | 118 | return rules, genfs_rules 119 | 120 | 121 | def get_selinux_dir_policy(selinux_dir: str): 122 | platform_policy_path = Path(selinux_dir, 'plat_pub_versioned.cil') 123 | assert platform_policy_path.exists() 124 | 125 | policy_path = Path(selinux_dir, 'vendor_sepolicy.cil') 126 | assert policy_path.exists() 127 | 128 | policy_version_path = Path(selinux_dir, 'plat_sepolicy_vers.txt') 129 | assert policy_version_path.exists() 130 | 131 | policy_version = policy_version_path.read_text().strip() 132 | 133 | return str(platform_policy_path), str(policy_path), policy_version 134 | 135 | 136 | if __name__ == '__main__': 137 | parser = ArgumentParser( 138 | prog='decompile_cil.py', 139 | description='Decompile CIL files', 140 | ) 141 | parser.add_argument( 142 | '--policy-version', action='store', help='Version string (eg: 31.0)' 143 | ) 144 | parser.add_argument( 145 | '--platform-policy', 146 | action='store', 147 | help='Path to platform policy (eg: vendor/etc/selinux/plat_pub_versioned.cil)', 148 | ) 149 | parser.add_argument( 150 | '--policy', 151 | action='store', 152 | help='Path to policy (eg: vendor/etc/selinux/vendor_sepolicy.cil)', 153 | ) 154 | parser.add_argument( 155 | '-s', 156 | '--selinux', 157 | action='store', 158 | help='Path to selinux directory (eg: vendor/etc/selinux)', 159 | ) 160 | parser.add_argument( 161 | '-m', 162 | '--macros', 163 | action='append', 164 | default=[], 165 | help='Path to directories or files containing macros', 166 | ) 167 | parser.add_argument( 168 | '-k', 169 | '--kernel', 170 | action='store', 171 | required=True, 172 | help='Path to kernel (external/selinux/python/sepolgen/src/share/perm_map)', 173 | ) 174 | parser.add_argument( 175 | '-v', 176 | '--var', 177 | action='append', 178 | default=[], 179 | help='Variable used when expanding macros', 180 | ) 181 | parser.add_argument( 182 | '-o', 183 | '--output', 184 | action='store', 185 | required=True, 186 | help='Output directory for the decompiled selinux', 187 | ) 188 | 189 | args = parser.parse_args() 190 | assert args.macros 191 | 192 | output_dir: str = args.output 193 | kernel_dir: str = args.kernel 194 | selinux_dir: Optional[str] = args.selinux 195 | 196 | if selinux_dir is None: 197 | assert args.platform_policy is not None 198 | platform_policy: str = args.platform_policy 199 | assert args.policy is not None 200 | policy: str = args.policy 201 | 202 | version: Optional[str] = args.policy_version 203 | else: 204 | platform_policy, policy, version = get_selinux_dir_policy( 205 | selinux_dir 206 | ) 207 | 208 | conditional_types_map: Dict[str, ConditionalType] = {} 209 | missing_generated_types: Set[str] = set() 210 | 211 | # Only load generated types from platform policy 212 | _, _ = decompile_cil( 213 | platform_policy, 214 | conditional_types_map, 215 | set(), 216 | version, 217 | ) 218 | 219 | rules, genfs_rules = decompile_cil( 220 | policy, 221 | conditional_types_map, 222 | missing_generated_types, 223 | version, 224 | ) 225 | 226 | mld: MultiLevelDict[Rule] = MultiLevelDict() 227 | for rule in rules: 228 | # Add partial matches to this rule 229 | # Start partial matching after the first key 230 | mld.add(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 231 | 232 | macro_file_paths = resolve_macro_paths(args.macros) 233 | 234 | print_macro_file_paths(macro_file_paths) 235 | 236 | input_text, macros_text = read_macros(macro_file_paths) 237 | 238 | print_variable_ifelse(macros_text) 239 | 240 | variables = get_default_variables(mld) 241 | 242 | for kv in args.var: 243 | k, v = kv.split('=') 244 | variables[k] = v 245 | 246 | expanded_macros_text = expand_macro_bodies( 247 | input_text, 248 | macros_text, 249 | variables, 250 | ) 251 | macros_name_body = split_macros_text_name_body(expanded_macros_text) 252 | 253 | expanded_macros, class_sets, perms, ioctls, ioctl_defines = ( 254 | categorize_macros(macros_name_body) 255 | ) 256 | decompiled_perms = decompile_perms(perms) 257 | decompiled_class_sets = decompile_perms(class_sets) 258 | decompiled_ioctls = decompile_ioctls(ioctls) 259 | decompiled_ioctl_defines = decompile_ioctl_defines(ioctl_defines) 260 | 261 | # classmap is needed to sort classes and perms to match the compiled 262 | # output 263 | selinux_include_path = Path(kernel_dir, SELINUX_INCLUDE_PATH).resolve() 264 | classmap = Classmap(str(selinux_include_path)) 265 | 266 | macros_name_rules = decompile_macros(classmap, expanded_macros) 267 | 268 | sort_macros(macros_name_rules) 269 | 270 | color_print(f'Total rules: {len(mld)}', color=Color.GREEN) 271 | 272 | all_rule_matches: Set[RuleMatch] = set() 273 | for name, rules in macros_name_rules: 274 | match_macro_rules( 275 | mld, 276 | name, 277 | rules, 278 | all_rule_matches, 279 | ) 280 | 281 | replace_macro_rules(mld, all_rule_matches) 282 | merge_typeattribute_rules(mld) 283 | merge_ioctl_rules(mld) 284 | 285 | replace_perms(mld, classmap, decompiled_perms) 286 | replace_ioctls(mld, decompiled_ioctls, decompiled_ioctl_defines) 287 | merge_class_sets(mld, decompiled_class_sets) 288 | 289 | # We can also merge target domains, but rules get long quickly 290 | # merge_target_domains(mld) 291 | 292 | color_print(f'Leftover rules: {len(mld)}', color=Color.GREEN) 293 | 294 | grouped_rules = group_rules(mld) 295 | 296 | shutil.rmtree(output_dir, ignore_errors=True) 297 | os.makedirs(output_dir) 298 | 299 | output_contexts(selinux_dir, output_dir) 300 | output_genfs_contexts(genfs_rules, output_dir) 301 | output_grouped_rules(grouped_rules, output_dir) 302 | 303 | # TODO: output app signing certificates 304 | -------------------------------------------------------------------------------- /sepolicy/macro.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | import subprocess 8 | from functools import partial 9 | from itertools import chain 10 | from pathlib import Path 11 | from typing import Dict, List, Set, Tuple 12 | 13 | from classmap import Classmap 14 | from rule import Rule, flatten_parts, unpack_line 15 | from source_rule import SourceRule 16 | from utils import Color, color_print, split_normalize_text 17 | 18 | MACRO_START = 'define(`' 19 | 20 | 21 | def split_macros(lines: List[str]): 22 | level = 0 23 | block = '' 24 | 25 | for line in lines: 26 | assert '#' not in line 27 | 28 | if level == 0 and not line.startswith(MACRO_START): 29 | continue 30 | 31 | for c in line: 32 | last_level = level 33 | if c == '(': 34 | level += 1 35 | elif c == ')': 36 | level -= 1 37 | elif c == '`': 38 | level += 1 39 | elif c == "'": 40 | level -= 1 41 | 42 | block += c 43 | 44 | if level == 0 and last_level != 0: 45 | block = block.strip() 46 | yield block 47 | block = '' 48 | 49 | 50 | def _macro_name(macro: str): 51 | assert macro.startswith(MACRO_START), macro 52 | assert macro.endswith(')'), macro 53 | macro = macro[len(MACRO_START) : -1] 54 | name, body = macro.split("'", 1) 55 | 56 | assert body[0] == ',', body 57 | body = body[1:] 58 | body = body.strip() 59 | 60 | return name, body 61 | 62 | 63 | def macro_name(macro: str): 64 | return _macro_name(macro)[0] 65 | 66 | 67 | def macro_name_body(macro: str): 68 | name, body = _macro_name(macro) 69 | 70 | assert body[0] == '`', body 71 | assert body[-1] == "'", body 72 | body = body[1:-1] 73 | body = body.strip() 74 | 75 | # Squash spaces together 76 | body = re.sub(r'\s+', ' ', body, flags=re.MULTILINE) 77 | 78 | # Add back newline between rules 79 | body = re.sub(r'; ', ';\n', body) 80 | 81 | before_strip = body 82 | body = body.strip() 83 | assert body == before_strip 84 | 85 | return name, body 86 | 87 | 88 | ifelse_variable_pattern = re.compile(r'ifelse\s*\(\s*([^,\s\)]+)') 89 | macro_arity_pattern = re.compile(r'\$([1-9][0-9]*)') 90 | 91 | 92 | def macro_conditionals(body: str): 93 | return ifelse_variable_pattern.findall(body) 94 | 95 | 96 | def macro_arity(body: str): 97 | found_args = macro_arity_pattern.finditer(body) 98 | used_args = set(int(m.group(1)) for m in found_args) 99 | return max(used_args) if used_args else 0 100 | 101 | 102 | def arity_dummy_args(arity: int): 103 | return ', '.join(f"`${i}'" for i in range(1, arity + 1)) 104 | 105 | 106 | def macro_name_call(macro: str): 107 | name = macro_name(macro) 108 | # TODO: move arity outside of this function to be able to use it 109 | # in other contexts 110 | # Maybe create a class for each macro? 111 | arity = macro_arity(macro) 112 | dummy_args = arity_dummy_args(arity) 113 | 114 | # Define a macro that expands to its expanded definition, quoted 115 | # Double quote the macro name to prevent its expansion 116 | return f"`define'(``{name}'', quote_start()\n{name}({dummy_args})\nquote_end())" 117 | 118 | 119 | def quote_char(c: str): 120 | change = 'changequote([,])' 121 | unchange = "changequote(`,')" 122 | return f'{change}[{change}{c}{unchange}]{unchange}' 123 | 124 | 125 | def expand_macro_bodies( 126 | input_text: str, 127 | macros: List[str], 128 | variables: Dict[str, str], 129 | ): 130 | macro_calls = list(map(macro_name_call, macros)) 131 | 132 | # Define macros used to change the quote format 133 | # This is used to add ` and ' around the expanded macro body 134 | # Macro expansion does not happen when the macro text is 135 | # quoted more than once 136 | # Use the standard way of defining macros so that the macro 137 | # definition functions can be re-used 138 | input_text += f""" 139 | define(`quote_start', {quote_char('`')}) 140 | define(`quote_end', {quote_char("'")}) 141 | """ 142 | 143 | # Concatenate all the new macro definition to be able to call m4 144 | # only once 145 | input_text += '\n'.join(macro_calls) 146 | 147 | variables_args_k_v = [('-D', f'{k}={v}') for k, v in variables.items()] 148 | variables_args = [x for p in variables_args_k_v for x in p] 149 | output_text = subprocess.check_output( 150 | ['m4', *variables_args], 151 | input=input_text, 152 | text=True, 153 | ) 154 | 155 | return output_text 156 | 157 | 158 | def split_macros_text_name_body(expanded_macros_text: str): 159 | expanded_macros_lines = split_normalize_text(expanded_macros_text) 160 | macros = list(split_macros(expanded_macros_lines)) 161 | return list(map(macro_name_body, macros)) 162 | 163 | 164 | def categorize_macros(macros_name_body: List[Tuple[str, str]]): 165 | expanded_macros: List[Tuple[str, str]] = [] 166 | class_sets: List[Tuple[str, str]] = [] 167 | perms: List[Tuple[str, str]] = [] 168 | ioctls: List[Tuple[str, str]] = [] 169 | ioctl_defines: List[Tuple[str, str]] = [] 170 | 171 | for name, body in macros_name_body: 172 | if not body: 173 | color_print(f'Empty macro {name}', color=Color.YELLOW) 174 | continue 175 | 176 | macro_tuple = (name, body) 177 | 178 | if body.startswith('0x'): 179 | ioctl_defines.append(macro_tuple) 180 | elif '_class_set' in name: 181 | class_sets.append(macro_tuple) 182 | elif '_perms' in name: 183 | perms.append(macro_tuple) 184 | elif '_ioctls' in name: 185 | ioctls.append(macro_tuple) 186 | else: 187 | expanded_macros.append(macro_tuple) 188 | 189 | return expanded_macros, class_sets, perms, ioctls, ioctl_defines 190 | 191 | 192 | # Order is extracted from system/sepolicy/build/soong/policy.go 193 | SEPOLICY_FILES = [ 194 | 'flagging/flagging_macros', 195 | 'public/global_macros', 196 | 'public/neverallow_macros', 197 | 'public/te_macros', 198 | 'public/ioctl_defines', 199 | 'public/ioctl_macros', 200 | ] 201 | 202 | 203 | def resolve_macro_paths(macro_paths: List[str]): 204 | macro_file_paths: List[str] = [] 205 | for macro_path in macro_paths: 206 | mp = Path(macro_path) 207 | if mp.is_file(): 208 | macro_file_paths.append(str(mp.resolve())) 209 | continue 210 | 211 | if not mp.is_dir(): 212 | continue 213 | 214 | for file_path in SEPOLICY_FILES: 215 | fp = Path(macro_path, file_path) 216 | if fp.is_file(): 217 | macro_file_paths.append(str(fp.resolve())) 218 | 219 | return macro_file_paths 220 | 221 | 222 | def read_macros(macro_file_paths: List[str]) -> Tuple[str, List[str]]: 223 | # Join all the macro files 224 | input_text = '' 225 | for macro_path in macro_file_paths: 226 | input_text += Path(macro_path).read_text() 227 | input_text += '\n' 228 | 229 | # Split into lines, remove empty lines and commented lines 230 | input_text_lines = split_normalize_text(input_text) 231 | input_text = ''.join(input_text_lines) 232 | 233 | # After merging all the input files, split them into top-level 234 | # macro definitions 235 | # TODO: it's not necessary to process the macros in their entirety, 236 | # it should be enough to look for define(`(...)', 237 | macros_text = list(split_macros(input_text_lines)) 238 | 239 | return input_text, macros_text 240 | 241 | 242 | def decompile_macros( 243 | classmap: Classmap, 244 | expanded_macros: List[Tuple[str, str]], 245 | ): 246 | from_line_fn = partial(SourceRule.from_line, classmap=classmap) 247 | 248 | expanded_macro_rules: List[Tuple[str, List[Rule]]] = [] 249 | for name, body in expanded_macros: 250 | lines = body.splitlines() 251 | 252 | try: 253 | rules = list(chain.from_iterable(map(from_line_fn, lines))) 254 | except ValueError: 255 | color_print(f'Invalid macro {name}', color=Color.YELLOW) 256 | continue 257 | 258 | expanded_macro_rules.append((name, rules)) 259 | 260 | return expanded_macro_rules 261 | 262 | 263 | def decompile_perms(perms: List[Tuple[str, str]]): 264 | decompiled_perms: List[Tuple[str, Set[str]]] = [] 265 | 266 | for name, text in perms: 267 | parts = unpack_line( 268 | text, 269 | '{', 270 | '}', 271 | ' ', 272 | open_by_default=True, 273 | ignored_chars=';', 274 | ) 275 | parts_set = set(flatten_parts(parts)) 276 | decompiled_perms.append((name, parts_set)) 277 | 278 | # Prioritize replacement of largest perm macros 279 | decompiled_perms.sort(key=lambda np: len(np[1]), reverse=True) 280 | 281 | return decompiled_perms 282 | 283 | 284 | def unpack_ioctl(part: str): 285 | if '-' not in part: 286 | yield hex(int(part, base=16)) 287 | return 288 | 289 | parts = part.split('-') 290 | assert len(parts) == 2 291 | 292 | start_ioctl = int(parts[0], base=16) 293 | end_ioctl = int(parts[1], base=16) 294 | 295 | for n in range(start_ioctl, end_ioctl + 1): 296 | yield hex(n) 297 | 298 | 299 | def decompile_ioctls(ioctls: List[Tuple[str, str]]): 300 | decompiled_ioctls: List[Tuple[str, Set[str]]] = [] 301 | 302 | for name, text in ioctls: 303 | parts = unpack_line( 304 | text, 305 | '{', 306 | '}', 307 | ' ', 308 | open_by_default=True, 309 | ignored_chars=';', 310 | ) 311 | flattened_parts = flatten_parts(parts) 312 | unpacked_ioctls = map(unpack_ioctl, flattened_parts) 313 | parts_set = set(chain.from_iterable(unpacked_ioctls)) 314 | decompiled_ioctls.append((name, parts_set)) 315 | 316 | return decompiled_ioctls 317 | 318 | 319 | def decompile_ioctl_defines(ioctl_defines: List[Tuple[str, str]]): 320 | decompiled_ioctl_defines: Dict[str, str] = {} 321 | 322 | for name, text in ioctl_defines: 323 | value = hex(int(text, base=16)) 324 | if value in decompiled_ioctl_defines: 325 | existing_name = decompiled_ioctl_defines[value] 326 | color_print( 327 | f'Ioctl {name}={value} already defined as {existing_name}', 328 | color=Color.YELLOW, 329 | ) 330 | continue 331 | 332 | decompiled_ioctl_defines[value] = name 333 | 334 | return decompiled_ioctl_defines 335 | -------------------------------------------------------------------------------- /sepolicy/match.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from typing import Dict, List, Optional, Set, Tuple 7 | 8 | from class_set import ClassSet 9 | from classmap import Classmap 10 | from conditional_type import ConditionalType 11 | from match_extract import ( 12 | args_type, 13 | merge_arg_values, 14 | rule_extract_part, 15 | rule_extract_part_iter, 16 | ) 17 | from match_replace import rule_replace_part_iter 18 | from mld import MultiLevelDict 19 | from rule import ( 20 | ALLOW_RULE_TYPES, 21 | CLASS_SETS_RULE_TYPES, 22 | IOCTL_RULE_TYPES, 23 | RULE_DYNAMIC_PARTS_INDEX, 24 | Rule, 25 | RuleType, 26 | rule_part, 27 | rule_part_or_varargs, 28 | ) 29 | from utils import Color, color_print 30 | 31 | 32 | class RuleMatch: 33 | def __init__( 34 | self, 35 | macro_name: str, 36 | rules: Set[Rule] = set(), 37 | arg_values: args_type = {}, 38 | ): 39 | self.macro_name = macro_name 40 | self.rules = rules 41 | self.arg_values = arg_values 42 | self.hash_values = tuple( 43 | [ 44 | self.macro_name, 45 | frozenset(self.arg_values.items()), 46 | ] 47 | ) 48 | self.hash = hash(self.hash_values) 49 | 50 | args = tuple(arg_values[k] for k in sorted(arg_values.keys())) 51 | self.macro = Rule(macro_name, args, (), is_macro=True) 52 | 53 | def filled_args(self): 54 | return self.arg_values.keys() 55 | 56 | def add_arg_values(self, arg_values: args_type) -> Optional[RuleMatch]: 57 | new_arg_values = merge_arg_values(self.arg_values, arg_values) 58 | if new_arg_values is None: 59 | return None 60 | 61 | return RuleMatch(self.macro_name, self.rules.copy(), new_arg_values) 62 | 63 | def add_rule(self, rule: Rule): 64 | self.rules.add(rule) 65 | 66 | def __hash__(self): 67 | return self.hash 68 | 69 | def __eq__(self, other: object): 70 | assert isinstance(other, RuleMatch) 71 | 72 | return self.hash_values == other.hash_values 73 | 74 | def __str__(self): 75 | return str(self.macro) 76 | 77 | 78 | def rule_match_keys(rule: Rule, is_match_keys_full: bool): 79 | match_keys: List[Optional[rule_part_or_varargs]] = [rule.rule_type] 80 | 81 | for part in rule.parts: 82 | # A fully filled rule doesn't need to have its parts tested 83 | # to check if they need to be filled 84 | if is_match_keys_full: 85 | match_keys.append(part) 86 | continue 87 | 88 | # Match part to itself to see if it has any args 89 | part_args_values = rule_extract_part(part, part) 90 | 91 | if part_args_values: 92 | match_keys.append(None) 93 | else: 94 | match_keys.append(part) 95 | 96 | match_keys.append(rule.varargs) 97 | 98 | return match_keys 99 | 100 | 101 | def rule_fill(rule: Rule, arg_values: args_type): 102 | new_parts = rule_replace_part_iter(rule.parts, arg_values) 103 | if new_parts is None: 104 | return None 105 | 106 | return Rule(rule.rule_type, tuple(new_parts), rule.varargs) 107 | 108 | 109 | def match_macro_rule( 110 | mld: MultiLevelDict[Rule], 111 | macro_rule: Rule, 112 | rule_matches: Set[RuleMatch], 113 | ): 114 | print(f'Processing rule: {macro_rule}') 115 | 116 | macro_rule_args = rule_extract_part_iter( 117 | macro_rule.parts, 118 | macro_rule.parts, 119 | ) 120 | assert macro_rule_args is not None 121 | 122 | # Check if this rule requires only already completed args 123 | rule_match = next(iter(rule_matches)) 124 | is_match_keys_full = macro_rule_args.keys() <= rule_match.filled_args() 125 | 126 | new_rule_matches: Set[RuleMatch] = set() 127 | for rule_match in rule_matches: 128 | # print(f'Initial args: {rule_match.arg_values}') 129 | 130 | # TODO: make rule args extraction build a path that can be used for 131 | # filling no matter the args 132 | filled_rule = rule_fill(macro_rule, rule_match.arg_values) 133 | if filled_rule is None: 134 | continue 135 | 136 | # print(f'Filled rule: {filled_rule}') 137 | 138 | match_keys = rule_match_keys(filled_rule, is_match_keys_full) 139 | # print(f'Match keys: {match_keys}') 140 | 141 | for matched_rule in mld.match(match_keys): 142 | # print(f'Matched rule: {matched_rule}') 143 | 144 | # If the rule is fully filled don't expand the matches 145 | if is_match_keys_full: 146 | rule_match.add_rule(matched_rule) 147 | new_rule_matches.add(rule_match) 148 | # print() 149 | break 150 | 151 | new_args_values = rule_extract_part_iter( 152 | filled_rule.parts, 153 | matched_rule.parts, 154 | ) 155 | if new_args_values is None: 156 | continue 157 | 158 | new_rule_match = rule_match.add_arg_values(new_args_values) 159 | if new_rule_match is None: 160 | continue 161 | 162 | new_rule_match.add_rule(matched_rule) 163 | new_rule_matches.add(new_rule_match) 164 | 165 | return new_rule_matches 166 | 167 | 168 | def match_macro_rules( 169 | mld: MultiLevelDict[Rule], 170 | macro_name: str, 171 | macro_rules: List[Rule], 172 | all_rule_matches: Set[RuleMatch], 173 | ): 174 | print(f'Processing macro: {macro_name}') 175 | 176 | rule_matches: Set[RuleMatch] = set([RuleMatch(macro_name)]) 177 | for macro_rule in macro_rules: 178 | new_rule_matches = match_macro_rule( 179 | mld, 180 | macro_rule, 181 | rule_matches, 182 | ) 183 | print(f'Found {len(new_rule_matches)} candidates') 184 | if not len(new_rule_matches): 185 | print() 186 | return 187 | 188 | rule_matches = new_rule_matches 189 | 190 | all_rule_matches.update(rule_matches) 191 | 192 | print(f'Found {len(rule_matches)} macro calls') 193 | print() 194 | 195 | 196 | def replace_macro_rules( 197 | mld: MultiLevelDict[Rule], 198 | all_rule_matches: Set[RuleMatch], 199 | ): 200 | color_print( 201 | f'All macros: {len(all_rule_matches)}', 202 | color=Color.GREEN, 203 | ) 204 | 205 | rule_matches_map: Dict[Rule, Set[RuleMatch]] = {} 206 | for rule_match in all_rule_matches: 207 | for rule in rule_match.rules: 208 | if rule not in rule_matches_map: 209 | rule_matches_map[rule] = set() 210 | rule_matches_map[rule].add(rule_match) 211 | 212 | discarded_rule_matches: Set[RuleMatch] = set() 213 | 214 | for rule_match in all_rule_matches: 215 | candidate_supersets: Optional[Set[RuleMatch]] = None 216 | 217 | for rule in rule_match.rules: 218 | rule_matches = rule_matches_map[rule] 219 | 220 | if candidate_supersets is None: 221 | candidate_supersets = rule_matches 222 | else: 223 | candidate_supersets = candidate_supersets & rule_matches 224 | 225 | assert candidate_supersets is not None 226 | 227 | candidate_supersets.remove(rule_match) 228 | 229 | for candidate in candidate_supersets: 230 | if rule_match.rules < candidate.rules or ( 231 | rule_match.rules == candidate.rules 232 | and len(rule_match.arg_values) > len(candidate.arg_values) 233 | ): 234 | discarded_rule_matches.add(rule_match) 235 | break 236 | 237 | color_print( 238 | f'Discarded subset macros: {len(discarded_rule_matches)}', 239 | color=Color.GREEN, 240 | ) 241 | 242 | for rule_match in discarded_rule_matches: 243 | all_rule_matches.remove(rule_match) 244 | 245 | removed_rules = 0 246 | double_removed_rules: Set[Rule] = set() 247 | for rule_match in all_rule_matches: 248 | for rule in rule_match.rules: 249 | try: 250 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 251 | removed_rules += 1 252 | except KeyError: 253 | if rule in double_removed_rules: 254 | continue 255 | 256 | color_print( 257 | f'Rule already removed: {rule}', 258 | color=Color.YELLOW, 259 | ) 260 | double_removed_rules.add(rule) 261 | 262 | rule = rule_match.macro 263 | mld.add(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 264 | 265 | color_print( 266 | f'Replaced {removed_rules} rules with {len(all_rule_matches)} macros', 267 | color=Color.GREEN, 268 | ) 269 | 270 | 271 | def merge_typeattribute_rules(mld: MultiLevelDict[Rule]): 272 | types: Dict[str, Set[str]] = {} 273 | 274 | removed_rules: Set[Rule] = set() 275 | for rule in mld.match((RuleType.TYPEATTRIBUTE.value, None, None, ())): 276 | t = rule.parts[0] 277 | v = rule.parts[1] 278 | 279 | assert isinstance(t, str) 280 | assert isinstance(v, str) 281 | 282 | if t not in types: 283 | types[t] = set() 284 | types[t].add(v) 285 | 286 | removed_rules.add(rule) 287 | 288 | for rule in removed_rules: 289 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 290 | 291 | for t, values in types.items(): 292 | new_rule = Rule( 293 | RuleType.TYPE.value, 294 | (t,), 295 | tuple(values), 296 | ) 297 | mld.add(new_rule.hash_values, new_rule, RULE_DYNAMIC_PARTS_INDEX) 298 | 299 | color_print( 300 | f'Merged {len(removed_rules)} typeattributes into {len(types)} types', 301 | color=Color.GREEN, 302 | ) 303 | 304 | 305 | def merge_ioctl_rules(mld: MultiLevelDict[Rule]): 306 | rules_dict: Dict[Tuple[rule_part, ...], Set[Rule]] = {} 307 | for rule_type in IOCTL_RULE_TYPES: 308 | match_tuple = (rule_type.value, None, None, None, None) 309 | for matched_rule in mld.match(match_tuple): 310 | # Keep varargs out of the key 311 | key = (matched_rule.rule_type, *matched_rule.parts) 312 | 313 | if key not in rules_dict: 314 | rules_dict[key] = set() 315 | 316 | rules_dict[key].add(matched_rule) 317 | 318 | removed_rules = 0 319 | added_rules = 0 320 | for rules in rules_dict.values(): 321 | if len(rules) == 1: 322 | continue 323 | 324 | merged_varargs: Set[str] = set() 325 | for rule in rules: 326 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 327 | merged_varargs.update(rule.varargs) 328 | removed_rules += 1 329 | 330 | matched_rule = next(iter(rules)) 331 | new_rule = Rule( 332 | matched_rule.rule_type, 333 | matched_rule.parts, 334 | tuple(merged_varargs), 335 | ) 336 | mld.add(new_rule.hash_values, new_rule, RULE_DYNAMIC_PARTS_INDEX) 337 | added_rules += 1 338 | 339 | color_print( 340 | f'Merged {removed_rules} rules into {added_rules} ioctl rules', 341 | color=Color.GREEN, 342 | ) 343 | 344 | 345 | def replace_perms_set( 346 | perms: List[Tuple[str, Set[str]]], 347 | class_all_perms: Set[str], 348 | rule_varargs_set: Set[str], 349 | ): 350 | if class_all_perms == rule_varargs_set: 351 | return set(['*']) 352 | 353 | varargs_set = rule_varargs_set 354 | for name, caps in perms: 355 | if caps <= varargs_set: 356 | varargs_set = varargs_set - caps 357 | varargs_set.add(name) 358 | # TODO: find out if there are cases of multiple 359 | # perms 360 | break 361 | 362 | return varargs_set 363 | 364 | 365 | def replace_type_perm( 366 | mld: MultiLevelDict[Rule], 367 | classmap: Classmap, 368 | perms: List[Tuple[str, Set[str]]], 369 | classes: List[str], 370 | removed_rules: Set[Rule], 371 | added_rules: Set[Rule], 372 | ): 373 | for rule_type in ALLOW_RULE_TYPES: 374 | for c in classes: 375 | match_tuple = (rule_type.value, None, None, c, None) 376 | class_all_perms = set(classmap.class_perms(c)) 377 | 378 | for matched_rule in mld.match(match_tuple): 379 | rule_varargs_set = set(matched_rule.varargs) 380 | varargs_set = replace_perms_set( 381 | perms, 382 | class_all_perms, 383 | rule_varargs_set, 384 | ) 385 | 386 | if varargs_set == rule_varargs_set: 387 | continue 388 | 389 | new_rule = Rule( 390 | matched_rule.rule_type, 391 | matched_rule.parts, 392 | tuple(varargs_set), 393 | ) 394 | added_rules.add(new_rule) 395 | removed_rules.add(matched_rule) 396 | 397 | 398 | def replace_perms( 399 | mld: MultiLevelDict[Rule], 400 | classmap: Classmap, 401 | all_perms: List[Tuple[str, Set[str]]], 402 | ): 403 | file_classes = list(classmap.class_types('file')) 404 | dir_classes = list(classmap.class_types('dir')) 405 | socket_classes = list(classmap.class_types('socket')) 406 | 407 | file_perms: List[Tuple[str, Set[str]]] = [] 408 | dir_perms: List[Tuple[str, Set[str]]] = [] 409 | socket_perms: List[Tuple[str, Set[str]]] = [] 410 | 411 | for perm in all_perms: 412 | name = perm[0] 413 | 414 | if '_file_' in name: 415 | file_perms.append(perm) 416 | elif '_dir_' in name: 417 | dir_perms.append(perm) 418 | elif '_socket_' in name: 419 | socket_perms.append(perm) 420 | elif '_ipc_' in name: 421 | # _ipc_ perms are unused, don't bother 422 | continue 423 | else: 424 | assert False, perm 425 | 426 | removed_rules: Set[Rule] = set() 427 | added_rules: Set[Rule] = set() 428 | 429 | def _replace_type_perm( 430 | perms: List[Tuple[str, Set[str]]], 431 | classes: List[str], 432 | ): 433 | replace_type_perm( 434 | mld, 435 | classmap, 436 | perms, 437 | classes, 438 | removed_rules, 439 | added_rules, 440 | ) 441 | 442 | _replace_type_perm(file_perms, file_classes) 443 | _replace_type_perm(dir_perms, dir_classes) 444 | _replace_type_perm(socket_perms, socket_classes) 445 | 446 | for rule in removed_rules: 447 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 448 | 449 | for rule in added_rules: 450 | mld.add(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 451 | 452 | color_print( 453 | f'Replaced perm macros in {len(removed_rules)} rules', 454 | color=Color.GREEN, 455 | ) 456 | 457 | 458 | def replace_ioctls( 459 | mld: MultiLevelDict[Rule], 460 | ioctls: List[Tuple[str, Set[str]]], 461 | ioctl_defines: Dict[str, str], 462 | ): 463 | removed_rules: Set[Rule] = set() 464 | added_rules: Set[Rule] = set() 465 | 466 | for rule_type in IOCTL_RULE_TYPES: 467 | match_tuple = (rule_type.value, None, None, None, None) 468 | for matched_rule in mld.match(match_tuple): 469 | rule_varargs_set = set(matched_rule.varargs) 470 | 471 | varargs_set = rule_varargs_set 472 | for name, values in ioctls: 473 | if values <= varargs_set: 474 | varargs_set = varargs_set - values 475 | varargs_set.add(name) 476 | 477 | added_ioctls: Set[str] = set() 478 | removed_ioctls: Set[str] = set() 479 | for value in varargs_set: 480 | if value in ioctl_defines: 481 | removed_ioctls.add(value) 482 | added_ioctls.add(ioctl_defines[value]) 483 | 484 | if added_ioctls or removed_ioctls: 485 | varargs_set = varargs_set - removed_ioctls 486 | varargs_set = varargs_set | added_ioctls 487 | 488 | if varargs_set == rule_varargs_set: 489 | continue 490 | 491 | new_rule = Rule( 492 | matched_rule.rule_type, 493 | matched_rule.parts, 494 | tuple(varargs_set), 495 | ) 496 | added_rules.add(new_rule) 497 | removed_rules.add(matched_rule) 498 | 499 | for rule in removed_rules: 500 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 501 | 502 | for rule in added_rules: 503 | mld.add(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 504 | 505 | color_print( 506 | f'Replaced ioctl macros in {len(removed_rules)} rules', 507 | color=Color.GREEN, 508 | ) 509 | 510 | 511 | def merge_class_set_rule_type( 512 | mld: MultiLevelDict[Rule], 513 | rule_type: RuleType, 514 | class_sets: List[Tuple[str, Set[str]]], 515 | ): 516 | rules_dict: Dict[ 517 | Tuple[rule_part_or_varargs, ...], 518 | Tuple[Set[str], Set[Rule]], 519 | ] = {} 520 | 521 | match_tuple = (rule_type.value, None, None, None, None) 522 | for matched_rule in mld.match(match_tuple): 523 | # Keep class out of the key 524 | key = ( 525 | matched_rule.rule_type, 526 | matched_rule.parts[0], 527 | matched_rule.parts[1], 528 | matched_rule.varargs, 529 | ) 530 | if key not in rules_dict: 531 | rules_dict[key] = (set(), set()) 532 | 533 | # Gather all matched classes 534 | matched_cls = matched_rule.parts[2] 535 | assert isinstance(matched_cls, str) 536 | rules_dict[key][0].add(matched_cls) 537 | rules_dict[key][1].add(matched_rule) 538 | 539 | removed_rules = 0 540 | added_rules = 0 541 | 542 | for matched_classes, rules in rules_dict.values(): 543 | if len(matched_classes) == 1: 544 | continue 545 | 546 | new_classes = matched_classes 547 | for name, classes in class_sets: 548 | if classes <= new_classes: 549 | new_classes = new_classes - classes 550 | new_classes.add(name) 551 | 552 | for rule in rules: 553 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 554 | removed_rules += 1 555 | 556 | matched_rule = next(iter(rules)) 557 | new_rule = Rule( 558 | matched_rule.rule_type, 559 | tuple( 560 | [ 561 | matched_rule.parts[0], 562 | matched_rule.parts[1], 563 | ClassSet(new_classes), 564 | ] 565 | ), 566 | matched_rule.varargs, 567 | ) 568 | mld.add(new_rule.hash_values, new_rule, RULE_DYNAMIC_PARTS_INDEX) 569 | added_rules += 1 570 | 571 | return removed_rules, added_rules 572 | 573 | 574 | def merge_class_sets( 575 | mld: MultiLevelDict[Rule], 576 | class_sets: List[Tuple[str, Set[str]]], 577 | ): 578 | removed_rules = 0 579 | added_rules = 0 580 | for rule_type in CLASS_SETS_RULE_TYPES: 581 | new_removed_rules, new_added_rules = merge_class_set_rule_type( 582 | mld, 583 | rule_type, 584 | class_sets, 585 | ) 586 | removed_rules += new_removed_rules 587 | added_rules += new_added_rules 588 | 589 | color_print( 590 | f'Merged {removed_rules} rules into {added_rules} class set rules', 591 | color=Color.GREEN, 592 | ) 593 | 594 | 595 | def merge_target_domains_rule_type( 596 | mld: MultiLevelDict[Rule], 597 | rule_type: RuleType, 598 | ): 599 | rules_dict: Dict[ 600 | Tuple[rule_part_or_varargs, ...], 601 | Tuple[Set[str], Set[Rule]], 602 | ] = {} 603 | 604 | match_tuple = (rule_type.value, None, None, None, None) 605 | for matched_rule in mld.match(match_tuple): 606 | # Conditional types cannot be merged into another conditional type 607 | if not isinstance(matched_rule.parts[1], str): 608 | continue 609 | 610 | # Keep target domain out of the key 611 | key = ( 612 | matched_rule.rule_type, 613 | matched_rule.parts[0], 614 | matched_rule.parts[2], 615 | matched_rule.varargs, 616 | ) 617 | 618 | if key not in rules_dict: 619 | rules_dict[key] = (set(), set()) 620 | 621 | # Gather all target domains 622 | target_domains = matched_rule.parts[1] 623 | rules_dict[key][0].add(target_domains) 624 | rules_dict[key][1].add(matched_rule) 625 | 626 | removed_rules = 0 627 | added_rules = 0 628 | 629 | for matched_target_domains, rules in rules_dict.values(): 630 | if len(matched_target_domains) == 1: 631 | continue 632 | 633 | for rule in rules: 634 | mld.remove(rule.hash_values, rule, RULE_DYNAMIC_PARTS_INDEX) 635 | removed_rules += 1 636 | 637 | matched_rule = next(iter(rules)) 638 | # TODO: find other places where we should be sorting sets to keep an order 639 | target_domain = ConditionalType( 640 | sorted(matched_target_domains), 641 | [], 642 | False, 643 | ) 644 | new_rule = Rule( 645 | matched_rule.rule_type, 646 | tuple( 647 | [ 648 | matched_rule.parts[0], 649 | target_domain, 650 | matched_rule.parts[2], 651 | ] 652 | ), 653 | matched_rule.varargs, 654 | ) 655 | mld.add(new_rule.hash_values, new_rule, RULE_DYNAMIC_PARTS_INDEX) 656 | added_rules += 1 657 | 658 | return removed_rules, added_rules 659 | 660 | 661 | def merge_target_domains(mld: MultiLevelDict[Rule]): 662 | removed_rules = 0 663 | added_rules = 0 664 | for rule_type in CLASS_SETS_RULE_TYPES: 665 | new_removed_rules, new_added_rules = merge_target_domains_rule_type( 666 | mld, 667 | rule_type, 668 | ) 669 | removed_rules += new_removed_rules 670 | added_rules += new_added_rules 671 | 672 | color_print( 673 | f'Merged {removed_rules} rules into {added_rules} with target domains', 674 | color=Color.GREEN, 675 | ) 676 | -------------------------------------------------------------------------------- /sepolicy/match_extract.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | from functools import cache 8 | from itertools import permutations 9 | from typing import Dict, List, Optional, Tuple 10 | 11 | from conditional_type import IConditionalType 12 | from rule import rule_part 13 | 14 | args_type = Dict[int, rule_part] 15 | args_paths_type = Dict[int, List[List[int]]] 16 | 17 | macro_argument_regex = re.compile(r'\$(\d+)') 18 | 19 | 20 | @cache 21 | def part_extract_single_match_arg_index(part: str): 22 | # Assuming max arg index 9 23 | 24 | if len(part) != 2: 25 | return None 26 | 27 | if part[0] != '$': 28 | return None 29 | 30 | if not part[1].isdigit(): 31 | return None 32 | 33 | return int(part[1]) 34 | 35 | 36 | def merge_arg_values(a: Optional[args_type], b: Optional[args_type]): 37 | if a is None or b is None: 38 | return None 39 | 40 | if any(k in b and a[k] != b[k] for k in a): 41 | return None 42 | 43 | return a | b 44 | 45 | 46 | @cache 47 | def rule_part_str_regex(mrp: str): 48 | last_c = None 49 | in_arg = False 50 | arg_ended = False 51 | arg_index: Optional[int] = None 52 | arg_indices: List[int] = [] 53 | arg_positions: Dict[int, Tuple[int, int]] = {} 54 | 55 | regex_pattern = '' 56 | for i, c in enumerate(mrp): 57 | if c == '$': 58 | if last_c is not None: 59 | assert last_c == '_' 60 | 61 | in_arg = True 62 | 63 | continue 64 | 65 | # Assuming max arg index 9 66 | if in_arg: 67 | assert c.isdigit(), mrp 68 | arg_index = int(c) 69 | 70 | assert arg_index not in arg_positions 71 | arg_positions[arg_index] = (i - 1, i + 1) 72 | arg_indices.append(arg_index) 73 | 74 | in_arg = False 75 | arg_ended = True 76 | regex_pattern += '(.+)' 77 | 78 | continue 79 | 80 | regex_pattern += c 81 | 82 | if arg_ended: 83 | assert c == '_' 84 | arg_ended = False 85 | 86 | if not arg_indices: 87 | return arg_indices, None, None 88 | 89 | regex = re.compile(regex_pattern) 90 | 91 | # # Find all used argument indices in this macro part 92 | # old_arg_indices = [int(i) for i in macro_argument_regex.findall(mrp)] 93 | # for arg_index in old_arg_indices: 94 | # assert arg_index <= 9 95 | 96 | # # Escape the characters in this part of the macro rule 97 | # old_regex_pattern = re.escape(mrp) 98 | 99 | # # Replace escaped $arg with a capture group 100 | # old_regex_pattern = re.sub(r'\\\$(\d+)', r'(.+)', old_regex_pattern) 101 | 102 | # regex = re.compile(old_regex_pattern) 103 | 104 | # assert regex_pattern == old_regex_pattern 105 | # assert arg_indices == old_arg_indices 106 | 107 | return arg_indices, arg_positions, regex 108 | 109 | 110 | def rule_extract_part_str(mrp: str, rp: rule_part) -> Optional[args_type]: 111 | # Single match args can match any type 112 | arg_index = part_extract_single_match_arg_index(mrp) 113 | if arg_index is not None: 114 | return { 115 | arg_index: rp, 116 | } 117 | 118 | if not isinstance(rp, str): 119 | return None 120 | 121 | arg_indices, _, regex = rule_part_str_regex(mrp) 122 | if not arg_indices: 123 | return {} 124 | 125 | assert regex is not None 126 | regex_match = regex.match(rp) 127 | if not regex_match: 128 | return None 129 | 130 | arg_values: args_type = {} 131 | regex_match_groups = regex_match.groups() 132 | for arg_group_index, arg_index in enumerate(arg_indices): 133 | arg_value = regex_match_groups[arg_group_index] 134 | arg_values[arg_index] = arg_value 135 | 136 | return arg_values 137 | 138 | 139 | def rule_extract_part_set_str(mrp: List[str], rp: List[str]): 140 | if len(mrp) != len(rp): 141 | return None 142 | 143 | mrp_uniques = set(mrp) 144 | rp_uniques = set(rp) 145 | 146 | for mrp_part in mrp: 147 | # Match part to itself to see if it has any args 148 | arg_values = rule_extract_part_str(mrp_part, mrp_part) 149 | 150 | # If it has args, don't remove it 151 | if arg_values: 152 | continue 153 | 154 | # If it does not have args and is not present in the rule part, 155 | # fail extract 156 | if mrp_part not in rp_uniques: 157 | return None 158 | 159 | rp_uniques.remove(mrp_part) 160 | mrp_uniques.remove(mrp_part) 161 | 162 | rp_uniques_tuple = tuple(rp_uniques) 163 | 164 | arg_values_list: List[args_type] = [] 165 | for permuted_mrp in permutations(mrp_uniques): 166 | current_arg_values = rule_extract_part_iter( 167 | permuted_mrp, 168 | rp_uniques_tuple, 169 | ) 170 | if current_arg_values is None: 171 | continue 172 | 173 | arg_values_list.append(current_arg_values) 174 | 175 | # sets matching the same arguments in two different ways is unlikely 176 | # but not impossible 177 | num_arg_values = len(arg_values_list) 178 | assert num_arg_values <= 1 179 | 180 | if num_arg_values == 0: 181 | return None 182 | elif num_arg_values == 1: 183 | return arg_values_list[0] 184 | 185 | def rule_extract_part_cond(mrp: IConditionalType, rp: rule_part): 186 | if not isinstance(rp, IConditionalType): 187 | return None 188 | 189 | positive_arg_values = rule_extract_part_set_str(mrp.positive, rp.positive) 190 | negative_arg_values = rule_extract_part_set_str(mrp.negative, rp.negative) 191 | return merge_arg_values(positive_arg_values, negative_arg_values) 192 | 193 | 194 | def rule_extract_part(mrp: rule_part, rp: rule_part): 195 | if isinstance(mrp, str): 196 | return rule_extract_part_str(mrp, rp) 197 | else: 198 | assert isinstance(mrp, IConditionalType) 199 | return rule_extract_part_cond(mrp, rp) 200 | 201 | 202 | def rule_extract_part_iter( 203 | mrp_tuple: Tuple[rule_part, ...], 204 | rp_tuple: Tuple[rule_part, ...], 205 | ): 206 | if len(mrp_tuple) != len(rp_tuple): 207 | return None 208 | 209 | arg_values: Optional[args_type] = {} 210 | for mrp, rp in zip(mrp_tuple, rp_tuple): 211 | current_arg_values = rule_extract_part(mrp, rp) 212 | 213 | arg_values = merge_arg_values(arg_values, current_arg_values) 214 | if arg_values is None: 215 | return None 216 | 217 | return arg_values 218 | -------------------------------------------------------------------------------- /sepolicy/match_replace.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from typing import Iterable, List 7 | 8 | from conditional_type import ConditionalType 9 | from match_extract import ( 10 | args_type, 11 | part_extract_single_match_arg_index, 12 | rule_part_str_regex, 13 | ) 14 | from rule import rule_part 15 | 16 | 17 | def rule_replace_simple_str(mrp: str, arg_values: args_type): 18 | arg_indices, arg_positions, _ = rule_part_str_regex(mrp) 19 | if not arg_indices: 20 | return mrp 21 | 22 | assert arg_positions is not None 23 | for arg_index, arg_position in arg_positions.items(): 24 | if arg_index not in arg_values: 25 | continue 26 | 27 | arg_value = arg_values[arg_index] 28 | if not isinstance(arg_value, str): 29 | return None 30 | 31 | mrp = mrp[: arg_position[0]] + arg_value + mrp[arg_position[1] :] 32 | 33 | return mrp 34 | 35 | 36 | def rule_replace_part_str(mrp: str, arg_values: args_type): 37 | arg_index = part_extract_single_match_arg_index(mrp) 38 | if arg_index is not None: 39 | if arg_index not in arg_values: 40 | return mrp 41 | 42 | return arg_values[arg_index] 43 | 44 | return rule_replace_simple_str(mrp, arg_values) 45 | 46 | 47 | def rule_replace_part_set_str(mrp: List[str], arg_values: args_type): 48 | # Sets inside ConditionalType can only contain strings, and should 49 | # not match complex value 50 | new_parts: List[str] = [] 51 | replaced = False 52 | for part in mrp: 53 | new_part = rule_replace_simple_str(part, arg_values) 54 | if new_part is None: 55 | return None 56 | 57 | new_parts.append(new_part) 58 | if part != new_part: 59 | replaced = True 60 | 61 | if not replaced: 62 | return mrp 63 | 64 | return new_parts 65 | 66 | 67 | def rule_replace_part_cond(mrp: ConditionalType, arg_values: args_type): 68 | # It's impossible for conditional types to contain other conditional 69 | # types, match the sets like they would be simple lists of strings 70 | positive = rule_replace_part_set_str(mrp.positive, arg_values) 71 | if positive is None: 72 | return None 73 | 74 | negative = rule_replace_part_set_str(mrp.negative, arg_values) 75 | if negative is None: 76 | return None 77 | 78 | return ConditionalType(positive, negative, mrp.is_all) 79 | 80 | 81 | def rule_replace_part(mrp: rule_part, arg_values: args_type): 82 | if isinstance(mrp, str): 83 | return rule_replace_part_str(mrp, arg_values) 84 | else: 85 | assert isinstance(mrp, ConditionalType) 86 | return rule_replace_part_cond(mrp, arg_values) 87 | 88 | 89 | def rule_replace_part_iter( 90 | mrp_tuple: Iterable[rule_part], 91 | arg_values: args_type, 92 | ): 93 | filled_parts: List[rule_part] = [] 94 | for mrp in mrp_tuple: 95 | filled_mrp = rule_replace_part(mrp, arg_values) 96 | if filled_mrp is None: 97 | return None 98 | 99 | filled_parts.append(filled_mrp) 100 | 101 | return filled_parts 102 | -------------------------------------------------------------------------------- /sepolicy/mld.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import itertools 7 | from collections.abc import Hashable 8 | from typing import ( 9 | Dict, 10 | Generator, 11 | Generic, 12 | List, 13 | Sequence, 14 | Set, 15 | Tuple, 16 | TypeVar, 17 | Union, 18 | ) 19 | 20 | T = TypeVar('T') 21 | 22 | 23 | def tuples_with_nones( 24 | t: Sequence[Hashable], 25 | nones_start: int, 26 | ) -> Generator[Tuple[Union[Hashable, None], ...], None, None]: 27 | choices: List[Tuple[Union[Hashable, None], ...]] = [] 28 | for i, x in enumerate(t): 29 | if i < nones_start: 30 | choices.append((x,)) 31 | else: 32 | choices.append((x, None)) 33 | yield from itertools.product(*choices) 34 | 35 | 36 | class MultiLevelDict(Generic[T]): 37 | def __init__(self): 38 | self.__data: Dict[int, Dict[Tuple[Hashable, ...], Set[T]]] = {} 39 | self.__all_data: Set[T] = set() 40 | 41 | def __len__(self): 42 | return len(self.__all_data) 43 | 44 | def data(self): 45 | return self.__data 46 | 47 | def walk(self) -> Generator[T, None, None]: 48 | yield from self.__all_data 49 | 50 | def add( 51 | self, 52 | keys: Sequence[Hashable], 53 | value: T, 54 | nones_start: int = 0, 55 | ): 56 | self.__all_data.add(value) 57 | 58 | levels = len(keys) 59 | if levels not in self.__data: 60 | self.__data[levels] = {} 61 | 62 | levels_data = self.__data[levels] 63 | 64 | for t in tuples_with_nones(keys, nones_start): 65 | if t not in levels_data: 66 | levels_data[t] = set() 67 | 68 | levels_data[t].add(value) 69 | 70 | def remove( 71 | self, 72 | keys: Sequence[Hashable], 73 | value: T, 74 | nones_start: int = 0, 75 | ): 76 | self.__all_data.remove(value) 77 | 78 | levels = len(keys) 79 | assert levels in self.__data 80 | levels_data = self.__data[levels] 81 | 82 | for t in tuples_with_nones(keys, nones_start): 83 | levels_data[t].remove(value) 84 | 85 | def match( 86 | self, 87 | keys: Sequence[Hashable], 88 | ) -> Generator[T]: 89 | keys_tuple = tuple(keys) 90 | 91 | levels = len(keys_tuple) 92 | if levels not in self.__data: 93 | return 94 | 95 | levels_data = self.__data[levels] 96 | if keys_tuple not in levels_data: 97 | return 98 | 99 | yield from levels_data[keys_tuple] 100 | -------------------------------------------------------------------------------- /sepolicy/output.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | from functools import cache 8 | from pathlib import Path 9 | from typing import Dict, List, Optional, Set 10 | 11 | from mld import MultiLevelDict 12 | from rule import Rule, RuleType 13 | 14 | VENDOR_PREFIX = 'vendor_' 15 | PROPERTY_CONTEXTS_NAME = 'property_contexts' 16 | FILE_CONTEXTS_NAME = 'file_contexts' 17 | HWSERVICE_CONTEXTS_NAME = 'hwservice_contexts' 18 | SERVICE_CONTEXTS_NAME = 'service_contexts' 19 | SEAPP_CONTEXTS_NAME = 'seapp_contexts' 20 | GENFS_CONTEXTS_NAME = 'genfs_contexts' 21 | MAC_PERMISSIONS_NAME = 'mac_permissions.xml' 22 | KEYS_NAME = 'keys.conf' 23 | 24 | 25 | def copy_contexts(input_path: str, output_path: str): 26 | # TODO: align parts against eachother? 27 | 28 | lines: List[str] = [] 29 | with open(input_path, 'r') as file: 30 | for line in file.readlines(): 31 | line = line.strip() 32 | 33 | if not line: 34 | continue 35 | 36 | if line.startswith('#'): 37 | continue 38 | 39 | line = re.sub(r'\s+', ' ', line) 40 | lines.append(line) 41 | 42 | lines.sort() 43 | 44 | with open(output_path, 'w') as file: 45 | for line in lines: 46 | file.write(line) 47 | file.write('\n') 48 | 49 | 50 | def output_contexts(selinux_dir: Optional[str], output_dir: str): 51 | if selinux_dir is None: 52 | return 53 | 54 | for name in [ 55 | PROPERTY_CONTEXTS_NAME, 56 | FILE_CONTEXTS_NAME, 57 | HWSERVICE_CONTEXTS_NAME, 58 | SERVICE_CONTEXTS_NAME, 59 | SEAPP_CONTEXTS_NAME, 60 | ]: 61 | input_path = Path(selinux_dir, name) 62 | if not input_path.exists(): 63 | input_path = Path(selinux_dir, f'{VENDOR_PREFIX}{name}') 64 | 65 | if not input_path.exists(): 66 | continue 67 | 68 | output_path = Path(output_dir, name) 69 | copy_contexts(str(input_path), str(output_path)) 70 | 71 | 72 | def output_genfs_contexts(genfs_rules: List[Rule], output_dir: str): 73 | output_path = Path(output_dir, GENFS_CONTEXTS_NAME) 74 | with open(output_path, 'w') as o: 75 | for rule in genfs_rules: 76 | o.write(str(rule)) 77 | o.write('\n') 78 | 79 | 80 | @cache 81 | def extract_domain_type(domain: str): 82 | domain = re.sub(r'^vendor_', '', domain) 83 | domain = re.sub(r'_exec$', '', domain) 84 | domain = re.sub(r'_client$', '', domain) 85 | domain = re.sub(r'_server$', '', domain) 86 | domain = re.sub(r'_default$', '', domain) 87 | domain = re.sub(r'_hwservice$', '', domain) 88 | domain = re.sub(r'_service$', '', domain) 89 | domain = re.sub(r'_qti$', '', domain) 90 | return domain 91 | 92 | 93 | DEVICE_TYPE_RULES_NAME = 'device.te' 94 | SERVICE_TYPE_RULES_NAME = 'service.te' 95 | HWSERVICE_TYPE_RULES_NAME = 'hwservice.te' 96 | FILE_TYPE_RULES_NAME = 'file.te' 97 | PROPERTY_RULES_NAME = 'property.te' 98 | LEFTOVER_RULES_NAME = 'leftover.te' 99 | ATTRIBUTE_RULES_NAME = 'attribute' 100 | 101 | 102 | def domain_type(rule: Rule): 103 | domain = rule.parts[0] 104 | if not isinstance(domain, str) and len(rule.parts) >= 2: 105 | domain = rule.parts[1] 106 | 107 | if not isinstance(domain, str): 108 | return LEFTOVER_RULES_NAME 109 | 110 | t = extract_domain_type(domain) 111 | return f'{t}.te' 112 | 113 | 114 | def rule_simple_type_name(rule: Rule): 115 | if rule.rule_type == RuleType.TYPE.value: 116 | if 'dev_type' in rule.varargs: 117 | return DEVICE_TYPE_RULES_NAME 118 | elif 'file_type' in rule.varargs or 'fs_type' in rule.varargs: 119 | return FILE_TYPE_RULES_NAME 120 | elif isinstance(rule.parts[0], str): 121 | if rule.parts[0].endswith('_prop'): 122 | return PROPERTY_RULES_NAME 123 | elif rule.parts[0].endswith('_hwservice'): 124 | return HWSERVICE_TYPE_RULES_NAME 125 | elif rule.parts[0].endswith('_service'): 126 | return SERVICE_TYPE_RULES_NAME 127 | 128 | return None 129 | elif rule.rule_type in set( 130 | [ 131 | RuleType.ATTRIBUTE.value, 132 | RuleType.EXPANDATTRIBUTE.value, 133 | 'hal_attribute', 134 | ] 135 | ): 136 | return ATTRIBUTE_RULES_NAME 137 | elif isinstance(rule.parts[0], str): 138 | if rule.parts[0].endswith('_prop'): 139 | return PROPERTY_RULES_NAME 140 | 141 | return None 142 | 143 | 144 | def group_rules(mld: MultiLevelDict[Rule]): 145 | # Group rules based on main type 146 | grouped_rules: Dict[str, Set[Rule]] = {} 147 | for rule in mld.walk(): 148 | name = domain_type(rule) 149 | 150 | if name not in grouped_rules: 151 | grouped_rules[name] = set() 152 | 153 | grouped_rules[name].add(rule) 154 | 155 | # Re-group simple rules into common files 156 | regrouped_rules: Dict[str, Set[Rule]] = {} 157 | for name, rules in grouped_rules.items(): 158 | # If all rules of this group are simple, re-group them 159 | is_all_simple_type = True 160 | simple_type_names: List[Optional[str]] = [] 161 | for rule in rules: 162 | simple_type_name = rule_simple_type_name(rule) 163 | simple_type_names.append(simple_type_name) 164 | 165 | if simple_type_name is None: 166 | is_all_simple_type = False 167 | 168 | for new_name, rule in zip(simple_type_names, rules): 169 | if is_all_simple_type: 170 | assert new_name is not None 171 | name = new_name 172 | 173 | if name not in regrouped_rules: 174 | regrouped_rules[name] = set() 175 | 176 | regrouped_rules[name].add(rule) 177 | 178 | return regrouped_rules 179 | 180 | 181 | def rules_sort_key(rule: Rule): 182 | # Put type rules at the beginning 183 | if rule.rule_type == RuleType.TYPE.value: 184 | return (False, ['0']) 185 | 186 | compare_values = [str(h) for h in rule.hash_values] 187 | return (not rule.is_macro, compare_values) 188 | 189 | 190 | def output_grouped_rules(grouped_rules: Dict[str, Set[Rule]], output_dir: str): 191 | for name, rules in grouped_rules.items(): 192 | sorted_rules = sorted(rules, key=rules_sort_key) 193 | 194 | output_path = Path(output_dir, name) 195 | with open(output_path, 'w') as o: 196 | last_type = None 197 | for rule in sorted_rules: 198 | if last_type is not None and rule.rule_type != last_type: 199 | o.write('\n') 200 | last_type = rule.rule_type 201 | o.write(str(rule)) 202 | o.write('\n') 203 | -------------------------------------------------------------------------------- /sepolicy/pylintrc.toml: -------------------------------------------------------------------------------- 1 | [tool.pylint.'messages control'] 2 | disable = [ 3 | # Defaults 4 | 'raw-checker-failed', 5 | 'bad-inline-option', 6 | 'locally-disabled', 7 | 'file-ignored', 8 | 'suppressed-message', 9 | 'useless-suppression', 10 | 'deprecated-pragma', 11 | 'use-symbolic-message-instead', 12 | 'use-implicit-booleaness-not-comparison-to-string', 13 | 'use-implicit-booleaness-not-comparison-to-zero', 14 | 15 | 'invalid-name', 16 | 'missing-class-docstring', 17 | 'missing-function-docstring', 18 | 'missing-module-docstring', 19 | 'too-few-public-methods', 20 | 'too-many-arguments', 21 | 'too-many-boolean-expressions', 22 | 'too-many-instance-attributes', 23 | 'too-many-lines', 24 | 'too-many-locals', 25 | 'too-many-positional-arguments', 26 | 'too-many-public-methods', 27 | 'too-many-return-statements', 28 | 'unused-argument', 29 | ] 30 | -------------------------------------------------------------------------------- /sepolicy/ruff.toml: -------------------------------------------------------------------------------- 1 | line-length = 80 2 | 3 | [lint] 4 | extend-select = ['A', 'FA100', 'FA102', 'I'] 5 | 6 | [format] 7 | quote-style = 'single' 8 | -------------------------------------------------------------------------------- /sepolicy/rule.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | import re 7 | from enum import Enum 8 | from typing import Generator, Iterable, List, Optional, Tuple, Union 9 | 10 | from class_set import ClassSet 11 | from conditional_type import IConditionalType 12 | 13 | macro_argument_regex = re.compile(r'\$(\d+)') 14 | 15 | raw_part = Union[str, List['raw_part']] 16 | raw_parts_list = List[raw_part] 17 | rule_part = Union[str, IConditionalType, ClassSet] 18 | rule_part_or_varargs = Union[rule_part, Tuple[str, ...]] 19 | 20 | 21 | RULE_DYNAMIC_PARTS_INDEX = 1 22 | 23 | 24 | def is_type_generated(part: rule_part): 25 | if not isinstance(part, str): 26 | return False 27 | 28 | return part.startswith('base_typeattr_') 29 | 30 | 31 | def unpack_line( 32 | rule: str, 33 | open_char: str, 34 | close_char: str, 35 | separators: str, 36 | open_by_default: bool = False, 37 | ignored_chars: str = '', 38 | ) -> raw_parts_list: 39 | # TODO: test ~{ a b } formatting for source rules 40 | 41 | stack: List[raw_parts_list] = [] 42 | current: raw_parts_list = [] 43 | token = '' 44 | 45 | def add_token(): 46 | nonlocal token 47 | 48 | if token: 49 | current.append(token) 50 | token = '' 51 | 52 | if open_by_default: 53 | rule = f'{open_char}{rule}{close_char}' 54 | 55 | for c in rule: 56 | if c in ignored_chars: 57 | continue 58 | 59 | if c == open_char: 60 | add_token() 61 | stack.append(current) 62 | current = [] 63 | elif c == close_char: 64 | add_token() 65 | last = stack.pop() 66 | last.append(current) 67 | current = last 68 | elif c in separators: 69 | add_token() 70 | else: 71 | token += c 72 | 73 | assert isinstance(current[0], list) 74 | 75 | return current[0] if current else [] 76 | 77 | 78 | def flatten_parts(parts: raw_part) -> Generator[str, None, None]: 79 | if isinstance(parts, str): 80 | yield parts 81 | return 82 | 83 | assert isinstance(parts, list) 84 | 85 | for part in parts: 86 | if isinstance(part, list): 87 | yield from flatten_parts(part) 88 | else: 89 | yield part 90 | 91 | 92 | def remove_ioctl_zeros(ioctls: Iterable[str]): 93 | return list(map(lambda i: hex(int(i, base=16)), ioctls)) 94 | 95 | 96 | class RuleType(str, Enum): 97 | ALLOW = 'allow' 98 | ALLOWXPERM = 'allowxperm' 99 | ATTRIBUTE = 'attribute' 100 | AUDITALLOW = 'auditallow' 101 | DONTAUDIT = 'dontaudit' 102 | DONTAUDITXPERM = 'dontauditxperm' 103 | EXPANDATTRIBUTE = 'expandattribute' 104 | GENFSCON = 'genfscon' 105 | NEVERALLOW = 'neverallow' 106 | NEVERALLOWXPERM = 'neverallowxperm' 107 | TYPE = 'type' 108 | TYPE_TRANSITION = 'type_transition' 109 | TYPEATTRIBUTE = 'typeattribute' 110 | 111 | 112 | ALLOW_RULE_TYPES = [ 113 | RuleType.ALLOW, 114 | RuleType.NEVERALLOW, 115 | RuleType.AUDITALLOW, 116 | RuleType.DONTAUDIT, 117 | ] 118 | 119 | 120 | IOCTL_RULE_TYPES = [ 121 | RuleType.ALLOWXPERM, 122 | RuleType.NEVERALLOWXPERM, 123 | RuleType.DONTAUDITXPERM, 124 | ] 125 | 126 | CLASS_SETS_RULE_TYPES = ALLOW_RULE_TYPES + IOCTL_RULE_TYPES 127 | 128 | 129 | def join_varargs(varargs: Tuple[str, ...]): 130 | s = ' '.join(varargs) 131 | 132 | if len(varargs) > 1: 133 | s = '{ ' + s + ' }' 134 | 135 | return s 136 | 137 | 138 | def format_rule(rule: Rule): 139 | match rule.rule_type: 140 | case ( 141 | RuleType.ALLOW 142 | | RuleType.NEVERALLOW 143 | | RuleType.AUDITALLOW 144 | | RuleType.DONTAUDIT 145 | ): 146 | return '{} {} {}:{} {};'.format( 147 | rule.rule_type, 148 | rule.parts[0], 149 | rule.parts[1], 150 | rule.parts[2], 151 | join_varargs(rule.varargs), 152 | ) 153 | case ( 154 | RuleType.ALLOWXPERM 155 | | RuleType.NEVERALLOWXPERM 156 | | RuleType.DONTAUDITXPERM 157 | ): 158 | return '{} {} {}:{} ioctl {};'.format( 159 | rule.rule_type, 160 | rule.parts[0], 161 | rule.parts[1], 162 | rule.parts[2], 163 | join_varargs(rule.varargs), 164 | ) 165 | case RuleType.TYPE: 166 | varargs = sorted(rule.varargs) 167 | varargs_str = ', '.join(varargs) 168 | return '{} {}, {};'.format( 169 | rule.rule_type, rule.parts[0], varargs_str 170 | ) 171 | case RuleType.TYPE_TRANSITION: 172 | assert len(rule.varargs) in [0, 1] 173 | 174 | if len(rule.varargs) == 1: 175 | name = f'{list(rule.varargs)[0]} ' 176 | else: 177 | name = '' 178 | 179 | return '{} {} {}:{} {}{};'.format( 180 | rule.rule_type, 181 | rule.parts[0], 182 | rule.parts[1], 183 | rule.parts[2], 184 | name, 185 | rule.parts[-1], 186 | ) 187 | case RuleType.GENFSCON: 188 | return 'genfscon {} {} u:object_r:{}:s0'.format( 189 | rule.parts[0], 190 | rule.parts[1], 191 | rule.parts[2], 192 | ) 193 | case ( 194 | RuleType.ATTRIBUTE 195 | | RuleType.TYPEATTRIBUTE 196 | | RuleType.EXPANDATTRIBUTE 197 | ): 198 | parts_str = ' '.join(map(str, rule.parts)) 199 | return f'{rule.rule_type} {parts_str};' 200 | case _: 201 | assert rule.is_macro 202 | parts_str = ', '.join(map(str, rule.parts)) 203 | return f'{rule.rule_type}({parts_str})' 204 | 205 | 206 | class Rule: 207 | def __init__( 208 | self, 209 | rule_type: str, 210 | parts: Tuple[rule_part, ...], 211 | varargs: Tuple[str, ...], 212 | is_macro: bool = False, 213 | ): 214 | self.rule_type = rule_type 215 | self.parts = parts 216 | self.varargs = varargs 217 | self.is_macro = is_macro 218 | self.hash_values: Tuple[rule_part_or_varargs, ...] = tuple( 219 | [self.rule_type] + list(self.parts) + [self.varargs] 220 | ) 221 | 222 | # Postpone hash calculation so that ConditionalTypes are fully 223 | # gathered and ConditionalTypeRedirect can find them 224 | self.__hash: Optional[int] = None 225 | 226 | def __str__(self): 227 | return format_rule(self) 228 | 229 | def __eq__(self, other: object): 230 | assert isinstance(other, Rule) 231 | 232 | return self.hash_values == other.hash_values 233 | 234 | def __hash__(self): 235 | if self.__hash is None: 236 | self.__hash = hash(self.hash_values) 237 | 238 | return self.__hash 239 | -------------------------------------------------------------------------------- /sepolicy/source_rule.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | from __future__ import annotations 5 | 6 | from typing import List 7 | 8 | from classmap import Classmap 9 | from conditional_type import ConditionalType 10 | from rule import ( 11 | Rule, 12 | RuleType, 13 | flatten_parts, 14 | raw_part, 15 | raw_parts_list, 16 | remove_ioctl_zeros, 17 | unpack_line, 18 | ) 19 | 20 | 21 | def is_allow_process_sigchld(parts: raw_parts_list): 22 | return ( 23 | parts[0] == RuleType.ALLOW 24 | and len(parts) == 5 25 | and parts[3:] == ['process', 'sigchld'] 26 | ) 27 | 28 | 29 | def structure_conditional_type(parts: raw_part): 30 | if isinstance(parts, str): 31 | return parts 32 | 33 | positives: List[str] = [] 34 | negatives: List[str] = [] 35 | 36 | if len(parts) == 1 and parts[0] == '*': 37 | return ConditionalType([], [], True) 38 | 39 | for part in parts: 40 | assert isinstance(part, str) 41 | if part.startswith('-'): 42 | negatives.append(part[1:]) 43 | else: 44 | assert part[0].isalpha() or part[0] == '$', parts 45 | positives.append(part) 46 | 47 | return ConditionalType(positives, negatives, False) 48 | 49 | 50 | class SourceRule(Rule): 51 | @classmethod 52 | def from_line(cls, line: str, classmap: Classmap) -> List[Rule]: 53 | parts = unpack_line( 54 | line, 55 | '{', 56 | '}', 57 | ' :,', 58 | open_by_default=True, 59 | ignored_chars=';', 60 | ) 61 | if not parts: 62 | return [] 63 | 64 | if not isinstance(parts[0], str) or len(parts) == 1: 65 | raise ValueError(f'Invalid line: {line}') 66 | 67 | # Remove allow $3 $1:process sigchld as it is part of an ifelse 68 | # statement based on one of the parameters and it is not possible 69 | # to generate the checks for it as part of macro expansion 70 | if is_allow_process_sigchld(parts): 71 | return [] 72 | 73 | rules: List[Rule] = [] 74 | 75 | match parts[0]: 76 | case ( 77 | RuleType.ALLOW.value 78 | | RuleType.NEVERALLOW.value 79 | | RuleType.AUDITALLOW.value 80 | | RuleType.DONTAUDIT.value 81 | ): 82 | assert len(parts) == 5, line 83 | 84 | src = structure_conditional_type(parts[1]) 85 | dst = structure_conditional_type(parts[2]) 86 | 87 | classes = list(flatten_parts(parts[3])) 88 | classmap.sort_classes(classes) 89 | varargs = list(flatten_parts(parts[4])) 90 | 91 | for class_name in classes: 92 | classmap.sort_perms(class_name, varargs) 93 | rule = Rule( 94 | parts[0], 95 | (src, dst, class_name), 96 | tuple(varargs), 97 | ) 98 | rules.append(rule) 99 | case RuleType.TYPE_TRANSITION.value: 100 | assert len(parts) in [5, 6], line 101 | assert isinstance(parts[1], str), line 102 | assert isinstance(parts[2], str), line 103 | assert isinstance(parts[4], str), line 104 | 105 | class_names = flatten_parts(parts[3]) 106 | 107 | # Optional string for userfaultfd 108 | if len(parts) == 6: 109 | assert isinstance(parts[5], str), line 110 | assert parts[5] == '"[userfaultfd]"', line 111 | varargs = [parts[5]] 112 | else: 113 | varargs = [] 114 | 115 | for class_name in class_names: 116 | rule = Rule( 117 | parts[0], 118 | (parts[1], parts[2], class_name, parts[4]), 119 | tuple(varargs), 120 | ) 121 | rules.append(rule) 122 | case ( 123 | RuleType.ALLOWXPERM.value 124 | | RuleType.NEVERALLOWXPERM.value 125 | | RuleType.DONTAUDITXPERM.value 126 | ): 127 | # TODO: ioctl rules are split at comments by the compiler 128 | # and later merged as part of the final processing steps 129 | # Try merging them ahead of time. 130 | assert len(parts) == 6 131 | assert isinstance(parts[1], str), line 132 | assert isinstance(parts[2], str), line 133 | assert isinstance(parts[3], str), line 134 | assert isinstance(parts[4], str), line 135 | assert parts[4] == 'ioctl' 136 | 137 | varargs = list(flatten_parts(parts[5])) 138 | ioctls = remove_ioctl_zeros(varargs) 139 | 140 | rule = Rule( 141 | parts[0], 142 | (parts[1], parts[2], parts[3]), 143 | tuple(ioctls), 144 | ) 145 | rules.append(rule) 146 | case RuleType.ATTRIBUTE.value: 147 | assert len(parts) == 2, line 148 | assert isinstance(parts[1], str), line 149 | 150 | rule = Rule( 151 | parts[0], 152 | (parts[1],), 153 | (), 154 | ) 155 | return [rule] 156 | case RuleType.TYPEATTRIBUTE.value: 157 | assert len(parts) == 3, line 158 | assert isinstance(parts[1], str), line 159 | assert isinstance(parts[2], str), line 160 | 161 | rule = Rule( 162 | parts[0], 163 | (parts[1], parts[2]), 164 | (), 165 | ) 166 | rules.append(rule) 167 | case RuleType.TYPE.value: 168 | assert isinstance(parts[1], str), line 169 | 170 | # Convert type rules to typeattribute to allow easy 171 | # with split typeattributeset rules 172 | for t in parts[2:]: 173 | assert isinstance(t, str) 174 | rule = Rule( 175 | RuleType.TYPEATTRIBUTE.value, 176 | (parts[1], t), 177 | (), 178 | ) 179 | rules.append(rule) 180 | case RuleType.EXPANDATTRIBUTE.value: 181 | assert len(parts) == 3 182 | assert isinstance(parts[1], str), line 183 | assert isinstance(parts[2], str), line 184 | 185 | rule = Rule( 186 | parts[0], 187 | (parts[1], parts[2]), 188 | (), 189 | ) 190 | rules.append(rule) 191 | case _: 192 | assert False, line 193 | 194 | return rules 195 | -------------------------------------------------------------------------------- /sepolicy/utils.py: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 The LineageOS Project 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | 5 | from collections.abc import Hashable 6 | from enum import Enum 7 | from typing import Any, Protocol, runtime_checkable 8 | 9 | 10 | @runtime_checkable 11 | class SizedIndexable(Hashable, Protocol): 12 | def __len__(self) -> int: ... 13 | def __getitem__(self, index: int) -> Any: ... 14 | 15 | 16 | def remove_comments(line: str): 17 | index = line.find('#') 18 | if index != -1: 19 | line = line[:index] 20 | 21 | return line 22 | 23 | 24 | def is_empty_line(line: str): 25 | return not line 26 | 27 | 28 | def split_normalize_text(text: str): 29 | lines = text.splitlines(keepends=True) 30 | lines = list(map(remove_comments, lines)) 31 | lines = list(filter(lambda line: not is_empty_line(line), lines)) 32 | return lines 33 | 34 | class Color(str, Enum): 35 | RED = '\033[0;31m' 36 | GREEN = '\033[0;32m' 37 | YELLOW = '\033[1;33m' 38 | END = '\033[0m' 39 | 40 | 41 | def color_print(*args: object, color: Color): 42 | args_str = ' '.join(str(arg) for arg in args) 43 | args_str = color.value + args_str + Color.END.value 44 | print(args_str) 45 | -------------------------------------------------------------------------------- /sort_dts.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import sys 5 | import fdt 6 | from fdt import PropStrings, PropWords 7 | from fdt_extra import PropWordsWithPhandles 8 | 9 | def for_each_node(node, fn, *args, max_recurse_level=-1, recurse_level=0, **kwargs): 10 | if node is None: 11 | return 12 | 13 | if max_recurse_level != -1 and recurse_level > max_recurse_level: 14 | return 15 | 16 | fn(node, *args, **kwargs) 17 | 18 | for child_node in node.nodes: 19 | for_each_node( 20 | child_node, fn, *args, 21 | max_recurse_level=max_recurse_level, 22 | recurse_level=recurse_level + 1, 23 | **kwargs) 24 | 25 | def replace_phandle_with_label(path, name, addr, label=None, phandle_labels_map=None): 26 | print(f'Replace phandle {path} {name} {addr} {label}') 27 | index = addr // 4 28 | node = dt.get_node(path) 29 | ref_prop = node.get_property(name) 30 | 31 | if isinstance(ref_prop, PropWords): 32 | new_prop = PropWordsWithPhandles(name, *ref_prop.data) 33 | node.remove_property(name) 34 | node.append(new_prop) 35 | elif isinstance(ref_prop, PropWordsWithPhandles): 36 | new_prop = ref_prop 37 | else: 38 | raise ValueError() 39 | 40 | if not label: 41 | phandle = new_prop.data[index] 42 | if phandle not in phandle_labels_map: 43 | print(f'Invalid phandle {phandle}') 44 | return 45 | 46 | label = phandle_labels_map[phandle][0] 47 | 48 | new_prop.set_phandle_name(index, label) 49 | 50 | def dt_fill_fixups(dt): 51 | fixups_node = dt.root.get_subnode('__fixups__') 52 | if fixups_node is None: 53 | return 54 | 55 | for prop in fixups_node.props: 56 | for value in prop.data: 57 | print(f'Fixup prop {prop} {value}') 58 | node_path, prop_name, prop_data_index = value.split(':') 59 | prop_data_index = int(prop_data_index) 60 | 61 | replace_phandle_with_label(node_path, prop_name, prop_data_index, 62 | prop.name) 63 | 64 | dt.root.remove_subnode('__fixups__') 65 | 66 | def dt_fill_symbols(dt): 67 | SYMBOLS = '__symbols__' 68 | symbols_node = dt.root.get_subnode(SYMBOLS) 69 | phandle_labels_map = {} 70 | 71 | if symbols_node is None: 72 | return 73 | 74 | for prop in symbols_node.props: 75 | assert isinstance(prop, PropStrings) 76 | 77 | label = prop.name 78 | path = prop.value 79 | node = dt.get_node(path) 80 | phandle = node.get_property('phandle').value 81 | 82 | if phandle not in phandle_labels_map: 83 | phandle_labels_map[phandle] = [] 84 | 85 | phandle_labels_map[phandle].append(label) 86 | 87 | if node.label: 88 | label = f'{node.label}: {label}' 89 | 90 | node.set_label(label) 91 | 92 | dt.root.remove_subnode(SYMBOLS) 93 | 94 | LOCAL_FIXUPS = '__local_fixups__' 95 | local_fixups_node = dt.root.get_subnode(LOCAL_FIXUPS) 96 | 97 | if local_fixups_node is None: 98 | return 99 | 100 | def replace_phandles_with_label(node): 101 | for prop in node.props: 102 | abs_node_path = prop.path.removeprefix(f'/{LOCAL_FIXUPS}') 103 | 104 | for value in prop.data: 105 | print(f'Fixup prop {prop} {value}') 106 | replace_phandle_with_label(abs_node_path, prop.name, value, 107 | phandle_labels_map=phandle_labels_map) 108 | 109 | for_each_node(local_fixups_node, replace_phandles_with_label) 110 | 111 | dt.root.remove_subnode(LOCAL_FIXUPS) 112 | 113 | def sort_props(prop): 114 | return str(prop) 115 | 116 | def sort_nodes(node): 117 | return node.name 118 | 119 | def sort_node(node): 120 | node._props.sort(key=sort_props) 121 | node._nodes.sort(key=sort_nodes) 122 | 123 | def dt_sort_nodes(dt): 124 | for_each_node(dt.root, sort_node) 125 | 126 | def node_is_fragment(node): 127 | return node.name.startswith('fragment@') 128 | 129 | def dt_extract_overlays(dt): 130 | overlay_nodes = {} 131 | 132 | for node in dt.root.nodes: 133 | if not node_is_fragment(node): 134 | continue 135 | 136 | overlay_target = node.get_property('target') 137 | assert isinstance(overlay_target, PropWordsWithPhandles) 138 | overlay_label = overlay_target.get_phandle_name(0) 139 | 140 | if overlay_label not in overlay_nodes: 141 | overlay_nodes[overlay_label] = [] 142 | 143 | overlay_node = node.get_subnode('__overlay__') 144 | overlay_node.set_name(f'&{overlay_label}') 145 | 146 | overlay_nodes[overlay_label].append(overlay_node) 147 | 148 | dt.root._nodes = [node for node in dt.root._nodes if not node_is_fragment(node)] 149 | 150 | merged_overlays = [] 151 | 152 | for label in overlay_nodes: 153 | overlays = overlay_nodes[label] 154 | first_overlay = overlays[0] 155 | 156 | for overlay in overlays[1:]: 157 | first_overlay.merge(overlay, replace=True) 158 | 159 | sort_node(first_overlay) 160 | 161 | merged_overlays.append(first_overlay) 162 | 163 | merged_overlays.sort(key=sort_nodes) 164 | 165 | return merged_overlays 166 | 167 | def remove_phandle(node): 168 | node.remove_property('phandle') 169 | 170 | def dt_remove_phandles(dt): 171 | for_each_node(dt.root, remove_phandle) 172 | 173 | if __name__ == '__main__': 174 | if len(sys.argv) < 2: 175 | sys.exit(1) 176 | 177 | dts_file = sys.argv[1] 178 | 179 | if len(sys.argv) == 3: 180 | out_dts_file = sys.argv[2] 181 | else: 182 | out_dts_file = None 183 | 184 | if dts_file.endswith('.dts') or dts_file.endswith('.dtsi'): 185 | with open(dts_file, 'r') as f: 186 | dts_text = f.read() 187 | 188 | dt = fdt.parse_dts(dts_text) 189 | elif dts_file.endswith('.dtb') or dts_file.endswith('.dtbo'): 190 | with open(dts_file, 'rb') as f: 191 | dtb_bin = f.read() 192 | 193 | dt = fdt.parse_dtb(dtb_bin) 194 | 195 | if out_dts_file is None: 196 | dts_file_without_ext = os.path.splitext(dts_file)[0] 197 | out_dts_file = dts_file_without_ext + '.dts' 198 | 199 | else: 200 | raise ValueError('Invalid file extension') 201 | 202 | dt_fill_fixups(dt) 203 | dt_fill_symbols(dt) 204 | dt_remove_phandles(dt) 205 | dt_sort_nodes(dt) 206 | overlays = dt_extract_overlays(dt) 207 | 208 | with open(out_dts_file, 'w') as f: 209 | f.write(dt.to_dts()) 210 | 211 | if overlays: 212 | f.write('\n') 213 | for overlay in overlays: 214 | f.write(overlay.to_dts()) 215 | f.write('\n') 216 | -------------------------------------------------------------------------------- /source_available_files.txt: -------------------------------------------------------------------------------- 1 | app/CtsVendorJniTestCases/CtsVendorJniTestCases.apk 2 | app/oemlibrarytest-vendor/oemlibrarytest-vendor.apk 3 | bin/asanwrapper 4 | bin/awk 5 | bin/bssl 6 | bin/cavp 7 | bin/cplay 8 | bin/dd 9 | bin/dumpEffectConfigFile 10 | bin/egrep 11 | bin/ese-boot-tool 12 | bin/ese-ls-provision 13 | bin/ese-relay-fake 14 | bin/ese-relay-pn80t-nq-nci 15 | bin/ese-relay-pn80t-spidev 16 | bin/ese-replay 17 | bin/ese_nxp_sample 18 | bin/esed 19 | bin/fgrep 20 | bin/getevent 21 | bin/getprop 22 | bin/grep 23 | bin/hw/android.hardware.audio@2.0-service 24 | bin/hw/android.hardware.authsecret@1.0-service 25 | bin/hw/android.hardware.authsecret@1.0-service.citadel 26 | bin/hw/android.hardware.automotive.audiocontrol@1.0-service 27 | bin/hw/android.hardware.automotive.evs@1.0-service 28 | bin/hw/android.hardware.automotive.vehicle@2.0-service 29 | bin/hw/android.hardware.biometrics.fingerprint@2.0-service 30 | bin/hw/android.hardware.biometrics.fingerprint@2.1-service 31 | bin/hw/android.hardware.bluetooth@1.0-service 32 | bin/hw/android.hardware.bluetooth@1.0-service.btlinux 33 | bin/hw/android.hardware.bluetooth@1.0-service.sim 34 | bin/hw/android.hardware.boot@1.0-service 35 | bin/hw/android.hardware.broadcastradio@1.1-service 36 | bin/hw/android.hardware.broadcastradio@2.0-service 37 | bin/hw/android.hardware.camera.provider@2.4-external-service 38 | bin/hw/android.hardware.camera.provider@2.4-service 39 | bin/hw/android.hardware.camera.provider@2.4-service_64 40 | bin/hw/android.hardware.cas@1.0-service 41 | bin/hw/android.hardware.configstore@1.1-service 42 | bin/hw/android.hardware.confirmationui@1.0-service 43 | bin/hw/android.hardware.contexthub@1.0-service 44 | bin/hw/android.hardware.contexthub@1.0-service.nanohub 45 | bin/hw/android.hardware.drm@1.0-service 46 | bin/hw/android.hardware.drm@1.1-service.clearkey 47 | bin/hw/android.hardware.dumpstate@1.0-service 48 | bin/hw/android.hardware.gatekeeper@1.0-service 49 | bin/hw/android.hardware.gnss@1.0-service 50 | bin/hw/android.hardware.gnss@1.1-service 51 | bin/hw/android.hardware.graphics.allocator@2.0-service 52 | bin/hw/android.hardware.graphics.composer@2.1-service 53 | bin/hw/android.hardware.graphics.composer@2.2-service 54 | bin/hw/android.hardware.health@1.0-service 55 | bin/hw/android.hardware.health@2.0-service 56 | bin/hw/android.hardware.health@2.0-service.goldfish 57 | bin/hw/android.hardware.health@2.0-service.override 58 | bin/hw/android.hardware.ir@1.0-service 59 | bin/hw/android.hardware.keymaster@3.0-service 60 | bin/hw/android.hardware.keymaster@4.0-service 61 | bin/hw/android.hardware.keymaster@4.0-service.citadel 62 | bin/hw/android.hardware.keymaster@4.0-strongbox-service 63 | bin/hw/android.hardware.light@2.0-service 64 | bin/hw/android.hardware.light@2.0-service.aw2013 65 | bin/hw/android.hardware.light@2.0-service.leeco_8996 66 | bin/hw/android.hardware.media.omx@1.0-service 67 | bin/hw/android.hardware.memtrack@1.0-service 68 | bin/hw/android.hardware.neuralnetworks@1.0-service-hvx 69 | bin/hw/android.hardware.neuralnetworks@1.1-service-sample-all 70 | bin/hw/android.hardware.neuralnetworks@1.1-service-sample-float-fast 71 | bin/hw/android.hardware.neuralnetworks@1.1-service-sample-float-slow 72 | bin/hw/android.hardware.neuralnetworks@1.1-service-sample-minimal 73 | bin/hw/android.hardware.neuralnetworks@1.1-service-sample-quant 74 | bin/hw/android.hardware.nfc@1.0-service 75 | bin/hw/android.hardware.nfc@1.1-service 76 | bin/hw/android.hardware.oemlock@1.0-service.citadel 77 | bin/hw/android.hardware.power@1.0-service 78 | bin/hw/android.hardware.radio.config@1.0-service 79 | bin/hw/android.hardware.radio@1.2-radio-service 80 | bin/hw/android.hardware.radio@1.2-sap-service 81 | bin/hw/android.hardware.secure_element@1.0-service 82 | bin/hw/android.hardware.secure_element@1.0-service-disabled 83 | bin/hw/android.hardware.sensors@1.0-service 84 | bin/hw/android.hardware.tests.extension.light@2.0-service 85 | bin/hw/android.hardware.thermal@1.0-service 86 | bin/hw/android.hardware.tv.cec@1.0-service 87 | bin/hw/android.hardware.tv.input@1.0-service 88 | bin/hw/android.hardware.usb@1.0-service 89 | bin/hw/android.hardware.usb@1.0-service.basic 90 | bin/hw/android.hardware.vibrator@1.0-service 91 | bin/hw/android.hardware.vibrator@1.0-service.lineage 92 | bin/hw/android.hardware.vr@1.0-service 93 | bin/hw/android.hardware.weaver@1.0-service.citadel 94 | bin/hw/android.hardware.wifi@1.0-service 95 | bin/hw/citadel_integration_tests 96 | bin/hw/citadel_updater 97 | bin/hw/citadel_validation_tool 98 | bin/hw/citadeld 99 | bin/hw/rild 100 | bin/hw/stress_test 101 | bin/hw/stupid 102 | bin/hw/test_citadel 103 | bin/hw/vendor.lineage.livedisplay@2.0-service-legacymm 104 | bin/hw/vendor.lineage.livedisplay@2.0-service-sdm 105 | bin/hw/vendor.lineage.livedisplay@2.0-service-sysfs 106 | bin/hw/vendor.lineage.touch@1.0-service.leeco_8996 107 | bin/hw/vendor.lineage.trust@1.0-service 108 | bin/hw/vendor.nxp.hardware.nfc@1.0-service 109 | bin/hw/vendor.nxp.hardware.nfc@1.1-service 110 | bin/logwrapper 111 | bin/nanoapp_cmd 112 | bin/netmgr 113 | bin/newfs_msdos 114 | bin/sh 115 | bin/storageproxyd 116 | bin/toolbox 117 | bin/toybox_vendor 118 | bin/trusty_keymaster_tipc 119 | bin/vndservice 120 | bin/vndservicemanager 121 | etc/fs_config_dirs 122 | etc/fs_config_files 123 | etc/group 124 | etc/init/SampleDriverAll.rc 125 | etc/init/SampleDriverFloatFast.rc 126 | etc/init/SampleDriverFloatSlow.rc 127 | etc/init/SampleDriverMinimal.rc 128 | etc/init/SampleDriverQuant.rc 129 | etc/init/android.hardware.audio@2.0-service.rc 130 | etc/init/android.hardware.authsecret@1.0-service.citadel.rc 131 | etc/init/android.hardware.authsecret@1.0-service.rc 132 | etc/init/android.hardware.automotive.audiocontrol@1.0-service.rc 133 | etc/init/android.hardware.automotive.evs@1.0-service.rc 134 | etc/init/android.hardware.automotive.vehicle@2.0-service.rc 135 | etc/init/android.hardware.biometrics.fingerprint@2.0-service.rc 136 | etc/init/android.hardware.biometrics.fingerprint@2.1-service.rc 137 | etc/init/android.hardware.bluetooth@1.0-service.btlinux.rc 138 | etc/init/android.hardware.bluetooth@1.0-service.rc 139 | etc/init/android.hardware.bluetooth@1.0-service.sim.rc 140 | etc/init/android.hardware.boot@1.0-service.rc 141 | etc/init/android.hardware.broadcastradio@1.1-service.rc 142 | etc/init/android.hardware.broadcastradio@2.0-service.rc 143 | etc/init/android.hardware.camera.provider@2.4-external-service.rc 144 | etc/init/android.hardware.camera.provider@2.4-service.rc 145 | etc/init/android.hardware.camera.provider@2.4-service_64.rc 146 | etc/init/android.hardware.cas@1.0-service.rc 147 | etc/init/android.hardware.configstore@1.1-service.rc 148 | etc/init/android.hardware.confirmationui@1.0-service.rc 149 | etc/init/android.hardware.contexthub@1.0-service.nanohub.rc 150 | etc/init/android.hardware.contexthub@1.0-service.rc 151 | etc/init/android.hardware.drm@1.0-service.rc 152 | etc/init/android.hardware.drm@1.1-service.clearkey.rc 153 | etc/init/android.hardware.dumpstate@1.0-service.rc 154 | etc/init/android.hardware.gatekeeper@1.0-service.rc 155 | etc/init/android.hardware.gnss@1.0-service.rc 156 | etc/init/android.hardware.gnss@1.1-service.rc 157 | etc/init/android.hardware.graphics.allocator@2.0-service.rc 158 | etc/init/android.hardware.graphics.composer@2.1-service.rc 159 | etc/init/android.hardware.graphics.composer@2.2-service.rc 160 | etc/init/android.hardware.health@1.0-service.rc 161 | etc/init/android.hardware.health@2.0-service.goldfish.rc 162 | etc/init/android.hardware.health@2.0-service.rc 163 | etc/init/android.hardware.ir@1.0-service.rc 164 | etc/init/android.hardware.keymaster@3.0-service.rc 165 | etc/init/android.hardware.keymaster@4.0-service.citadel.rc 166 | etc/init/android.hardware.keymaster@4.0-service.rc 167 | etc/init/android.hardware.keymaster@4.0-strongbox-service.rc 168 | etc/init/android.hardware.light@2.0-service.aw2013.rc 169 | etc/init/android.hardware.light@2.0-service.leeco_8996.rc 170 | etc/init/android.hardware.light@2.0-service.rc 171 | etc/init/android.hardware.media.omx@1.0-service.rc 172 | etc/init/android.hardware.memtrack@1.0-service.rc 173 | etc/init/android.hardware.neuralnetworks@1.0-service-hvx.rc 174 | etc/init/android.hardware.nfc@1.0-service.rc 175 | etc/init/android.hardware.nfc@1.1-service.rc 176 | etc/init/android.hardware.oemlock@1.0-service.citadel.rc 177 | etc/init/android.hardware.power@1.0-service.rc 178 | etc/init/android.hardware.radio.config@1.0-service.rc 179 | etc/init/android.hardware.radio@1.2-radio-service.rc 180 | etc/init/android.hardware.radio@1.2-sap-service.rc 181 | etc/init/android.hardware.secure_element@1.0-service-disabled.rc 182 | etc/init/android.hardware.secure_element@1.0-service.rc 183 | etc/init/android.hardware.sensors@1.0-service.rc 184 | etc/init/android.hardware.tests.extension.light@2.0-service.rc 185 | etc/init/android.hardware.thermal@1.0-service.rc 186 | etc/init/android.hardware.tv.cec@1.0-service.rc 187 | etc/init/android.hardware.tv.input@1.0-service.rc 188 | etc/init/android.hardware.usb@1.0-service.basic.rc 189 | etc/init/android.hardware.usb@1.0-service.rc 190 | etc/init/android.hardware.vibrator@1.0-service.lineage.rc 191 | etc/init/android.hardware.vibrator@1.0-service.rc 192 | etc/init/android.hardware.vr@1.0-service.rc 193 | etc/init/android.hardware.weaver@1.0-service.citadel.rc 194 | etc/init/android.hardware.wifi@1.0-service.rc 195 | etc/init/citadeld.rc 196 | etc/init/esed.rc 197 | etc/init/rild.rc 198 | etc/init/vendor.lineage.livedisplay@2.0-service-legacymm.rc 199 | etc/init/vendor.lineage.livedisplay@2.0-service-sdm.rc 200 | etc/init/vendor.lineage.livedisplay@2.0-service-sysfs.rc 201 | etc/init/vendor.lineage.touch@1.0-service.leeco_8996.rc 202 | etc/init/vendor.lineage.trust@1.0-service.rc 203 | etc/init/vendor.nxp.hardware.nfc@1.0-service.rc 204 | etc/init/vendor.nxp.hardware.nfc@1.1-service.rc 205 | etc/init/vndservicemanager.rc 206 | etc/mkshrc 207 | etc/passwd 208 | etc/permissions/vendorprivapp-permissions-test.xml 209 | etc/seccomp_policy/codec2.vendor.base.policy 210 | etc/seccomp_policy/configstore@1.1.policy 211 | etc/selinux/plat_pub_versioned.cil 212 | etc/selinux/plat_sepolicy_vers.txt 213 | etc/selinux/precompiled_sepolicy 214 | etc/selinux/precompiled_sepolicy.plat_and_mapping.sha256 215 | etc/selinux/vendor_file_contexts 216 | etc/selinux/vendor_hwservice_contexts 217 | etc/selinux/vendor_mac_permissions.xml 218 | etc/selinux/vendor_property_contexts 219 | etc/selinux/vendor_seapp_contexts 220 | etc/selinux/vendor_sepolicy.cil 221 | etc/selinux/vndservice_contexts 222 | etc/vintf/compatibility_matrix.xml 223 | lib/android.frameworks.displayservice@1.0-adapter-helper.so 224 | lib/android.frameworks.schedulerservice@1.0-adapter-helper.so 225 | lib/android.frameworks.sensorservice@1.0-adapter-helper.so 226 | lib/android.frameworks.vr.composer@1.0-adapter-helper.so 227 | lib/android.hardware.audio.common@2.0-adapter-helper.so 228 | lib/android.hardware.audio.common@4.0-adapter-helper.so 229 | lib/android.hardware.audio.effect@2.0-adapter-helper.so 230 | lib/android.hardware.audio.effect@4.0-adapter-helper.so 231 | lib/android.hardware.audio@2.0-adapter-helper.so 232 | lib/android.hardware.audio@4.0-adapter-helper.so 233 | lib/android.hardware.authsecret@1.0-adapter-helper.so 234 | lib/android.hardware.authsecret@1.0-impl.nos.so 235 | lib/android.hardware.automotive.audiocontrol@1.0-adapter-helper.so 236 | lib/android.hardware.automotive.evs@1.0-adapter-helper.so 237 | lib/android.hardware.automotive.vehicle@2.0-adapter-helper.so 238 | lib/android.hardware.automotive.vehicle@2.0-manager-lib.so 239 | lib/android.hardware.biometrics.fingerprint@2.1-adapter-helper.so 240 | lib/android.hardware.bluetooth.a2dp@1.0-adapter-helper.so 241 | lib/android.hardware.bluetooth@1.0-adapter-helper.so 242 | lib/android.hardware.boot@1.0-adapter-helper.so 243 | lib/android.hardware.broadcastradio@1.0-adapter-helper.so 244 | lib/android.hardware.broadcastradio@1.1-adapter-helper.so 245 | lib/android.hardware.broadcastradio@2.0-adapter-helper.so 246 | lib/android.hardware.camera.common@1.0-adapter-helper.so 247 | lib/android.hardware.camera.device@1.0-adapter-helper.so 248 | lib/android.hardware.camera.device@3.2-adapter-helper.so 249 | lib/android.hardware.camera.device@3.3-adapter-helper.so 250 | lib/android.hardware.camera.device@3.4-adapter-helper.so 251 | lib/android.hardware.camera.metadata@3.2-adapter-helper.so 252 | lib/android.hardware.camera.metadata@3.3-adapter-helper.so 253 | lib/android.hardware.camera.provider@2.4-adapter-helper.so 254 | lib/android.hardware.cas.native@1.0-adapter-helper.so 255 | lib/android.hardware.cas@1.0-adapter-helper.so 256 | lib/android.hardware.configstore@1.0-adapter-helper.so 257 | lib/android.hardware.configstore@1.1-adapter-helper.so 258 | lib/android.hardware.confirmationui@1.0-adapter-helper.so 259 | lib/android.hardware.contexthub@1.0-adapter-helper.so 260 | lib/android.hardware.drm@1.0-adapter-helper.so 261 | lib/android.hardware.drm@1.1-adapter-helper.so 262 | lib/android.hardware.dumpstate@1.0-adapter-helper.so 263 | lib/android.hardware.gatekeeper@1.0-adapter-helper.so 264 | lib/android.hardware.gnss@1.0-adapter-helper.so 265 | lib/android.hardware.gnss@1.1-adapter-helper.so 266 | lib/android.hardware.graphics.allocator@2.0-adapter-helper.so 267 | lib/android.hardware.graphics.bufferqueue@1.0-adapter-helper.so 268 | lib/android.hardware.graphics.common@1.0-adapter-helper.so 269 | lib/android.hardware.graphics.common@1.1-adapter-helper.so 270 | lib/android.hardware.graphics.composer@2.1-adapter-helper.so 271 | lib/android.hardware.graphics.composer@2.2-adapter-helper.so 272 | lib/android.hardware.graphics.mapper@2.0-adapter-helper.so 273 | lib/android.hardware.graphics.mapper@2.1-adapter-helper.so 274 | lib/android.hardware.health@1.0-adapter-helper.so 275 | lib/android.hardware.health@2.0-adapter-helper.so 276 | lib/android.hardware.ir@1.0-adapter-helper.so 277 | lib/android.hardware.keymaster@3.0-adapter-helper.so 278 | lib/android.hardware.keymaster@4.0-adapter-helper.so 279 | lib/android.hardware.keymaster@4.0-impl.nos.so 280 | lib/android.hardware.light@2.0-adapter-helper.so 281 | lib/android.hardware.media.bufferpool@1.0-adapter-helper.so 282 | lib/android.hardware.media.omx@1.0-adapter-helper.so 283 | lib/android.hardware.media@1.0-adapter-helper.so 284 | lib/android.hardware.memtrack@1.0-adapter-helper.so 285 | lib/android.hardware.neuralnetworks@1.0-adapter-helper.so 286 | lib/android.hardware.neuralnetworks@1.1-adapter-helper.so 287 | lib/android.hardware.nfc@1.0-adapter-helper.so 288 | lib/android.hardware.nfc@1.1-adapter-helper.so 289 | lib/android.hardware.oemlock@1.0-adapter-helper.so 290 | lib/android.hardware.oemlock@1.0-impl.nos.so 291 | lib/android.hardware.power@1.0-adapter-helper.so 292 | lib/android.hardware.power@1.1-adapter-helper.so 293 | lib/android.hardware.power@1.2-adapter-helper.so 294 | lib/android.hardware.power@1.3-adapter-helper.so 295 | lib/android.hardware.power@1.3.so 296 | lib/android.hardware.radio.config@1.0-adapter-helper.so 297 | lib/android.hardware.radio.deprecated@1.0-adapter-helper.so 298 | lib/android.hardware.radio@1.0-adapter-helper.so 299 | lib/android.hardware.radio@1.1-adapter-helper.so 300 | lib/android.hardware.radio@1.2-adapter-helper.so 301 | lib/android.hardware.renderscript@1.0-adapter-helper.so 302 | lib/android.hardware.secure_element@1.0-adapter-helper.so 303 | lib/android.hardware.sensors@1.0-adapter-helper.so 304 | lib/android.hardware.soundtrigger@2.0-adapter-helper.so 305 | lib/android.hardware.soundtrigger@2.1-adapter-helper.so 306 | lib/android.hardware.tests.bar@1.0-adapter-helper.so 307 | lib/android.hardware.tests.bar@1.0.so 308 | lib/android.hardware.tests.baz@1.0-adapter-helper.so 309 | lib/android.hardware.tests.baz@1.0.so 310 | lib/android.hardware.tests.expression@1.0-adapter-helper.so 311 | lib/android.hardware.tests.expression@1.0.so 312 | lib/android.hardware.tests.extension.light@2.0-adapter-helper.so 313 | lib/android.hardware.tests.extension.light@2.0.so 314 | lib/android.hardware.tests.foo@1.0-adapter-helper.so 315 | lib/android.hardware.tests.foo@1.0.so 316 | lib/android.hardware.tests.hash@1.0-adapter-helper.so 317 | lib/android.hardware.tests.hash@1.0.so 318 | lib/android.hardware.tests.inheritance@1.0-adapter-helper.so 319 | lib/android.hardware.tests.inheritance@1.0.so 320 | lib/android.hardware.tests.libhwbinder@1.0-adapter-helper.so 321 | lib/android.hardware.tests.libhwbinder@1.0.so 322 | lib/android.hardware.tests.memory@1.0-adapter-helper.so 323 | lib/android.hardware.tests.memory@1.0.so 324 | lib/android.hardware.tests.msgq@1.0-adapter-helper.so 325 | lib/android.hardware.tests.msgq@1.0.so 326 | lib/android.hardware.tests.multithread@1.0-adapter-helper.so 327 | lib/android.hardware.tests.multithread@1.0.so 328 | lib/android.hardware.tests.pointer@1.0-adapter-helper.so 329 | lib/android.hardware.tests.pointer@1.0.so 330 | lib/android.hardware.tests.trie@1.0-adapter-helper.so 331 | lib/android.hardware.tests.trie@1.0.so 332 | lib/android.hardware.tetheroffload.config@1.0-adapter-helper.so 333 | lib/android.hardware.tetheroffload.control@1.0-adapter-helper.so 334 | lib/android.hardware.thermal@1.0-adapter-helper.so 335 | lib/android.hardware.thermal@1.1-adapter-helper.so 336 | lib/android.hardware.tv.cec@1.0-adapter-helper.so 337 | lib/android.hardware.tv.input@1.0-adapter-helper.so 338 | lib/android.hardware.usb.gadget@1.0-adapter-helper.so 339 | lib/android.hardware.usb@1.0-adapter-helper.so 340 | lib/android.hardware.usb@1.1-adapter-helper.so 341 | lib/android.hardware.vibrator@1.0-adapter-helper.so 342 | lib/android.hardware.vibrator@1.1-adapter-helper.so 343 | lib/android.hardware.vibrator@1.2-adapter-helper.so 344 | lib/android.hardware.vr@1.0-adapter-helper.so 345 | lib/android.hardware.weaver@1.0-adapter-helper.so 346 | lib/android.hardware.weaver@1.0-impl.nos.so 347 | lib/android.hardware.wifi.hostapd@1.0-adapter-helper.so 348 | lib/android.hardware.wifi.offload@1.0-adapter-helper.so 349 | lib/android.hardware.wifi.supplicant@1.0-adapter-helper.so 350 | lib/android.hardware.wifi.supplicant@1.1-adapter-helper.so 351 | lib/android.hardware.wifi@1.0-adapter-helper.so 352 | lib/android.hardware.wifi@1.1-adapter-helper.so 353 | lib/android.hardware.wifi@1.2-adapter-helper.so 354 | lib/android.hidl.allocator@1.0-adapter-helper.so 355 | lib/android.hidl.base@1.0-adapter-helper.so 356 | lib/android.hidl.manager@1.0-adapter-helper.so 357 | lib/android.hidl.manager@1.1-adapter-helper.so 358 | lib/android.hidl.memory.block@1.0-adapter-helper.so 359 | lib/android.hidl.memory.token@1.0-adapter-helper.so 360 | lib/android.hidl.memory@1.0-adapter-helper.so 361 | lib/android.hidl.token@1.0-adapter-helper.so 362 | lib/android.system.net.netd@1.0-adapter-helper.so 363 | lib/android.system.net.netd@1.1-adapter-helper.so 364 | lib/android.system.wifi.keystore@1.0-adapter-helper.so 365 | lib/camera.device@1.0-impl-legacy.so 366 | lib/camera.device@1.0-impl.so 367 | lib/camera.device@3.2-impl.so 368 | lib/camera.device@3.3-impl.so 369 | lib/camera.device@3.4-external-impl.so 370 | lib/camera.device@3.4-impl.so 371 | lib/ese_spi_nxp.so 372 | lib/hidl.tests.vendor@1.0-adapter-helper.so 373 | lib/hidl.tests.vendor@1.0.so 374 | lib/hidl.tests.vendor@1.1-adapter-helper.so 375 | lib/hidl.tests.vendor@1.1.so 376 | lib/hw/android.hardware.audio.effect@2.0-impl.so 377 | lib/hw/android.hardware.audio.effect@4.0-impl.so 378 | lib/hw/android.hardware.audio@2.0-impl.so 379 | lib/hw/android.hardware.audio@4.0-impl.so 380 | lib/hw/android.hardware.bluetooth.a2dp@1.0-impl.so 381 | lib/hw/android.hardware.bluetooth@1.0-impl-sim.so 382 | lib/hw/android.hardware.bluetooth@1.0-impl.so 383 | lib/hw/android.hardware.boot@1.0-impl.so 384 | lib/hw/android.hardware.broadcastradio@1.0-impl.so 385 | lib/hw/android.hardware.camera.provider@2.4-impl-legacy.so 386 | lib/hw/android.hardware.camera.provider@2.4-impl.so 387 | lib/hw/android.hardware.contexthub@1.0-impl.generic.so 388 | lib/hw/android.hardware.contexthub@1.0-impl.nanohub.so 389 | lib/hw/android.hardware.contexthub@1.0-impl.so 390 | lib/hw/android.hardware.drm@1.0-impl.so 391 | lib/hw/android.hardware.gatekeeper@1.0-impl.so 392 | lib/hw/android.hardware.gnss@1.0-impl.so 393 | lib/hw/android.hardware.graphics.allocator@2.0-impl.so 394 | lib/hw/android.hardware.graphics.composer@2.1-impl.so 395 | lib/hw/android.hardware.graphics.mapper@2.0-impl-2.1.so 396 | lib/hw/android.hardware.graphics.mapper@2.0-impl.so 397 | lib/hw/android.hardware.health@1.0-impl.so 398 | lib/hw/android.hardware.ir@1.0-impl.so 399 | lib/hw/android.hardware.keymaster@3.0-impl.so 400 | lib/hw/android.hardware.light@2.0-impl.so 401 | lib/hw/android.hardware.memtrack@1.0-impl.so 402 | lib/hw/android.hardware.nfc@1.0-impl-bcm.so 403 | lib/hw/android.hardware.nfc@1.0-impl.so 404 | lib/hw/android.hardware.power@1.0-impl.so 405 | lib/hw/android.hardware.renderscript@1.0-impl.so 406 | lib/hw/android.hardware.sensors@1.0-impl.so 407 | lib/hw/android.hardware.soundtrigger@2.0-impl.so 408 | lib/hw/android.hardware.soundtrigger@2.1-impl.so 409 | lib/hw/android.hardware.thermal@1.0-impl.so 410 | lib/hw/android.hardware.tv.cec@1.0-impl.so 411 | lib/hw/android.hardware.tv.input@1.0-impl.so 412 | lib/hw/android.hardware.vibrator@1.0-impl.so 413 | lib/hw/android.hardware.vr@1.0-impl.so 414 | lib/hw/audio.primary.caremu.so 415 | lib/hw/audio.primary.default.so 416 | lib/hw/audio.r_submix.default.so 417 | lib/hw/audio.stub.default.so 418 | lib/hw/audio.usb.default.so 419 | lib/hw/audio_amplifier.default.so 420 | lib/hw/audio_policy.stub.so 421 | lib/hw/consumerir.default.so 422 | lib/hw/context_hub.default.so 423 | lib/hw/fingerprint.default.so 424 | lib/hw/gatekeeper.trusty.so 425 | lib/hw/gralloc.default.so 426 | lib/hw/hwcomposer.default.so 427 | lib/hw/keystore.trusty.so 428 | lib/hw/local_time.default.so 429 | lib/hw/nfc.default.so 430 | lib/hw/nfc_nci.bcm2079x.default.so 431 | lib/hw/nfc_nci.default.so 432 | lib/hw/nfc_nci.nqx.default.so 433 | lib/hw/power.default.so 434 | lib/hw/radio.fm.default.so 435 | lib/hw/sensors.dynamic_sensor_hal.so 436 | lib/hw/sound_trigger.stub.default.so 437 | lib/hw/thermal.default.so 438 | lib/hw/tv_input.default.so 439 | lib/hw/vendor.nxp.hardware.nfc@1.0-impl.so 440 | lib/hw/vibrator.default.so 441 | lib/hw/vr.default.so 442 | lib/hw/vulkan.default.so 443 | lib/jcos_nq_client.so 444 | lib/libalsautils.so 445 | lib/libasyncio.so 446 | lib/libavservices_minijail_vendor.so 447 | lib/libbfqio.so 448 | lib/libchrome.so 449 | lib/libcld80211.so 450 | lib/libdemangle.so 451 | lib/libdng_sdk.so 452 | lib/libdrm.so 453 | lib/libdrm_amdgpu.so 454 | lib/libdrm_etnaviv.so 455 | lib/libdrm_freedreno.so 456 | lib/libdrm_intel.so 457 | lib/libdrm_nouveau.so 458 | lib/libdrm_radeon.so 459 | lib/libdrm_rockchip.so 460 | lib/libdrm_tegra.so 461 | lib/libdynamic_sensor_ext.so 462 | lib/libeffects.so 463 | lib/libeffectsconfig.so 464 | lib/libese-app-boot-fortest.so 465 | lib/libese-app-boot.so 466 | lib/libese-app-weaver.so 467 | lib/libese-hw-echo.so 468 | lib/libese-hw-fake.so 469 | lib/libese-hw-nxp-pn80t-common.so 470 | lib/libese-hw-nxp-pn80t-nq-nci.so 471 | lib/libese-hw-nxp-pn80t-spidev.so 472 | lib/libese-sysdeps.so 473 | lib/libese-teq1-private.so 474 | lib/libese-teq1.so 475 | lib/libese.so 476 | lib/libese_cpp_nxp_pn80t_nq_nci.so 477 | lib/libgtest_prod.so 478 | lib/libgui_vendor.so 479 | lib/libhidladapter.so 480 | lib/libhidparser.so 481 | lib/libhwc2on1adapter.so 482 | lib/libhwc2onfbadapter.so 483 | lib/libhwminijail.so 484 | lib/libjson.so 485 | lib/libkeymaster3device.so 486 | lib/libkeymaster4.so 487 | lib/libkeymaster4support.so 488 | lib/libkeystore-engine-wifi-hidl.so 489 | lib/libkeystore-wifi-hidl.so 490 | lib/libkms.so 491 | lib/liblayers_proto.so 492 | lib/libnbaio_mono.so 493 | lib/libnetfilter_conntrack.so 494 | lib/libnfnetlink.so 495 | lib/libnos.so 496 | lib/libnos_citadeld_proxy.so 497 | lib/libnos_client_citadel.so 498 | lib/libnos_datagram.so 499 | lib/libnos_datagram_citadel.so 500 | lib/libnos_transport.so 501 | lib/libnosprotos.so 502 | lib/libp61-jcop-kit.so 503 | lib/libpcap.so 504 | lib/libperfmgr.so 505 | lib/libprotobuf-cpp-full-rtti.so 506 | lib/libprotobuf-cpp-lite-rtti.so 507 | lib/libreference-ril.so 508 | lib/libril.so 509 | lib/librilutils.so 510 | lib/libsensorndkbridge.so 511 | lib/libstagefright_soft_xaacdec.so 512 | lib/libstdc++.so 513 | lib/libtextclassifier_hash.so 514 | lib/libtinycompress.so 515 | lib/libtinyxml.so 516 | lib/libtrusty.so 517 | lib/libusb.so 518 | lib/libvendorjnitest.so 519 | lib/libwebrtc_audio_coding.so 520 | lib/libwebrtc_audio_preprocessing.so 521 | lib/libwifi-hal.so 522 | lib/ls_client.so 523 | lib/ls_nq_client.so 524 | lib/mediacas/libclearkeycasplugin.so 525 | lib/mediacas/libmockcasplugin.so 526 | lib/mediadrm/libdrmclearkeyplugin.so 527 | lib/mediadrm/libmockdrmcryptoplugin.so 528 | lib/nfc_nci.nqx.default.hw.so 529 | lib/nfc_nci_nxp.so 530 | lib/nos_app_avb.so 531 | lib/nos_app_keymaster.so 532 | lib/nos_app_weaver.so 533 | lib/nugget_tools.so 534 | lib/se_nq_extn_client.so 535 | lib/soundfx/libaudiopreprocessing.so 536 | lib/soundfx/libbundlewrapper.so 537 | lib/soundfx/libdownmix.so 538 | lib/soundfx/libdynproc.so 539 | lib/soundfx/libeffectproxy.so 540 | lib/soundfx/libldnhncr.so 541 | lib/soundfx/libreverbwrapper.so 542 | lib/soundfx/libvisualizer.so 543 | lib/vendor.display.config@1.0-adapter-helper.so 544 | lib/vendor.display.config@1.0.so 545 | lib/vendor.display.config@1.1-adapter-helper.so 546 | lib/vendor.display.config@1.1.so 547 | lib/vendor.display.config@1.2-adapter-helper.so 548 | lib/vendor.display.config@1.2.so 549 | lib/vendor.display.config@1.3-adapter-helper.so 550 | lib/vendor.display.config@1.3.so 551 | lib/vendor.display.config@1.4-adapter-helper.so 552 | lib/vendor.display.config@1.4.so 553 | lib/vendor.display.config@1.5-adapter-helper.so 554 | lib/vendor.display.config@1.5.so 555 | lib/vendor.display.config@1.6-adapter-helper.so 556 | lib/vendor.display.config@1.6.so 557 | lib/vendor.display.config@1.7-adapter-helper.so 558 | lib/vendor.display.config@1.7.so 559 | lib/vendor.lineage.biometrics.fingerprint.inscreen@1.0-adapter-helper.so 560 | lib/vendor.lineage.biometrics.fingerprint.inscreen@1.0.so 561 | lib/vendor.lineage.camera.motor@1.0-adapter-helper.so 562 | lib/vendor.lineage.camera.motor@1.0.so 563 | lib/vendor.lineage.livedisplay@2.0-adapter-helper.so 564 | lib/vendor.lineage.livedisplay@2.0.so 565 | lib/vendor.lineage.power@1.0-adapter-helper.so 566 | lib/vendor.lineage.power@1.0.so 567 | lib/vendor.lineage.touch@1.0-adapter-helper.so 568 | lib/vendor.lineage.touch@1.0.so 569 | lib/vendor.lineage.trust@1.0-adapter-helper.so 570 | lib/vendor.lineage.trust@1.0.so 571 | lib/vendor.nxp.hardware.nfc@1.0-adapter-helper.so 572 | lib/vendor.nxp.hardware.nfc@1.0.so 573 | lib/vendor.nxp.nxpese@1.0-adapter-helper.so 574 | lib/vendor.nxp.nxpese@1.0.so 575 | lib/vendor.nxp.nxpnfc@1.0-adapter-helper.so 576 | lib/vendor.nxp.nxpnfc@1.0.so 577 | lib/vendor.qti.hardware.camera.device@1.0-adapter-helper.so 578 | lib/vendor.qti.hardware.camera.device@1.0.so 579 | lib/vendor.qti.hardware.display.allocator@1.0-adapter-helper.so 580 | lib/vendor.qti.hardware.display.allocator@1.0.so 581 | lib/vendor.qti.hardware.display.composer@1.0-adapter-helper.so 582 | lib/vendor.qti.hardware.display.composer@1.0.so 583 | lib/vendor.qti.hardware.display.mapper@1.0-adapter-helper.so 584 | lib/vendor.qti.hardware.display.mapper@1.0.so 585 | lib/vendor.qti.hardware.wifi.hostapd@1.0-adapter-helper.so 586 | lib/vendor.qti.hardware.wifi.hostapd@1.0.so 587 | lib/vendor.qti.hardware.wifi.supplicant@1.0-adapter-helper.so 588 | lib/vendor.qti.hardware.wifi.supplicant@1.0.so 589 | lib/vendor.qti.hardware.wifi.supplicant@2.0-adapter-helper.so 590 | lib/vendor.qti.hardware.wifi.supplicant@2.0.so 591 | lib/vendor.qti.hardware.wifi@1.0-adapter-helper.so 592 | lib/vendor.qti.hardware.wifi@1.0.so 593 | lib/vendor.xiaomi.hardware.fingerprintextension@1.0-adapter-helper.so 594 | lib/vendor.xiaomi.hardware.fingerprintextension@1.0.so 595 | lib64/android.frameworks.displayservice@1.0-adapter-helper.so 596 | lib64/android.frameworks.schedulerservice@1.0-adapter-helper.so 597 | lib64/android.frameworks.sensorservice@1.0-adapter-helper.so 598 | lib64/android.frameworks.vr.composer@1.0-adapter-helper.so 599 | lib64/android.hardware.audio.common@2.0-adapter-helper.so 600 | lib64/android.hardware.audio.common@4.0-adapter-helper.so 601 | lib64/android.hardware.audio.effect@2.0-adapter-helper.so 602 | lib64/android.hardware.audio.effect@4.0-adapter-helper.so 603 | lib64/android.hardware.audio@2.0-adapter-helper.so 604 | lib64/android.hardware.audio@4.0-adapter-helper.so 605 | lib64/android.hardware.authsecret@1.0-adapter-helper.so 606 | lib64/android.hardware.authsecret@1.0-impl.nos.so 607 | lib64/android.hardware.automotive.audiocontrol@1.0-adapter-helper.so 608 | lib64/android.hardware.automotive.evs@1.0-adapter-helper.so 609 | lib64/android.hardware.automotive.vehicle@2.0-adapter-helper.so 610 | lib64/android.hardware.automotive.vehicle@2.0-manager-lib.so 611 | lib64/android.hardware.biometrics.fingerprint@2.1-adapter-helper.so 612 | lib64/android.hardware.bluetooth.a2dp@1.0-adapter-helper.so 613 | lib64/android.hardware.bluetooth@1.0-adapter-helper.so 614 | lib64/android.hardware.boot@1.0-adapter-helper.so 615 | lib64/android.hardware.broadcastradio@1.0-adapter-helper.so 616 | lib64/android.hardware.broadcastradio@1.1-adapter-helper.so 617 | lib64/android.hardware.broadcastradio@2.0-adapter-helper.so 618 | lib64/android.hardware.camera.common@1.0-adapter-helper.so 619 | lib64/android.hardware.camera.device@1.0-adapter-helper.so 620 | lib64/android.hardware.camera.device@3.2-adapter-helper.so 621 | lib64/android.hardware.camera.device@3.3-adapter-helper.so 622 | lib64/android.hardware.camera.device@3.4-adapter-helper.so 623 | lib64/android.hardware.camera.metadata@3.2-adapter-helper.so 624 | lib64/android.hardware.camera.metadata@3.3-adapter-helper.so 625 | lib64/android.hardware.camera.provider@2.4-adapter-helper.so 626 | lib64/android.hardware.cas.native@1.0-adapter-helper.so 627 | lib64/android.hardware.cas@1.0-adapter-helper.so 628 | lib64/android.hardware.configstore@1.0-adapter-helper.so 629 | lib64/android.hardware.configstore@1.1-adapter-helper.so 630 | lib64/android.hardware.confirmationui@1.0-adapter-helper.so 631 | lib64/android.hardware.contexthub@1.0-adapter-helper.so 632 | lib64/android.hardware.drm@1.0-adapter-helper.so 633 | lib64/android.hardware.drm@1.1-adapter-helper.so 634 | lib64/android.hardware.dumpstate@1.0-adapter-helper.so 635 | lib64/android.hardware.gatekeeper@1.0-adapter-helper.so 636 | lib64/android.hardware.gnss@1.0-adapter-helper.so 637 | lib64/android.hardware.gnss@1.1-adapter-helper.so 638 | lib64/android.hardware.graphics.allocator@2.0-adapter-helper.so 639 | lib64/android.hardware.graphics.bufferqueue@1.0-adapter-helper.so 640 | lib64/android.hardware.graphics.common@1.0-adapter-helper.so 641 | lib64/android.hardware.graphics.common@1.1-adapter-helper.so 642 | lib64/android.hardware.graphics.composer@2.1-adapter-helper.so 643 | lib64/android.hardware.graphics.composer@2.2-adapter-helper.so 644 | lib64/android.hardware.graphics.mapper@2.0-adapter-helper.so 645 | lib64/android.hardware.graphics.mapper@2.1-adapter-helper.so 646 | lib64/android.hardware.health@1.0-adapter-helper.so 647 | lib64/android.hardware.health@2.0-adapter-helper.so 648 | lib64/android.hardware.ir@1.0-adapter-helper.so 649 | lib64/android.hardware.keymaster@3.0-adapter-helper.so 650 | lib64/android.hardware.keymaster@4.0-adapter-helper.so 651 | lib64/android.hardware.keymaster@4.0-impl.nos.so 652 | lib64/android.hardware.light@2.0-adapter-helper.so 653 | lib64/android.hardware.media.bufferpool@1.0-adapter-helper.so 654 | lib64/android.hardware.media.omx@1.0-adapter-helper.so 655 | lib64/android.hardware.media@1.0-adapter-helper.so 656 | lib64/android.hardware.memtrack@1.0-adapter-helper.so 657 | lib64/android.hardware.neuralnetworks@1.0-adapter-helper.so 658 | lib64/android.hardware.neuralnetworks@1.1-adapter-helper.so 659 | lib64/android.hardware.nfc@1.0-adapter-helper.so 660 | lib64/android.hardware.nfc@1.1-adapter-helper.so 661 | lib64/android.hardware.oemlock@1.0-adapter-helper.so 662 | lib64/android.hardware.oemlock@1.0-impl.nos.so 663 | lib64/android.hardware.power@1.0-adapter-helper.so 664 | lib64/android.hardware.power@1.1-adapter-helper.so 665 | lib64/android.hardware.power@1.2-adapter-helper.so 666 | lib64/android.hardware.power@1.3-adapter-helper.so 667 | lib64/android.hardware.power@1.3.so 668 | lib64/android.hardware.radio.config@1.0-adapter-helper.so 669 | lib64/android.hardware.radio.deprecated@1.0-adapter-helper.so 670 | lib64/android.hardware.radio@1.0-adapter-helper.so 671 | lib64/android.hardware.radio@1.1-adapter-helper.so 672 | lib64/android.hardware.radio@1.2-adapter-helper.so 673 | lib64/android.hardware.renderscript@1.0-adapter-helper.so 674 | lib64/android.hardware.secure_element@1.0-adapter-helper.so 675 | lib64/android.hardware.sensors@1.0-adapter-helper.so 676 | lib64/android.hardware.soundtrigger@2.0-adapter-helper.so 677 | lib64/android.hardware.soundtrigger@2.1-adapter-helper.so 678 | lib64/android.hardware.tests.bar@1.0-adapter-helper.so 679 | lib64/android.hardware.tests.bar@1.0.so 680 | lib64/android.hardware.tests.baz@1.0-adapter-helper.so 681 | lib64/android.hardware.tests.baz@1.0.so 682 | lib64/android.hardware.tests.expression@1.0-adapter-helper.so 683 | lib64/android.hardware.tests.expression@1.0.so 684 | lib64/android.hardware.tests.extension.light@2.0-adapter-helper.so 685 | lib64/android.hardware.tests.extension.light@2.0.so 686 | lib64/android.hardware.tests.foo@1.0-adapter-helper.so 687 | lib64/android.hardware.tests.foo@1.0.so 688 | lib64/android.hardware.tests.hash@1.0-adapter-helper.so 689 | lib64/android.hardware.tests.hash@1.0.so 690 | lib64/android.hardware.tests.inheritance@1.0-adapter-helper.so 691 | lib64/android.hardware.tests.inheritance@1.0.so 692 | lib64/android.hardware.tests.libhwbinder@1.0-adapter-helper.so 693 | lib64/android.hardware.tests.libhwbinder@1.0.so 694 | lib64/android.hardware.tests.memory@1.0-adapter-helper.so 695 | lib64/android.hardware.tests.memory@1.0.so 696 | lib64/android.hardware.tests.msgq@1.0-adapter-helper.so 697 | lib64/android.hardware.tests.msgq@1.0.so 698 | lib64/android.hardware.tests.multithread@1.0-adapter-helper.so 699 | lib64/android.hardware.tests.multithread@1.0.so 700 | lib64/android.hardware.tests.pointer@1.0-adapter-helper.so 701 | lib64/android.hardware.tests.pointer@1.0.so 702 | lib64/android.hardware.tests.trie@1.0-adapter-helper.so 703 | lib64/android.hardware.tests.trie@1.0.so 704 | lib64/android.hardware.tetheroffload.config@1.0-adapter-helper.so 705 | lib64/android.hardware.tetheroffload.control@1.0-adapter-helper.so 706 | lib64/android.hardware.thermal@1.0-adapter-helper.so 707 | lib64/android.hardware.thermal@1.1-adapter-helper.so 708 | lib64/android.hardware.tv.cec@1.0-adapter-helper.so 709 | lib64/android.hardware.tv.input@1.0-adapter-helper.so 710 | lib64/android.hardware.usb.gadget@1.0-adapter-helper.so 711 | lib64/android.hardware.usb@1.0-adapter-helper.so 712 | lib64/android.hardware.usb@1.1-adapter-helper.so 713 | lib64/android.hardware.vibrator@1.0-adapter-helper.so 714 | lib64/android.hardware.vibrator@1.1-adapter-helper.so 715 | lib64/android.hardware.vibrator@1.2-adapter-helper.so 716 | lib64/android.hardware.vr@1.0-adapter-helper.so 717 | lib64/android.hardware.weaver@1.0-adapter-helper.so 718 | lib64/android.hardware.weaver@1.0-impl.nos.so 719 | lib64/android.hardware.wifi.hostapd@1.0-adapter-helper.so 720 | lib64/android.hardware.wifi.offload@1.0-adapter-helper.so 721 | lib64/android.hardware.wifi.supplicant@1.0-adapter-helper.so 722 | lib64/android.hardware.wifi.supplicant@1.1-adapter-helper.so 723 | lib64/android.hardware.wifi@1.0-adapter-helper.so 724 | lib64/android.hardware.wifi@1.1-adapter-helper.so 725 | lib64/android.hardware.wifi@1.2-adapter-helper.so 726 | lib64/android.hidl.allocator@1.0-adapter-helper.so 727 | lib64/android.hidl.base@1.0-adapter-helper.so 728 | lib64/android.hidl.manager@1.0-adapter-helper.so 729 | lib64/android.hidl.manager@1.1-adapter-helper.so 730 | lib64/android.hidl.memory.block@1.0-adapter-helper.so 731 | lib64/android.hidl.memory.token@1.0-adapter-helper.so 732 | lib64/android.hidl.memory@1.0-adapter-helper.so 733 | lib64/android.hidl.token@1.0-adapter-helper.so 734 | lib64/android.system.net.netd@1.0-adapter-helper.so 735 | lib64/android.system.net.netd@1.1-adapter-helper.so 736 | lib64/android.system.wifi.keystore@1.0-adapter-helper.so 737 | lib64/camera.device@1.0-impl-legacy.so 738 | lib64/camera.device@1.0-impl.so 739 | lib64/camera.device@3.2-impl.so 740 | lib64/camera.device@3.3-impl.so 741 | lib64/camera.device@3.4-external-impl.so 742 | lib64/camera.device@3.4-impl.so 743 | lib64/ese_spi_nxp.so 744 | lib64/hidl.tests.vendor@1.0-adapter-helper.so 745 | lib64/hidl.tests.vendor@1.0.so 746 | lib64/hidl.tests.vendor@1.1-adapter-helper.so 747 | lib64/hidl.tests.vendor@1.1.so 748 | lib64/hw/android.hardware.audio.effect@2.0-impl.so 749 | lib64/hw/android.hardware.audio.effect@4.0-impl.so 750 | lib64/hw/android.hardware.audio@2.0-impl.so 751 | lib64/hw/android.hardware.audio@4.0-impl.so 752 | lib64/hw/android.hardware.bluetooth.a2dp@1.0-impl.so 753 | lib64/hw/android.hardware.bluetooth@1.0-impl-sim.so 754 | lib64/hw/android.hardware.bluetooth@1.0-impl.so 755 | lib64/hw/android.hardware.boot@1.0-impl.so 756 | lib64/hw/android.hardware.broadcastradio@1.0-impl.so 757 | lib64/hw/android.hardware.camera.provider@2.4-impl-legacy.so 758 | lib64/hw/android.hardware.camera.provider@2.4-impl.so 759 | lib64/hw/android.hardware.contexthub@1.0-impl.generic.so 760 | lib64/hw/android.hardware.contexthub@1.0-impl.nanohub.so 761 | lib64/hw/android.hardware.contexthub@1.0-impl.so 762 | lib64/hw/android.hardware.drm@1.0-impl.so 763 | lib64/hw/android.hardware.gatekeeper@1.0-impl.so 764 | lib64/hw/android.hardware.gnss@1.0-impl.so 765 | lib64/hw/android.hardware.graphics.allocator@2.0-impl.so 766 | lib64/hw/android.hardware.graphics.composer@2.1-impl.so 767 | lib64/hw/android.hardware.graphics.mapper@2.0-impl-2.1.so 768 | lib64/hw/android.hardware.graphics.mapper@2.0-impl.so 769 | lib64/hw/android.hardware.health@1.0-impl.so 770 | lib64/hw/android.hardware.ir@1.0-impl.so 771 | lib64/hw/android.hardware.keymaster@3.0-impl.so 772 | lib64/hw/android.hardware.light@2.0-impl.so 773 | lib64/hw/android.hardware.memtrack@1.0-impl.so 774 | lib64/hw/android.hardware.nfc@1.0-impl-bcm.so 775 | lib64/hw/android.hardware.nfc@1.0-impl.so 776 | lib64/hw/android.hardware.power@1.0-impl.so 777 | lib64/hw/android.hardware.renderscript@1.0-impl.so 778 | lib64/hw/android.hardware.sensors@1.0-impl.so 779 | lib64/hw/android.hardware.thermal@1.0-impl.so 780 | lib64/hw/android.hardware.tv.cec@1.0-impl.so 781 | lib64/hw/android.hardware.tv.input@1.0-impl.so 782 | lib64/hw/android.hardware.vibrator@1.0-impl.so 783 | lib64/hw/android.hardware.vr@1.0-impl.so 784 | lib64/hw/audio.primary.caremu.so 785 | lib64/hw/audio.primary.default.so 786 | lib64/hw/audio.r_submix.default.so 787 | lib64/hw/audio.stub.default.so 788 | lib64/hw/audio.usb.default.so 789 | lib64/hw/audio_amplifier.default.so 790 | lib64/hw/audio_policy.stub.so 791 | lib64/hw/consumerir.default.so 792 | lib64/hw/context_hub.default.so 793 | lib64/hw/fingerprint.default.so 794 | lib64/hw/gatekeeper.trusty.so 795 | lib64/hw/gralloc.default.so 796 | lib64/hw/hwcomposer.default.so 797 | lib64/hw/keystore.trusty.so 798 | lib64/hw/local_time.default.so 799 | lib64/hw/nfc.default.so 800 | lib64/hw/nfc_nci.bcm2079x.default.so 801 | lib64/hw/nfc_nci.default.so 802 | lib64/hw/nfc_nci.nqx.default.so 803 | lib64/hw/power.default.so 804 | lib64/hw/radio.fm.default.so 805 | lib64/hw/sensors.dynamic_sensor_hal.so 806 | lib64/hw/sound_trigger.stub.default.so 807 | lib64/hw/thermal.default.so 808 | lib64/hw/tv_input.default.so 809 | lib64/hw/vendor.nxp.hardware.nfc@1.0-impl.so 810 | lib64/hw/vibrator.default.so 811 | lib64/hw/vr.default.so 812 | lib64/hw/vulkan.default.so 813 | lib64/jcos_nq_client.so 814 | lib64/libalsautils.so 815 | lib64/libasyncio.so 816 | lib64/libavservices_minijail_vendor.so 817 | lib64/libbfqio.so 818 | lib64/libchrome.so 819 | lib64/libcld80211.so 820 | lib64/libdemangle.so 821 | lib64/libdng_sdk.so 822 | lib64/libdrm.so 823 | lib64/libdrm_amdgpu.so 824 | lib64/libdrm_etnaviv.so 825 | lib64/libdrm_freedreno.so 826 | lib64/libdrm_intel.so 827 | lib64/libdrm_nouveau.so 828 | lib64/libdrm_radeon.so 829 | lib64/libdrm_rockchip.so 830 | lib64/libdrm_tegra.so 831 | lib64/libdynamic_sensor_ext.so 832 | lib64/libeffects.so 833 | lib64/libeffectsconfig.so 834 | lib64/libese-app-boot-fortest.so 835 | lib64/libese-app-boot.so 836 | lib64/libese-app-weaver.so 837 | lib64/libese-hw-echo.so 838 | lib64/libese-hw-fake.so 839 | lib64/libese-hw-nxp-pn80t-common.so 840 | lib64/libese-hw-nxp-pn80t-nq-nci.so 841 | lib64/libese-hw-nxp-pn80t-spidev.so 842 | lib64/libese-sysdeps.so 843 | lib64/libese-teq1-private.so 844 | lib64/libese-teq1.so 845 | lib64/libese.so 846 | lib64/libese_cpp_nxp_pn80t_nq_nci.so 847 | lib64/libgtest_prod.so 848 | lib64/libgui_vendor.so 849 | lib64/libhidladapter.so 850 | lib64/libhidparser.so 851 | lib64/libhwc2on1adapter.so 852 | lib64/libhwc2onfbadapter.so 853 | lib64/libhwminijail.so 854 | lib64/libjson.so 855 | lib64/libkeymaster3device.so 856 | lib64/libkeymaster4.so 857 | lib64/libkeymaster4support.so 858 | lib64/libkeystore-engine-wifi-hidl.so 859 | lib64/libkeystore-wifi-hidl.so 860 | lib64/libkms.so 861 | lib64/liblayers_proto.so 862 | lib64/libnbaio_mono.so 863 | lib64/libnetfilter_conntrack.so 864 | lib64/libnfnetlink.so 865 | lib64/libnos.so 866 | lib64/libnos_citadeld_proxy.so 867 | lib64/libnos_client_citadel.so 868 | lib64/libnos_datagram.so 869 | lib64/libnos_datagram_citadel.so 870 | lib64/libnos_transport.so 871 | lib64/libnosprotos.so 872 | lib64/libp61-jcop-kit.so 873 | lib64/libpcap.so 874 | lib64/libperfmgr.so 875 | lib64/libprotobuf-cpp-full-rtti.so 876 | lib64/libprotobuf-cpp-lite-rtti.so 877 | lib64/libreference-ril.so 878 | lib64/libril.so 879 | lib64/librilutils.so 880 | lib64/libsensorndkbridge.so 881 | lib64/libstdc++.so 882 | lib64/libtextclassifier_hash.so 883 | lib64/libtinycompress.so 884 | lib64/libtinyxml.so 885 | lib64/libtrusty.so 886 | lib64/libusb.so 887 | lib64/libvendorjnitest.so 888 | lib64/libwebrtc_audio_coding.so 889 | lib64/libwebrtc_audio_preprocessing.so 890 | lib64/libwifi-hal.so 891 | lib64/ls_client.so 892 | lib64/ls_nq_client.so 893 | lib64/mediacas/libclearkeycasplugin.so 894 | lib64/mediacas/libmockcasplugin.so 895 | lib64/mediadrm/libdrmclearkeyplugin.so 896 | lib64/mediadrm/libmockdrmcryptoplugin.so 897 | lib64/nfc_nci.nqx.default.hw.so 898 | lib64/nfc_nci_nxp.so 899 | lib64/nos_app_avb.so 900 | lib64/nos_app_keymaster.so 901 | lib64/nos_app_weaver.so 902 | lib64/nugget_tools.so 903 | lib64/se_nq_extn_client.so 904 | lib64/soundfx/libaudiopreprocessing.so 905 | lib64/soundfx/libbundlewrapper.so 906 | lib64/soundfx/libdownmix.so 907 | lib64/soundfx/libdynproc.so 908 | lib64/soundfx/libeffectproxy.so 909 | lib64/soundfx/libldnhncr.so 910 | lib64/soundfx/libreverbwrapper.so 911 | lib64/soundfx/libvisualizer.so 912 | lib64/vendor.display.config@1.0-adapter-helper.so 913 | lib64/vendor.display.config@1.0.so 914 | lib64/vendor.display.config@1.1-adapter-helper.so 915 | lib64/vendor.display.config@1.1.so 916 | lib64/vendor.display.config@1.2-adapter-helper.so 917 | lib64/vendor.display.config@1.2.so 918 | lib64/vendor.display.config@1.3-adapter-helper.so 919 | lib64/vendor.display.config@1.3.so 920 | lib64/vendor.display.config@1.4-adapter-helper.so 921 | lib64/vendor.display.config@1.4.so 922 | lib64/vendor.display.config@1.5-adapter-helper.so 923 | lib64/vendor.display.config@1.5.so 924 | lib64/vendor.display.config@1.6-adapter-helper.so 925 | lib64/vendor.display.config@1.6.so 926 | lib64/vendor.display.config@1.7-adapter-helper.so 927 | lib64/vendor.display.config@1.7.so 928 | lib64/vendor.lineage.biometrics.fingerprint.inscreen@1.0-adapter-helper.so 929 | lib64/vendor.lineage.biometrics.fingerprint.inscreen@1.0.so 930 | lib64/vendor.lineage.camera.motor@1.0-adapter-helper.so 931 | lib64/vendor.lineage.camera.motor@1.0.so 932 | lib64/vendor.lineage.livedisplay@2.0-adapter-helper.so 933 | lib64/vendor.lineage.livedisplay@2.0.so 934 | lib64/vendor.lineage.power@1.0-adapter-helper.so 935 | lib64/vendor.lineage.power@1.0.so 936 | lib64/vendor.lineage.touch@1.0-adapter-helper.so 937 | lib64/vendor.lineage.touch@1.0.so 938 | lib64/vendor.lineage.trust@1.0-adapter-helper.so 939 | lib64/vendor.lineage.trust@1.0.so 940 | lib64/vendor.nxp.hardware.nfc@1.0-adapter-helper.so 941 | lib64/vendor.nxp.hardware.nfc@1.0.so 942 | lib64/vendor.nxp.nxpese@1.0-adapter-helper.so 943 | lib64/vendor.nxp.nxpese@1.0.so 944 | lib64/vendor.nxp.nxpnfc@1.0-adapter-helper.so 945 | lib64/vendor.nxp.nxpnfc@1.0.so 946 | lib64/vendor.qti.hardware.camera.device@1.0-adapter-helper.so 947 | lib64/vendor.qti.hardware.camera.device@1.0.so 948 | lib64/vendor.qti.hardware.display.allocator@1.0-adapter-helper.so 949 | lib64/vendor.qti.hardware.display.allocator@1.0.so 950 | lib64/vendor.qti.hardware.display.composer@1.0-adapter-helper.so 951 | lib64/vendor.qti.hardware.display.composer@1.0.so 952 | lib64/vendor.qti.hardware.display.mapper@1.0-adapter-helper.so 953 | lib64/vendor.qti.hardware.display.mapper@1.0.so 954 | lib64/vendor.qti.hardware.wifi.hostapd@1.0-adapter-helper.so 955 | lib64/vendor.qti.hardware.wifi.hostapd@1.0.so 956 | lib64/vendor.qti.hardware.wifi.supplicant@1.0-adapter-helper.so 957 | lib64/vendor.qti.hardware.wifi.supplicant@1.0.so 958 | lib64/vendor.qti.hardware.wifi.supplicant@2.0-adapter-helper.so 959 | lib64/vendor.qti.hardware.wifi.supplicant@2.0.so 960 | lib64/vendor.qti.hardware.wifi@1.0-adapter-helper.so 961 | lib64/vendor.qti.hardware.wifi@1.0.so 962 | lib64/vendor.xiaomi.hardware.fingerprintextension@1.0-adapter-helper.so 963 | lib64/vendor.xiaomi.hardware.fingerprintextension@1.0.so 964 | overlay/DisplayCutoutEmulationCorner/DisplayCutoutEmulationCornerOverlay.apk 965 | overlay/DisplayCutoutEmulationDouble/DisplayCutoutEmulationDoubleOverlay.apk 966 | overlay/DisplayCutoutEmulationNarrow/DisplayCutoutEmulationNarrowOverlay.apk 967 | overlay/DisplayCutoutEmulationTall/DisplayCutoutEmulationTallOverlay.apk 968 | overlay/DisplayCutoutEmulationWide/DisplayCutoutEmulationWideOverlay.apk 969 | overlay/SysuiDarkTheme/SysuiDarkThemeOverlay.apk 970 | priv-app/VendorPrivAppPermissionTest/VendorPrivAppPermissionTest.apk 971 | -------------------------------------------------------------------------------- /subtree_modules_data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | TARGET_DIR="qcom/opensource" 4 | 5 | source "$1" 6 | 7 | echo "$REPOS_TO_URL" 8 | 9 | for REPO_NAME in "${!REPOS_TO_REF[@]}"; do 10 | REPO_REF="${REPOS_TO_REF[$REPO_NAME]}" 11 | REPO_URL="${REPOS_TO_URL[$REPO_NAME]}" 12 | REPO_DISK_PATH="$TARGET_DIR/$REPO_NAME" 13 | 14 | if [ ! -d "$REPO_DISK_PATH" ]; then 15 | REPO_DISK_DIR_PATH=$(dirname "$REPO_DISK_PATH") 16 | mkdir -p "$REPO_DISK_DIR_PATH" 17 | 18 | git subtree add --prefix="$REPO_DISK_PATH" "$REPO_URL" "$REPO_REF" 19 | else 20 | git subtree pull --prefix="$REPO_DISK_PATH" "$REPO_URL" "$REPO_REF" 21 | fi 22 | done 23 | --------------------------------------------------------------------------------