├── .dockerignore ├── .gitignore ├── Dockerfile ├── README.md ├── old └── .gitkeep ├── output └── .gitkeep ├── payload_dumper.py ├── requirements.txt └── update_metadata_pb2.py /.dockerignore: -------------------------------------------------------------------------------- 1 | * 2 | !payload_dumper.py 3 | !requirements.txt 4 | !update_metadata_pb2.py 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Ignore everything 2 | * 3 | 4 | # But not these files... 5 | !payload_dumper.py 6 | !requirements.txt 7 | !update_metadata_pb2.py 8 | !output/.gitkeep 9 | !README.md 10 | !Dockerfile 11 | !.dockerignore 12 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.9.7-alpine3.14 2 | 3 | RUN apk upgrade \ 4 | && apk add xz 5 | 6 | WORKDIR /app 7 | VOLUME ["/app"] 8 | 9 | COPY . /app 10 | 11 | RUN apk add --no-cache --virtual .build-deps build-base \ 12 | && pip --no-cache-dir install -r requirements.txt \ 13 | && apk del .build-deps 14 | 15 | ENTRYPOINT ["python","payload_dumper.py"] 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # payload dumper 2 | Script tested on Yandex Amber OTA's (full and incremental) under Linux (but may works on Windows too) 3 | 4 | ## System requirement 5 | 6 | - Python3, pip 7 | - google protobuf for python `pip install protobuf` 8 | 9 | ### Docker 10 | 11 | Alternatively you can use Docker: 12 | ```bash 13 | docker run --rm -v "${PWD}":/data -it vm03/payload_dumper /data/payload.bin --out /data 14 | ``` 15 | or self build Docker image 16 | ```bash 17 | # build the container image 18 | $ docker build -t payload_dumper . 
19 | 20 | # mount current PWD and pass payload.bin 21 | $ docker run --rm -v "${PWD}":/data -it payload_dumper /data/payload.bin --out /data 22 | ``` 23 | 24 | ## Guide 25 | 26 | ### Preparation 27 | - Make sure you have Python 3.6 or later installed. 28 | - Download payload_dumper.py, update_metadata_pb2.py and requirements.txt 29 | - Extract your OTA zip and place payload.bin in the same folder as these files. 30 | - Open PowerShell, Command Prompt, or Terminal depending on your OS. 31 | - Enter the following command: python -m pip install -r requirements.txt 32 | 33 | ### Full OTA 34 | 35 | - When that’s finished, enter this command: python payload_dumper.py payload.bin 36 | - This will start to extract the images within the payload.bin file to the output folder you are in. 37 | 38 | ### Incremental OTA 39 | 40 | - Copy the original images (from a full OTA or dumped from a device) to the old folder (named with the partition name + .img, e.g. boot.img, system.img) 41 | - Run: python payload_dumper.py --diff payload.bin 42 | - Files are extracted to the output folder. 
#!/usr/bin/env python3
"""Android OTA payload.bin dumper.

Extracts partition images from a full or incremental (differential) OTA
payload. The payload may be a local file, a remote URL (http/https/s3/gs),
or a zip archive (local or remote) containing payload.bin.
"""
import struct
import hashlib
import bz2
import sys
import argparse
import bsdiff4
import io
import os
import brotli
import zipfile
import zstandard
import fsspec
import urllib.parse
from pathlib import Path

try:
    import lzma
except ImportError:
    from backports import lzma

import update_metadata_pb2 as um

BSDF2_MAGIC = b'BSDF2'

flatten = lambda l: [item for sublist in l for item in sublist]

def u32(x):
    """Unpack a big-endian unsigned 32-bit integer from bytes."""
    return struct.unpack('>I', x)[0]

def u64(x):
    """Unpack a big-endian unsigned 64-bit integer from bytes."""
    return struct.unpack('>Q', x)[0]

def bsdf2_decompress(alg, data):
    """Decompress a BSDF2 stream section.

    alg: 0 = stored (no compression), 1 = bzip2, 2 = brotli.
    Raises ValueError for any other algorithm byte instead of silently
    returning None (which would surface later as an opaque TypeError).
    """
    if alg == 0:
        return data
    elif alg == 1:
        return bz2.decompress(data)
    elif alg == 2:
        return brotli.decompress(data)
    raise ValueError("unknown BSDF2 compression algorithm: %d" % alg)

# Adapted from bsdiff4.read_patch
def bsdf2_read_patch(fi):
    """read a bsdiff/BSDF2-format patch from stream 'fi'

    Returns (len_dst, tcontrol, bdiff, bextra) suitable for
    bsdiff4.core.patch().
    """
    magic = fi.read(8)
    if magic == bsdiff4.format.MAGIC:
        # bsdiff4 uses bzip2 (algorithm 1) for all three sections
        alg_control = alg_diff = alg_extra = 1
    elif magic[:5] == BSDF2_MAGIC:
        # BSDF2 encodes per-section algorithms in the last 3 magic bytes
        alg_control = magic[5]
        alg_diff = magic[6]
        alg_extra = magic[7]
    else:
        raise ValueError("incorrect magic bsdiff/BSDF2 header")

    # length headers
    len_control = bsdiff4.core.decode_int64(fi.read(8))
    len_diff = bsdiff4.core.decode_int64(fi.read(8))
    len_dst = bsdiff4.core.decode_int64(fi.read(8))

    # read the control header: triples of (diff_len, extra_len, seek_adjust)
    bcontrol = bsdf2_decompress(alg_control, fi.read(len_control))
    tcontrol = [(bsdiff4.core.decode_int64(bcontrol[i:i + 8]),
                 bsdiff4.core.decode_int64(bcontrol[i + 8:i + 16]),
                 bsdiff4.core.decode_int64(bcontrol[i + 16:i + 24]))
                for i in range(0, len(bcontrol), 24)]

    # read the diff and extra blocks (extra runs to end of stream)
    bdiff = bsdf2_decompress(alg_diff, fi.read(len_diff))
    bextra = bsdf2_decompress(alg_extra, fi.read())
    return len_dst, tcontrol, bdiff, bextra

def verify_contiguous(exts):
    """Return True if extents cover blocks 0..N with no gaps or overlaps."""
    blocks = 0

    for ext in exts:
        if ext.start_block != blocks:
            return False

        blocks += ext.num_blocks

    return True

def open_payload_file(file_path):
    """
    Opens a payload file, whether it's a local file, a remote file,
    or inside a zip archive (local or remote).

    Returns a file-like object pointing to the payload.bin content.
    """
    # Check if the file is a URL
    is_url = file_path.startswith(('http://', 'https://', 's3://', 'gs://'))

    if is_url:
        # Handle remote file via fsspec
        protocol = urllib.parse.urlparse(file_path).scheme
        fs = fsspec.filesystem(protocol)
        src = fs.open(file_path)
    else:
        # zipfile.is_zipfile accepts a path directly
        src = file_path

    if zipfile.is_zipfile(src):
        if is_url:
            # is_zipfile consumed the stream; rewind before re-reading
            src.seek(0)
        # BUGFIX: do NOT wrap this in `with` -- closing the ZipFile would
        # also close the underlying stream, invalidating the member handle
        # we hand back to the caller. The archive stays open for as long as
        # the returned file object is in use.
        zf = zipfile.ZipFile(src)
        if "payload.bin" in zf.namelist():
            return zf.open("payload.bin")
        zf.close()
        raise ValueError("payload.bin not found in zip file")

    if is_url:
        # Remote, not a zip: use the fsspec handle as-is
        return src
    # Local file, not a zip
    return open(file_path, 'rb')

def data_for_op(op, payload_file, out_file, old_file, data_offset, block_size):
    """Apply a single InstallOperation, writing its result into out_file.

    payload_file: the open payload stream; data_offset is where the blob
    section starts. old_file is the original image (differential OTA only).
    Returns the raw (or decompressed) operation data.
    """
    payload_file.seek(data_offset + op.data_offset)
    data = payload_file.read(op.data_length)

    # Explicit check instead of `assert` so it survives `python -O`
    if op.data_sha256_hash and hashlib.sha256(data).digest() != op.data_sha256_hash:
        raise ValueError('operation data hash mismatch')

    if op.type == op.REPLACE_XZ:
        dec = lzma.LZMADecompressor()
        data = dec.decompress(data)
        out_file.seek(op.dst_extents[0].start_block*block_size)
        out_file.write(data)
    elif op.type == op.ZSTD:
        dec = zstandard.ZstdDecompressor().decompressobj()
        data = dec.decompress(data)
        out_file.seek(op.dst_extents[0].start_block*block_size)
        out_file.write(data)
    elif op.type == op.REPLACE_BZ:
        dec = bz2.BZ2Decompressor()
        data = dec.decompress(data)
        out_file.seek(op.dst_extents[0].start_block*block_size)
        out_file.write(data)
    elif op.type == op.REPLACE:
        out_file.seek(op.dst_extents[0].start_block*block_size)
        out_file.write(data)
    elif op.type == op.SOURCE_COPY:
        if not old_file:
            print("SOURCE_COPY supported only for differential OTA")
            sys.exit(-2)
        out_file.seek(op.dst_extents[0].start_block*block_size)
        for ext in op.src_extents:
            old_file.seek(ext.start_block*block_size)
            data = old_file.read(ext.num_blocks*block_size)
            out_file.write(data)
    elif op.type in (op.SOURCE_BSDIFF, op.BROTLI_BSDIFF):
        if not old_file:
            print("BSDIFF supported only for differential OTA")
            sys.exit(-3)
        out_file.seek(op.dst_extents[0].start_block*block_size)
        # Gather the source extents into one contiguous buffer, patch it,
        # then scatter the result back out to the destination extents.
        tmp_buff = io.BytesIO()
        for ext in op.src_extents:
            old_file.seek(ext.start_block*block_size)
            old_data = old_file.read(ext.num_blocks*block_size)
            tmp_buff.write(old_data)
        tmp_buff.seek(0)
        old_data = tmp_buff.read()
        tmp_buff.seek(0)
        tmp_buff.write(bsdiff4.core.patch(old_data, *bsdf2_read_patch(io.BytesIO(data))))
        n = 0
        tmp_buff.seek(0)
        for ext in op.dst_extents:
            tmp_buff.seek(n*block_size)
            n += ext.num_blocks
            data = tmp_buff.read(ext.num_blocks*block_size)
            out_file.seek(ext.start_block*block_size)
            out_file.write(data)
    elif op.type == op.ZERO:
        for ext in op.dst_extents:
            out_file.seek(ext.start_block*block_size)
            out_file.write(b'\x00' * ext.num_blocks*block_size)
    else:
        print("Unsupported type = %d" % op.type)
        sys.exit(-1)

    return data

def dump_part(part, payload_file, data_offset, block_size, out_dir, old_dir=None, use_diff=False):
    """Extract one partition from the payload into <out_dir>/<name>.img.

    For differential OTAs (use_diff=True) the original image is read from
    <old_dir>/<name>.img when present. Files are closed even if an
    operation raises (the original leaked handles on error).
    """
    sys.stdout.write(f"Processing {part.partition_name} partition")
    sys.stdout.flush()

    # Ensure output directory exists
    Path(out_dir).mkdir(exist_ok=True)

    old_file = None
    if use_diff:
        old_file_path = f'{old_dir}/{part.partition_name}.img'
        if os.path.exists(old_file_path):
            old_file = open(old_file_path, 'rb')
        else:
            print(f"\nWarning: Original image {old_file_path} not found for differential OTA")

    try:
        with open(f'{out_dir}/{part.partition_name}.img', 'wb') as out_file:
            for op in part.operations:
                data_for_op(op, payload_file, out_file, old_file, data_offset, block_size)
                sys.stdout.write(".")
                sys.stdout.flush()
    finally:
        if old_file:
            old_file.close()

    print("Done")

def main():
    """Parse CLI arguments, read the payload header/manifest, dump partitions."""
    parser = argparse.ArgumentParser(description='OTA payload dumper')
    parser.add_argument('payload_path', type=str,
                        help='payload file path or URL (can be a zip file)')
    parser.add_argument('--out', default='output',
                        help='output directory (default: output)')
    parser.add_argument('--diff', action='store_true',
                        help='extract differential OTA, you need put original images to old dir')
    parser.add_argument('--old', default='old',
                        help='directory with original images for differential OTA (default: old)')
    parser.add_argument('--images', default="",
                        help='comma-separated list of images to extract (default: all)')
    args = parser.parse_args()

    # Ensure output directory exists (exist_ok avoids a check/create race)
    os.makedirs(args.out, exist_ok=True)

    # Open the payload file (handles local/remote and zip/non-zip)
    with open_payload_file(args.payload_path) as payload_file:
        # Read and verify the magic header; explicit raises instead of
        # asserts so validation survives `python -O`
        magic = payload_file.read(4)
        if magic != b'CrAU':
            raise ValueError("Invalid magic header, not an OTA payload")

        file_format_version = u64(payload_file.read(8))
        if file_format_version != 2:
            raise ValueError(f"Unsupported file format version: {file_format_version}")

        manifest_size = u64(payload_file.read(8))

        # Version is guaranteed to be 2 here, which always carries a
        # 32-bit metadata signature size field
        metadata_signature_size = u32(payload_file.read(4))

        manifest = payload_file.read(manifest_size)
        metadata_signature = payload_file.read(metadata_signature_size)

        data_offset = payload_file.tell()

        dam = um.DeltaArchiveManifest()
        dam.ParseFromString(manifest)
        block_size = dam.block_size

        if args.images == "":
            for part in dam.partitions:
                dump_part(part, payload_file, data_offset, block_size, args.out,
                          args.old if args.diff else None, args.diff)
        else:
            images = args.images.split(",")
            for image in images:
                partition = [part for part in dam.partitions if part.partition_name == image]
                if partition:
                    dump_part(partition[0], payload_file, data_offset, block_size, args.out,
                              args.old if args.diff else None, args.diff)
                else:
                    sys.stderr.write(f"Partition {image} not found in payload!\n")

if __name__ == "__main__":
    main()
3 | # NO CHECKED-IN PROTOBUF GENCODE 4 | # source: update_metadata.proto 5 | # Protobuf Python Version: 5.27.2 6 | """Generated protocol buffer code.""" 7 | from google.protobuf import descriptor as _descriptor 8 | from google.protobuf import descriptor_pool as _descriptor_pool 9 | from google.protobuf import runtime_version as _runtime_version 10 | from google.protobuf import symbol_database as _symbol_database 11 | from google.protobuf.internal import builder as _builder 12 | _runtime_version.ValidateProtobufRuntimeVersion( 13 | _runtime_version.Domain.PUBLIC, 14 | 5, 15 | 27, 16 | 2, 17 | '', 18 | 'update_metadata.proto' 19 | ) 20 | # @@protoc_insertion_point(imports) 21 | 22 | _sym_db = _symbol_database.Default() 23 | 24 | 25 | 26 | 27 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x15update_metadata.proto\x12\x16\x63hromeos_update_engine\"1\n\x06\x45xtent\x12\x13\n\x0bstart_block\x18\x01 \x01(\x04\x12\x12\n\nnum_blocks\x18\x02 \x01(\x04\"\x9f\x01\n\nSignatures\x12@\n\nsignatures\x18\x01 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x1aO\n\tSignature\x12\x13\n\x07version\x18\x01 \x01(\rB\x02\x18\x01\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c\x12\x1f\n\x17unpadded_signature_size\x18\x03 \x01(\x07\"+\n\rPartitionInfo\x12\x0c\n\x04size\x18\x01 \x01(\x04\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\"\xb0\x04\n\x10InstallOperation\x12;\n\x04type\x18\x01 \x02(\x0e\x32-.chromeos_update_engine.InstallOperation.Type\x12\x13\n\x0b\x64\x61ta_offset\x18\x02 \x01(\x04\x12\x13\n\x0b\x64\x61ta_length\x18\x03 \x01(\x04\x12\x33\n\x0bsrc_extents\x18\x04 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\nsrc_length\x18\x05 \x01(\x04\x12\x33\n\x0b\x64st_extents\x18\x06 \x03(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\ndst_length\x18\x07 \x01(\x04\x12\x18\n\x10\x64\x61ta_sha256_hash\x18\x08 \x01(\x0c\x12\x17\n\x0fsrc_sha256_hash\x18\t 
\x01(\x0c\"\xef\x01\n\x04Type\x12\x0b\n\x07REPLACE\x10\x00\x12\x0e\n\nREPLACE_BZ\x10\x01\x12\x0c\n\x04MOVE\x10\x02\x1a\x02\x08\x01\x12\x0e\n\x06\x42SDIFF\x10\x03\x1a\x02\x08\x01\x12\x0f\n\x0bSOURCE_COPY\x10\x04\x12\x11\n\rSOURCE_BSDIFF\x10\x05\x12\x0e\n\nREPLACE_XZ\x10\x08\x12\x08\n\x04ZERO\x10\x06\x12\x0b\n\x07\x44ISCARD\x10\x07\x12\x11\n\rBROTLI_BSDIFF\x10\n\x12\x0c\n\x08PUFFDIFF\x10\t\x12\x0c\n\x08ZUCCHINI\x10\x0b\x12\x12\n\x0eLZ4DIFF_BSDIFF\x10\x0c\x12\x14\n\x10LZ4DIFF_PUFFDIFF\x10\r\x12\x08\n\x04ZSTD\x10\x0e\"\x81\x02\n\x11\x43owMergeOperation\x12<\n\x04type\x18\x01 \x01(\x0e\x32..chromeos_update_engine.CowMergeOperation.Type\x12\x32\n\nsrc_extent\x18\x02 \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x32\n\ndst_extent\x18\x03 \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x12\n\nsrc_offset\x18\x04 \x01(\r\"2\n\x04Type\x12\x0c\n\x08\x43OW_COPY\x10\x00\x12\x0b\n\x07\x43OW_XOR\x10\x01\x12\x0f\n\x0b\x43OW_REPLACE\x10\x02\"\xe7\x06\n\x0fPartitionUpdate\x12\x16\n\x0epartition_name\x18\x01 \x02(\t\x12\x17\n\x0frun_postinstall\x18\x02 \x01(\x08\x12\x18\n\x10postinstall_path\x18\x03 \x01(\t\x12\x17\n\x0f\x66ilesystem_type\x18\x04 \x01(\t\x12M\n\x17new_partition_signature\x18\x05 \x03(\x0b\x32,.chromeos_update_engine.Signatures.Signature\x12\x41\n\x12old_partition_info\x18\x06 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12\x41\n\x12new_partition_info\x18\x07 \x01(\x0b\x32%.chromeos_update_engine.PartitionInfo\x12<\n\noperations\x18\x08 \x03(\x0b\x32(.chromeos_update_engine.InstallOperation\x12\x1c\n\x14postinstall_optional\x18\t \x01(\x08\x12=\n\x15hash_tree_data_extent\x18\n \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x38\n\x10hash_tree_extent\x18\x0b \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x1b\n\x13hash_tree_algorithm\x18\x0c \x01(\t\x12\x16\n\x0ehash_tree_salt\x18\r \x01(\x0c\x12\x37\n\x0f\x66\x65\x63_data_extent\x18\x0e \x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x32\n\nfec_extent\x18\x0f 
\x01(\x0b\x32\x1e.chromeos_update_engine.Extent\x12\x14\n\tfec_roots\x18\x10 \x01(\r:\x01\x32\x12\x0f\n\x07version\x18\x11 \x01(\t\x12\x43\n\x10merge_operations\x18\x12 \x03(\x0b\x32).chromeos_update_engine.CowMergeOperation\x12\x19\n\x11\x65stimate_cow_size\x18\x13 \x01(\x04\x12\x1d\n\x15\x65stimate_op_count_max\x18\x14 \x01(\x04\"L\n\x15\x44ynamicPartitionGroup\x12\x0c\n\x04name\x18\x01 \x02(\t\x12\x0c\n\x04size\x18\x02 \x01(\x04\x12\x17\n\x0fpartition_names\x18\x03 \x03(\t\"8\n\x0eVABCFeatureSet\x12\x10\n\x08threaded\x18\x01 \x01(\x08\x12\x14\n\x0c\x62\x61tch_writes\x18\x02 \x01(\x08\"\x9c\x02\n\x18\x44ynamicPartitionMetadata\x12=\n\x06groups\x18\x01 \x03(\x0b\x32-.chromeos_update_engine.DynamicPartitionGroup\x12\x18\n\x10snapshot_enabled\x18\x02 \x01(\x08\x12\x14\n\x0cvabc_enabled\x18\x03 \x01(\x08\x12\x1e\n\x16vabc_compression_param\x18\x04 \x01(\t\x12\x13\n\x0b\x63ow_version\x18\x05 \x01(\r\x12@\n\x10vabc_feature_set\x18\x06 \x01(\x0b\x32&.chromeos_update_engine.VABCFeatureSet\x12\x1a\n\x12\x63ompression_factor\x18\x07 \x01(\x04\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"C\n\x0c\x41pexMetadata\x12\x33\n\tapex_info\x18\x01 \x03(\x0b\x32 .chromeos_update_engine.ApexInfo\"\xc3\x03\n\x14\x44\x65ltaArchiveManifest\x12\x18\n\nblock_size\x18\x03 \x01(\r:\x04\x34\x30\x39\x36\x12\x19\n\x11signatures_offset\x18\x04 \x01(\x04\x12\x17\n\x0fsignatures_size\x18\x05 \x01(\x04\x12\x18\n\rminor_version\x18\x0c \x01(\r:\x01\x30\x12;\n\npartitions\x18\r \x03(\x0b\x32\'.chromeos_update_engine.PartitionUpdate\x12\x15\n\rmax_timestamp\x18\x0e \x01(\x03\x12T\n\x1a\x64ynamic_partition_metadata\x18\x0f \x01(\x0b\x32\x30.chromeos_update_engine.DynamicPartitionMetadata\x12\x16\n\x0epartial_update\x18\x10 \x01(\x08\x12\x33\n\tapex_info\x18\x11 \x03(\x0b\x32 
.chromeos_update_engine.ApexInfo\x12\x1c\n\x14security_patch_level\x18\x12 \x01(\tJ\x04\x08\x01\x10\x02J\x04\x08\x02\x10\x03J\x04\x08\x06\x10\x07J\x04\x08\x07\x10\x08J\x04\x08\x08\x10\tJ\x04\x08\t\x10\nJ\x04\x08\n\x10\x0bJ\x04\x08\x0b\x10\x0c') 28 | 29 | _globals = globals() 30 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 31 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'update_metadata_pb2', _globals) 32 | if not _descriptor._USE_C_DESCRIPTORS: 33 | DESCRIPTOR._loaded_options = None 34 | _globals['_SIGNATURES_SIGNATURE'].fields_by_name['version']._loaded_options = None 35 | _globals['_SIGNATURES_SIGNATURE'].fields_by_name['version']._serialized_options = b'\030\001' 36 | _globals['_INSTALLOPERATION_TYPE'].values_by_name["MOVE"]._loaded_options = None 37 | _globals['_INSTALLOPERATION_TYPE'].values_by_name["MOVE"]._serialized_options = b'\010\001' 38 | _globals['_INSTALLOPERATION_TYPE'].values_by_name["BSDIFF"]._loaded_options = None 39 | _globals['_INSTALLOPERATION_TYPE'].values_by_name["BSDIFF"]._serialized_options = b'\010\001' 40 | _globals['_EXTENT']._serialized_start=49 41 | _globals['_EXTENT']._serialized_end=98 42 | _globals['_SIGNATURES']._serialized_start=101 43 | _globals['_SIGNATURES']._serialized_end=260 44 | _globals['_SIGNATURES_SIGNATURE']._serialized_start=181 45 | _globals['_SIGNATURES_SIGNATURE']._serialized_end=260 46 | _globals['_PARTITIONINFO']._serialized_start=262 47 | _globals['_PARTITIONINFO']._serialized_end=305 48 | _globals['_INSTALLOPERATION']._serialized_start=308 49 | _globals['_INSTALLOPERATION']._serialized_end=858 50 | _globals['_INSTALLOPERATION_TYPE']._serialized_start=629 51 | _globals['_INSTALLOPERATION_TYPE']._serialized_end=858 52 | _globals['_COWMERGEOPERATION']._serialized_start=861 53 | _globals['_COWMERGEOPERATION']._serialized_end=1118 54 | _globals['_COWMERGEOPERATION_TYPE']._serialized_start=1068 55 | _globals['_COWMERGEOPERATION_TYPE']._serialized_end=1118 56 | 
_globals['_PARTITIONUPDATE']._serialized_start=1121 57 | _globals['_PARTITIONUPDATE']._serialized_end=1992 58 | _globals['_DYNAMICPARTITIONGROUP']._serialized_start=1994 59 | _globals['_DYNAMICPARTITIONGROUP']._serialized_end=2070 60 | _globals['_VABCFEATURESET']._serialized_start=2072 61 | _globals['_VABCFEATURESET']._serialized_end=2128 62 | _globals['_DYNAMICPARTITIONMETADATA']._serialized_start=2131 63 | _globals['_DYNAMICPARTITIONMETADATA']._serialized_end=2415 64 | _globals['_APEXINFO']._serialized_start=2417 65 | _globals['_APEXINFO']._serialized_end=2516 66 | _globals['_APEXMETADATA']._serialized_start=2518 67 | _globals['_APEXMETADATA']._serialized_end=2585 68 | _globals['_DELTAARCHIVEMANIFEST']._serialized_start=2588 69 | _globals['_DELTAARCHIVEMANIFEST']._serialized_end=3039 70 | # @@protoc_insertion_point(module_scope) 71 | --------------------------------------------------------------------------------