├── .github └── workflows │ └── docker-image.yml ├── .gitignore ├── LICENSE ├── README.md ├── draytek_arsenal ├── pyproject.toml ├── requirements.txt └── src │ └── draytek_arsenal │ ├── __init__.py │ ├── __main__.py │ ├── commands │ ├── __init__.py │ ├── base.py │ ├── dlm_hash.py │ ├── extract.py │ ├── extract_linux.py │ ├── find_endianness.py │ ├── find_loading_addr.py │ ├── mips_compile.py │ ├── mips_merge.py │ ├── parse.py │ └── patch.py │ ├── compression.py │ ├── dlm.py │ ├── draytek_format.py │ ├── format.py │ ├── fs.py │ └── linux.py ├── draytek_fw.ksy └── mips-tools ├── Dockerfile ├── entry.sh └── mips-tools.sh /.github/workflows/docker-image.yml: -------------------------------------------------------------------------------- 1 | name: Create and publish mips-tools Docker image 2 | 3 | # Configures this workflow to run every time a change is pushed to the branch called `release`. 4 | on: 5 | push: 6 | branches: [ "main" ] 7 | paths: 8 | - 'mips-tools/**' 9 | pull_request: 10 | branches: [ "main" ] 11 | paths: 12 | - 'mips-tools/**' 13 | 14 | # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. 15 | env: 16 | REGISTRY: ghcr.io 17 | IMAGE_NAME: ${{ github.repository }} 18 | 19 | # There is a single job in this workflow. It's configured to run on the latest available version of Ubuntu. 20 | jobs: 21 | build-and-push-image: 22 | runs-on: ubuntu-latest 23 | # Sets the permissions granted to the `GITHUB_TOKEN` for the actions in this job. 24 | permissions: 25 | contents: read 26 | packages: write 27 | attestations: write 28 | id-token: write 29 | 30 | steps: 31 | - name: Checkout repository 32 | uses: actions/checkout@v4 33 | # Uses the `docker/login-action` action to log in to the Container registry registry using the account and password that will publish the packages. Once published, the packages are scoped to the account defined here. 34 | - name: Log in to the Container registry 35 | uses: docker/login-action@65b78e6e13532edd9afa3aa52ac7964289d1a9c1 36 | with: 37 | registry: ${{ env.REGISTRY }} 38 | username: ${{ github.actor }} 39 | password: ${{ secrets.GITHUB_TOKEN }} 40 | # This step uses [docker/metadata-action](https://github.com/docker/metadata-action#about) to extract tags and labels that will be applied to the specified image. The `id` "meta" allows the output of this step to be referenced in a subsequent step. The `images` value provides the base name for the tags and labels. 41 | - name: Extract metadata (tags, labels) for Docker 42 | id: meta 43 | uses: docker/metadata-action@9ec57ed1fcdbf14dcef7dfbe97b2010124a938b7 44 | with: 45 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 46 | # This step uses the `docker/build-push-action` action to build the image, based on your repository's `Dockerfile`. If the build succeeds, it pushes the image to GitHub Packages. 47 | # It uses the `context` parameter to define the build's context as the set of files located in the specified path. For more information, see "[Usage](https://github.com/docker/build-push-action#usage)" in the README of the `docker/build-push-action` repository. 48 | # It uses the `tags` and `labels` parameters to tag and label the image with the output from the "meta" step. 
49 | - name: Build and push Docker image 50 | id: push 51 | uses: docker/build-push-action@f2a1d5e99d037542a71f64918e516c093c6f3fc4 52 | with: 53 | context: ./mips-tools 54 | push: true 55 | tags: ${{ steps.meta.outputs.tags }} 56 | labels: ${{ steps.meta.outputs.labels }} 57 | 58 | # This step generates an artifact attestation for the image, which is an unforgeable statement about where and how it was built. It increases supply chain security for people who consume the image. For more information, see "[AUTOTITLE](/actions/security-guides/using-artifact-attestations-to-establish-provenance-for-builds)." 59 | - name: Generate artifact attestation 60 | uses: actions/attest-build-provenance@v1 61 | with: 62 | subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}} 63 | subject-digest: ${{ steps.push.outputs.digest }} 64 | push-to-registry: true 65 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 
106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Spyder project settings 132 | .spyderproject 133 | .spyproject 134 | 135 | # Rope project settings 136 | .ropeproject 137 | 138 | # mkdocs documentation 139 | /site 140 | 141 | # mypy 142 | .mypy_cache/ 143 | .dmypy.json 144 | dmypy.json 145 | 146 | # Pyre type checker 147 | .pyre/ 148 | 149 | # pytype static type analyzer 150 | .pytype/ 151 | 152 | # Cython debug symbols 153 | cython_debug/ 154 | 155 | # PyCharm 156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 158 | # and can be added to the global gitignore or merged into this file. For a more nuclear 159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 160 | #.idea/ 161 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Faraday 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Draytek Arsenal: Observability and hardening toolkit for Draytek edge devices. 2 | Advanced attackers are increasingly choosing edge devices as targets. However, these devices are controlled by closed-source software known as firmware, often distributed in a proprietary format. This is an added difficulty for defenders and researchers, who must understand how to extract firmware to assess its security. 3 | 4 | This is more than just a hypothetical scenario, as we discovered recently when a client was compromised. 
With Draytek equipment at the edge of their infrastructure, the natural question was: Could this be the attackers' entry point? Over 500k Draytek devices are exposed to the Internet. Yet, no working tool exists to extract their firmware and assist researchers and defenders working with these devices. 5 | 6 | During our assessment, we reverse-engineered Draytek's firmware format, which contains a bootloader, a compressed RTOS kernel, and two filesystems. Through our investigation, we developed tools to extract these components, unveiling the real-time operating system's capability to load code modules dynamically. These modules are loaded from one of the filesystems in the firmware image during boot but can also be loaded while the system is running and stored in a separate filesystem in flash memory. An attacker can exploit this feature to achieve persistence by loading a module that remains active even after a reboot or firmware upgrade, and the end-user does not have a way to detect this type of attack. Consequently, we developed our own module to check the integrity of loaded modules in memory, mitigating this potential threat. 7 | 8 | In our pursuit of a more secure internet, we are making this set of tools accessible to the community, enabling observability, hardening, transparency, and vulnerability research on Draytek edge devices. 9 | 10 | ## Presentation 11 | This tool was developed as part of a research project that was presented at [DEFCON HHV and La Villa Hacker](https://defcon.org/html/defcon-32/dc-32-creator-talks.html#54642). You can find the slides and PoC videos [here](https://drive.google.com/drive/folders/1G-fvAntkuCg9Hu_MeMSdYTCd7KAlIywk?usp=sharing). 12 | 13 | ## Note 14 | We initially developed this as an internal tool. It was just a set of scripts, but it showed great potential, prompting us to make it open-source. Since then, we have been working to integrate these scripts into the Python package you will find in this repo and make them compatible with other device models. 15 | 16 | ## Get started ## 17 | 18 | __Requirements:__ 19 | 20 | * Python3 21 | * Docker (Optional) 22 | 23 | ### Installation ### 24 | 25 | (Optional) Create and activate a Python virtual environment: 26 | ```bash 27 | $ python3 -m virtualenv .venv 28 | $ source .venv/bin/activate 29 | ``` 30 | 31 | Install `draytek_arsenal`: 32 | ```bash 33 | $ cd draytek_arsenal 34 | $ python3 -m pip install -r requirements.txt 35 | $ python3 -m pip install . 36 | ``` 37 | 38 | Test the installation: 39 | ``` 40 | $ python3 -m draytek_arsenal 41 | ``` 42 | 43 | ### Install as developer ### 44 | 45 | This installation will be affected by local code changes: 46 | ``` 47 | $ python3 -m pip install -e . 48 | ``` 49 | 50 | ### Mips-tools ### 51 | 52 | Some commands, such as `mips_compile` and `mips_merge`, need a complementary Docker image in order to work. 53 | If it has not been downloaded, this error message is shown: 54 | ``` 55 | [x] Image 'draytek-arsenal' not found. Please build or download the image. 56 | ``` 57 | 58 | You can download the image with the following command: 59 | 60 | ```bash 61 | $ docker pull ghcr.io/infobyte/draytek-arsenal:main 62 | ``` 63 | 64 | Or build it with: 65 | ```bash 66 | $ docker build -t draytek-arsenal ./mips-tools 67 | ``` 68 | 69 | 70 | ## Usage ## 71 | 72 | `draytek-arsenal` is a set of scripts collected in a Python package. To use it, you should select a command: 73 | 74 | ``` 75 | usage: draytek-arsenal [-h] [command] args..
76 | ``` 77 | 78 | Some of the commands are: 79 | 80 | 81 | ### parse_firmware ### 82 | 83 | Parse and show information about a Draytek firmware. 84 | 85 | ``` 86 | usage: parse_firmware [-h] firmware 87 | 88 | positional arguments: 89 | firmware Path to the firmware 90 | 91 | options: 92 | -h, --help show this help message and exit 93 | ``` 94 | 95 | ### extract_rtos ### 96 | 97 | Command used to extract and decompress firmware from Draytek small business devices running an RTOS. 98 | 99 | ``` 100 | usage: extract_rtos [-h] [--rtos RTOS] [--fs FS] [--dlm DLM] [--dlm-key1 DLM_KEY1] 101 | [--dlm-key2 DLM_KEY2] 102 | firmware 103 | 104 | positional arguments: 105 | firmware Path to the firmware 106 | 107 | options: 108 | -h, --help show this help message and exit 109 | --rtos RTOS, -r RTOS File path where to extract and decompress the RTOS 110 | --fs FS, -f FS Directory path where to extract and decompress the File 111 | System 112 | --dlm DLM, -d DLM Directory path where to extract and decompress the DLMs 113 | --dlm-key1 DLM_KEY1 First key used to decrypt DLMs 114 | --dlm-key2 DLM_KEY2 Second key used to decrypt DLMs 115 | ``` 116 | 117 | ### extract_linux ### 118 | 119 | Command used to extract and decompress firmware from Draytek devices running Linux. 120 | 121 | ``` 122 | usage: extract_linux [-h] [--fs FS] --key KEY firmware 123 | 124 | positional arguments: 125 | firmware Path to the firmware 126 | 127 | options: 128 | -h, --help show this help message and exit 129 | --fs FS, -f FS Directory path where to extract and decompress the File System 130 | --key KEY Key used to decrypt 131 | ``` 132 | 133 | ### dlm_hash ### 134 | 135 | Get the hash of a DLM module. 136 | 137 | ``` 138 | usage: dlm_hash [-h] [-c] dlm 139 | 140 | positional arguments: 141 | dlm Path to the dlm 142 | 143 | options: 144 | -h, --help show this help message and exit 145 | -c Print as .c code 146 | ``` 147 | 148 | ### find_loading_addr ### 149 | 150 | Find the address where the RTOS is loaded, using the first jump instruction. 151 | 152 | ``` 153 | usage: find_loading_addr [-h] rtos 154 | 155 | positional arguments: 156 | rtos Path to the rtos 157 | 158 | options: 159 | -h, --help show this help message and exit 160 | ``` 161 | 162 | ### find_endianness ### 163 | 164 | Checks if the RTOS is little or big endian. 165 | 166 | ``` 167 | usage: find_endianness [-h] rtos 168 | 169 | positional arguments: 170 | rtos Path to the rtos 171 | 172 | options: 173 | -h, --help show this help message and exit 174 | ``` 175 | 176 | ### mips_compile ### 177 | 178 | Compile a MIPS relocatable binary (used for DLMs). 179 | 180 | ``` 181 | usage: mips_compile [-h] output [input ...] 182 | 183 | positional arguments: 184 | output Output file 185 | input Input file(s) 186 | 187 | options: 188 | -h, --help show this help message and exit 189 | ``` 190 | 191 | ### mips_merge ### 192 | 193 | Merge two ELF MIPS relocatable files.
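For example, assuming two hypothetical relocatable objects `patch.o` and `check.o` sitting in the current working directory (the command runs inside the `draytek-arsenal` Docker image and mounts the current directory into the container), a merge could be invoked like this:

```bash
$ python3 -m draytek_arsenal mips_merge patch.o check.o merged.o
```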
194 | 195 | ``` 196 | usage: mips_merge [-h] first_input second_input output 197 | 198 | positional arguments: 199 | first_input First input file 200 | second_input Second input file 201 | output Output file 202 | 203 | options: 204 | -h, --help show this help message and exit 205 | ``` 206 | -------------------------------------------------------------------------------- /draytek_arsenal/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "draytek_arsenal" 3 | version = "0.0.1" 4 | authors = [ 5 | { name="Octavio Gianatiempo", email="ogianatiempo@faradaysec.com" }, 6 | { name="Gaston Aznarez", email="gaznarez@faradaysec.com" } 7 | ] 8 | description = "A package to help security assignments on Draytek firmwaresDraytek Arsenal is a set of tools designed to enhance the security of Draytek edge devices. Written as a Python package and utilizing Docker, this toolkit empowers defenders and researchers to analyze, modify, and strengthen Draytek firmware. With features like firmware format parsing, module management, and integrity-checking module compilation, Draytek Arsenal offers a comprehensive solution for firmware analysis and customization. Accessible to the cybersecurity community under a MIT license, this toolset encourages collaboration and transparency in the pursuit of a more secure internet." 9 | readme = "README.md" 10 | requires-python = ">=3.10" 11 | -------------------------------------------------------------------------------- /draytek_arsenal/requirements.txt: -------------------------------------------------------------------------------- 1 | capstone==5.0.3 2 | certifi==2024.7.4 3 | charset-normalizer==3.3.2 4 | docker==7.1.0 5 | idna==3.7 6 | kaitaistruct==0.10 7 | lief==0.16.1 8 | lz4==4.3.3 9 | PyYAML==6.0.1 10 | requests==2.32.0 11 | urllib3==2.2.2 12 | hexdump==3.3 13 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/infobyte/draytek-arsenal/d601252b2e6a62e3cd3e5962e164d32dabf1c6ae/draytek_arsenal/src/draytek_arsenal/__init__.py -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/__main__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import argparse 3 | import importlib.util 4 | import sys 5 | import draytek_arsenal 6 | from draytek_arsenal.commands.base import Command 7 | from typing import List, Type 8 | 9 | def load_commands(commands_dir: str) -> List[Type[Command]]: 10 | """ 11 | Dynamically loads all the subclasses of Command from the files in the commands directory 12 | """ 13 | commands = [] 14 | if os.path.exists(commands_dir): 15 | for file_name in os.listdir(commands_dir): 16 | if file_name.endswith(".py"): 17 | module_name = file_name[:-3] # Remove the .py extension 18 | module_path = os.path.join(commands_dir, file_name) 19 | spec = importlib.util.spec_from_file_location(module_name, module_path) 20 | 21 | if spec is None or spec.loader is None: 22 | raise RuntimeError(f"Can't load spec from {file_name}") 23 | 24 | module = importlib.util.module_from_spec(spec) 25 | 26 | # Load the module 27 | spec.loader.exec_module(module) 28 | 29 | for name in dir(module): 30 | obj = getattr(module, name) 31 | if isinstance(obj, type) and issubclass(obj, Command) and obj != Command: 32 | 
commands.append(obj) 33 | 34 | return commands 35 | 36 | def create_parser(command: Command) -> argparse.ArgumentParser: 37 | """ 38 | Creates a arguments parser for a specific command 39 | """ 40 | parser = argparse.ArgumentParser( 41 | description=command.description(), 42 | prog=command.name() 43 | ) 44 | for arg in command.args(): 45 | parser.add_argument(*arg["flags"], **arg["kwargs"]) 46 | return parser 47 | 48 | def main(commands): 49 | parser = argparse.ArgumentParser( 50 | description="Draytek firmware analysis tools", 51 | prog="draytek-arsenal", 52 | add_help=False 53 | ) 54 | 55 | parser.error = lambda _self, _msg: None 56 | 57 | parser.add_argument( 58 | "command", 59 | choices=[command.name() for command in commands], 60 | help="The command to execute" 61 | ) 62 | 63 | try: 64 | args, remaining_argv = parser.parse_known_args() 65 | 66 | except: 67 | if not "--help" in sys.argv: 68 | print(f"Missing or invalid command.\n") 69 | parser.print_help() 70 | exit(1) 71 | 72 | parser.print_help() 73 | exit(0) 74 | 75 | # Get the selected command 76 | selected_command = next((command for command in commands if command.name() == args.command), None) 77 | if selected_command is not None: 78 | command_parser = create_parser(selected_command) 79 | command_args = command_parser.parse_args(remaining_argv) 80 | 81 | if "--help" in sys.argv: 82 | command_parser.print_help() 83 | else: 84 | selected_command.execute(command_args) 85 | 86 | 87 | else: 88 | print(f"Invalid command '{args.command}'.") 89 | parser.print_help() 90 | 91 | if __name__ == "__main__": 92 | commands = load_commands(os.path.dirname(draytek_arsenal.commands.__file__)) 93 | main(commands) 94 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/infobyte/draytek-arsenal/d601252b2e6a62e3cd3e5962e164d32dabf1c6ae/draytek_arsenal/src/draytek_arsenal/commands/__init__.py -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import Any, Dict, List 3 | 4 | class Command(ABC): 5 | @staticmethod 6 | @abstractmethod 7 | def name() -> str: 8 | ... 9 | 10 | @staticmethod 11 | @abstractmethod 12 | def args() -> List[Dict[str, Any]]: 13 | ... 14 | 15 | @staticmethod 16 | @abstractmethod 17 | def description() -> str: 18 | ... 
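# Note (illustrative sketch, not part of the original file): concrete commands in this
# package subclass Command and are discovered dynamically by load_commands() in
# __main__.py, which also invokes a static execute(args) method on the selected class.
# A hypothetical minimal command is sketched below; it is kept as a comment because any
# real Command subclass placed in this directory would be auto-registered by the loader.
#
# class HelloCommand(Command):
#     @staticmethod
#     def name() -> str:
#         return "hello"
#
#     @staticmethod
#     def description() -> str:
#         return "Print a greeting (example only)"
#
#     @staticmethod
#     def args() -> List[Dict[str, Any]]:
#         # Each entry is expanded to argparse.add_argument(*flags, **kwargs)
#         return [{"flags": ["name"], "kwargs": {"type": str, "help": "Who to greet"}}]
#
#     @staticmethod
#     def execute(args) -> None:
#         print(f"Hello, {args.name}!")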
19 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/dlm_hash.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | from typing import Any, Dict, List, Tuple 3 | import hashlib 4 | import lief 5 | from enum import Enum, auto 6 | from dataclasses import dataclass 7 | from struct import pack 8 | from pprint import pprint 9 | from os.path import basename 10 | 11 | 12 | # Mips instructions has 32 bites length and the first 6 are the opcode 13 | CODE_HASH_MASK = 0xfc 14 | 15 | PROGBITS = lief.ELF.Section.TYPE.PROGBITS 16 | NOBITS = lief.ELF.Section.TYPE.NOBITS 17 | WRITE = lief.ELF.Section.FLAGS.WRITE 18 | ALLOC = lief.ELF.Section.FLAGS.ALLOC 19 | EXECINSTR = lief.ELF.Section.FLAGS.EXECINSTR 20 | MERGE = lief.ELF.Section.FLAGS.MERGE 21 | STRINGS = lief.ELF.Section.FLAGS.STRINGS 22 | 23 | 24 | SECTION_LIST_TEMPLATE_HEADER = "dlm_section_info {}_sections[]= {{\n" 25 | SECTION_LIST_TEMPLATE_FOOTER = "};\n" 26 | SECTION_STRUCT_TEMPLATE = "\t{{ {}, {}, {}, {} }},\n" 27 | HASH_STRUCT_TEMPLATE = "dlm_info {}_info = {{ {}, {}, {}, {} }};\n" 28 | 29 | @dataclass 30 | class SectionHash: 31 | name: str 32 | offset: int 33 | size: int 34 | is_code: bool 35 | hash: str 36 | 37 | 38 | class SectionType(Enum): 39 | HASHABLE = auto() 40 | SKIPPED = auto() 41 | NOHASHABLE = auto() 42 | CODE = auto() 43 | 44 | 45 | class DlmHashCommand(Command): 46 | 47 | @staticmethod 48 | def name() -> str: 49 | return "dlm_hash" 50 | 51 | @staticmethod 52 | def description() -> str: 53 | return "Get the hash of a DLM" 54 | 55 | @staticmethod 56 | def args() -> List[Dict[str, Any]]: 57 | return [ 58 | {"flags": ["dlm"], "kwargs": {"type": str, "help": "Path to the dlm"}}, 59 | {"flags": ["-c"], "kwargs": {"action": "store_true", "help": "Print as .c code"}} 60 | ] 61 | 62 | @staticmethod 63 | def check_mask(value: int, mask: lief.ELF.Section.FLAGS) -> bool: 64 | return value & mask == mask 65 | 66 | @staticmethod 67 | def get_section_type(section: lief.ELF.Section) -> SectionType: 68 | s_type = section.type 69 | s_flags = section.flags 70 | 71 | # Check if it is dynamic data 72 | if s_type == NOBITS or DlmHashCommand.check_mask(s_flags, WRITE | ALLOC): 73 | return SectionType.NOHASHABLE 74 | 75 | elif s_type == PROGBITS: 76 | # Check if it is code 77 | if DlmHashCommand.check_mask(s_flags, ALLOC | EXECINSTR): 78 | return SectionType.CODE 79 | 80 | # Check if it is static data 81 | elif DlmHashCommand.check_mask(s_flags, MERGE | ALLOC): 82 | return SectionType.HASHABLE 83 | 84 | elif DlmHashCommand.check_mask(s_flags, ALLOC): 85 | return SectionType.NOHASHABLE 86 | 87 | return SectionType.SKIPPED 88 | 89 | 90 | @staticmethod 91 | def hash(content: bytes, is_code: bool = False) -> str: 92 | if is_code: 93 | new_content = b"" 94 | 95 | # Iterate over instructions 96 | for inst_start in range(0, len(content), 4): 97 | data = (content[inst_start] & CODE_HASH_MASK).to_bytes(1) 98 | new_content += pack(" Tuple[List[SectionHash] | None, int]: 107 | parsed_dlm = lief.parse(dlm) 108 | 109 | if parsed_dlm is None: 110 | print("[x] Error parsing the DLM") 111 | return None, 0 112 | 113 | hashes: List[SectionHash] = [] 114 | offset = 0 115 | total_size = 0 116 | 117 | for section in parsed_dlm.sections: 118 | is_code = False 119 | hash: str 120 | actual_offset = offset 121 | 122 | section_type = DlmHashCommand.get_section_type(section) 123 | 124 | if section_type != 
SectionType.SKIPPED: 125 | offset += section.size 126 | total_size += section.size 127 | 128 | if section_type != SectionType.NOHASHABLE: 129 | 130 | match section_type: 131 | case SectionType.HASHABLE: 132 | hash = DlmHashCommand.hash(section.content.tobytes()) 133 | 134 | case SectionType.CODE: 135 | is_code = True 136 | hash = DlmHashCommand.hash(section.content.tobytes(), is_code=True) 137 | 138 | hashes.append( 139 | SectionHash( 140 | section.name, 141 | actual_offset, 142 | section.size, 143 | is_code, 144 | hash 145 | ) 146 | ) 147 | 148 | return hashes, total_size 149 | 150 | @staticmethod 151 | def bytes_to_code(hash: str) -> str: 152 | result = "{" 153 | 154 | for byte_offset in range(0, len(hash), 2): 155 | result += f"0x{hash[byte_offset: byte_offset + 2]}, " 156 | 157 | result += "}" 158 | 159 | return result 160 | 161 | @staticmethod 162 | def name_to_code(name: str) -> str: 163 | result = "{" 164 | 165 | for char in name: 166 | result += f"\'{char}\', " 167 | 168 | result += "}" 169 | 170 | return result 171 | 172 | 173 | @staticmethod 174 | def generate_code(hashes: List[SectionHash], dlm: str, size: int) -> str: 175 | dlm_name = basename(dlm) 176 | var_prefix = dlm_name.split(".")[0] 177 | 178 | result = SECTION_LIST_TEMPLATE_HEADER.format(var_prefix) 179 | 180 | for hash in hashes: 181 | result += SECTION_STRUCT_TEMPLATE.format( 182 | hash.offset, 183 | hash.size, 184 | 1 if hash.is_code else 0, 185 | DlmHashCommand.bytes_to_code(hash.hash) 186 | ) 187 | 188 | result += SECTION_LIST_TEMPLATE_FOOTER 189 | 190 | result += HASH_STRUCT_TEMPLATE.format( 191 | var_prefix, 192 | DlmHashCommand.name_to_code(dlm_name), 193 | size, 194 | "(dlm_section_info *)&" + var_prefix + "_sections", 195 | len(hashes) 196 | ) 197 | 198 | return result 199 | 200 | 201 | @staticmethod 202 | def execute(args) -> None: 203 | dlm = args.dlm 204 | 205 | hashes, size = DlmHashCommand.get_hashes(dlm) 206 | 207 | if hashes is None: 208 | return 209 | 210 | if args.c: 211 | print(DlmHashCommand.generate_code(hashes, dlm, size)) 212 | 213 | else: 214 | pprint([h.__dict__ for h in hashes]) 215 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/extract.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | from draytek_arsenal.commands.base import Command 3 | from draytek_arsenal.format import parse_firmware 4 | from draytek_arsenal.compression import Lz4 5 | from draytek_arsenal.fs import PFSExtractor 6 | from os import path 7 | from struct import pack 8 | import tempfile 9 | import os 10 | 11 | class ExtractCommand(Command): 12 | @staticmethod 13 | def name() -> str: 14 | return "extract_rtos" 15 | 16 | 17 | @staticmethod 18 | def args() -> List[Dict[str, Any]]: 19 | return [ 20 | {"flags": ["firmware"], "kwargs": {"type": str, "help": "Path to the firmware"}}, 21 | { 22 | "flags": ["--rtos", "-r"], 23 | "kwargs": { 24 | "type": str, 25 | "help": "File path where to extract and decompress the RTOS", 26 | "required": False 27 | } 28 | }, 29 | { 30 | "flags": ["--fs", "-f"], 31 | "kwargs": { 32 | "type": str, 33 | "help": "Directory path where to extract and decompress the File System", 34 | "required": False 35 | } 36 | }, 37 | { 38 | "flags": ["--dlm", "-d"], 39 | "kwargs": { 40 | "type": str, 41 | "help": "Directory path where to extract and decompress the DLMs", 42 | "required": False 43 | } 44 | }, 45 | { 46 | "flags": ["--dlm-key1"], 47 | "kwargs": { 
48 | "type": str, 49 | "help": "First key used to decrypt DLMs", 50 | "required": False 51 | } 52 | }, 53 | { 54 | "flags": ["--dlm-key2"], 55 | "kwargs": { 56 | "type": str, 57 | "help": "First key used to decrypt DLMs", 58 | "required": False 59 | } 60 | }, 61 | ] 62 | 63 | 64 | @staticmethod 65 | def description() -> str: 66 | return "Command used to extract and decompress Draytek RTOS packages" 67 | 68 | 69 | @staticmethod 70 | def execute(args) -> None: 71 | fw_struct = parse_firmware(args.firmware) 72 | 73 | if args.rtos is None and args.dlm is None and args.fs is None: 74 | print(f"[x] Nothing to extract. Please set some extraction flag.") 75 | 76 | if args.rtos is not None: 77 | print("[+] Extracting RTOS from firmware") 78 | 79 | if not path.isdir(path.dirname(args.rtos)): 80 | print("[x] Bad RTOS output file") 81 | 82 | elif fw_struct.bin.rtos.rtos_size != len(fw_struct.bin.rtos.data): 83 | print(f"[x] Data length ({len(fw_struct.bin.rtos.data)}) doesn't match with the header length ({fw_struct.bin.rtos.rtos_size})") 84 | 85 | else: 86 | unstructured_bootloader = b"".join([pack(">I", integer) for integer in fw_struct.bin.bootloader.data[:-1]]) 87 | 88 | lz4 = Lz4() 89 | decompressed_rtos = lz4.decompress(fw_struct.bin.rtos.data) 90 | with open(args.rtos, "wb") as output_file: 91 | output_file.write(unstructured_bootloader + decompressed_rtos) 92 | 93 | print(f"[+] RTOS extracted in {args.rtos}") 94 | 95 | if args.dlm is not None: 96 | 97 | if not fw_struct.has_dlm: 98 | print(f"[*] Skiping DLMs extraction: the file does not have the magic") 99 | 100 | elif args.dlm_key1 is None or args.dlm_key2 is None: 101 | print(f"[x] One or more keys are not provided") 102 | 103 | else: 104 | print("[+] Extracting DLMs from firmware") 105 | 106 | with tempfile.NamedTemporaryFile() as tmp_dlms: 107 | print(f"[*] Writing DLMs FS to tmp file: {tmp_dlms.name}") 108 | 109 | data = b"DLM/1.0" + fw_struct.bin.dlm.data 110 | tmp_dlms.write(data) 111 | 112 | if not path.exists(args.dlm): 113 | os.makedirs(args.dlm) 114 | 115 | pfs_extractor = PFSExtractor( 116 | bytes.fromhex(args.dlm_key1), 117 | bytes.fromhex(args.dlm_key2) 118 | ) 119 | _ = pfs_extractor.extract(tmp_dlms.name, args.dlm) 120 | 121 | print(f"[+] DLMs extracted to {args.dlm}") 122 | 123 | 124 | if args.fs is not None: 125 | print("[+] Extracting FS from firmware") 126 | 127 | with tempfile.NamedTemporaryFile() as tmp_fs: 128 | print(f"[*] Writing decompressed FS to tmp file: {tmp_fs.name}") 129 | lz4 = Lz4() 130 | tmp_fs.write( 131 | lz4.decompress(fw_struct.web.data) 132 | ) 133 | 134 | if not path.exists(args.fs): 135 | os.makedirs(args.fs) 136 | 137 | 138 | pfs_extractor = PFSExtractor() 139 | _ = pfs_extractor.extract(tmp_fs.name, args.fs) 140 | 141 | print(f"[+] fs extracted to {args.fs}") 142 | 143 | print("[*] All done..") 144 | 145 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/extract_linux.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | from draytek_arsenal.commands.base import Command 3 | from draytek_arsenal.format import parse_firmware 4 | from draytek_arsenal.compression import Lz4 5 | from os import path 6 | import tempfile 7 | import os 8 | import sys 9 | from Crypto.Cipher import ChaCha20 10 | 11 | class ExtractCommand(Command): 12 | @staticmethod 13 | def name() -> str: 14 | return "extract_linux" 15 | 16 | 17 | @staticmethod 18 | def args() -> 
List[Dict[str, Any]]: 19 | return [ 20 | {"flags": ["firmware"], "kwargs": {"type": str, "help": "Path to the firmware"}}, 21 | { 22 | "flags": ["--fs", "-f"], 23 | "kwargs": { 24 | "type": str, 25 | "help": "Directory path where to extract and decompress the File System", 26 | "required": False 27 | } 28 | }, 29 | { 30 | "flags": ["--key"], 31 | "kwargs": { 32 | "type": str, 33 | "help": "Key used to decrypt", 34 | "required": True 35 | } 36 | }, 37 | ] 38 | 39 | 40 | @staticmethod 41 | def description() -> str: 42 | return "Command used to extract and decompress Draytek Linux packages" 43 | 44 | 45 | @staticmethod 46 | def execute(args) -> None: 47 | fw = parse_firmware(args.firmware) 48 | 49 | dec_data = do_decrypt(fw.nonce, fw.data, args.key.encode()) 50 | lz4_data = split_lz4_image(dec_data) 51 | 52 | with tempfile.NamedTemporaryFile() as tmp_file: 53 | print(f"[*] Writing FS to tmp file: {tmp_file.name}") 54 | tmp_file.write(lz4_data) 55 | 56 | decompress(tmp_file.name, args.fs) 57 | 58 | print(f"[+] Extracted in {args.fs}") 59 | 60 | 61 | def do_decrypt(nonce: str, data: bytes, key: bytes) -> bytes: 62 | print(f"[*] Decrypting {len(data)} bytes with\n\tnonce: {nonce}\n\tkey: {key}") 63 | cipher = ChaCha20.new(key=key, nonce=nonce) 64 | dec_data = cipher.decrypt(data) 65 | 66 | print(f"[+] Decripted {len(dec_data)} bytes") 67 | 68 | return dec_data 69 | 70 | def split_lz4_image(data: bytes) -> bytes: 71 | try: 72 | start = data.find(b"\x02\x21\x4C\x18") 73 | if start == -1: 74 | print("[-] Error: no lz4 header") 75 | exit(0) 76 | 77 | end = data.find(b"R!!!", start) 78 | tmp_end = end 79 | while tmp_end != -1: 80 | end = tmp_end 81 | tmp_end = data.find(b"R!!!", end + 1) 82 | 83 | if end == -1: 84 | raise Exception("Can't find end of LZ4 image") 85 | 86 | end += (0x14 - 6) 87 | return data[start: end] 88 | 89 | except Exception as e: 90 | print("[x] Error: split_lz4_image") 91 | print(e) 92 | 93 | def decompress(input: str, output_dir: str) -> None: 94 | try: 95 | with tempfile.NamedTemporaryFile() as tmp_file: 96 | os.system(f"lz4 -d {input} {tmp_file.name}") 97 | os.system(f"mkdir -p {output_dir}") 98 | os.system(f"cpio -idmv --file {tmp_file.name} -D {output_dir}") 99 | 100 | except Exception as e: 101 | print("[x] Error: decompress") 102 | print(e) 103 | 104 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/find_endianness.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | from typing import Any, Dict, List 3 | 4 | from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32, CS_MODE_BIG_ENDIAN, CS_MODE_LITTLE_ENDIAN 5 | 6 | 7 | class FindEndiannessCommand(Command): 8 | 9 | @staticmethod 10 | def name() -> str: 11 | return "find_endianness" 12 | 13 | @staticmethod 14 | def description() -> str: 15 | return "Checks if the RTOS is little or big endian" 16 | 17 | @staticmethod 18 | def args() -> List[Dict[str, Any]]: 19 | return [ 20 | {"flags": ["rtos"], "kwargs": {"type": str, "help": "Path to the rtos"}}, 21 | ] 22 | 23 | @staticmethod 24 | def execute(args) -> None: 25 | rtos = args.rtos 26 | 27 | with open(rtos, "rb") as f: 28 | # Try to disassembly some instructions to check the endianness 29 | f.seek(0x100) 30 | code = f.read(4) 31 | 32 | big_endian_md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN) 33 | big_endian_instructions = list(big_endian_md.disasm(code, 0)) 34 | 35 | little_endian_md = 
Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_LITTLE_ENDIAN) 36 | little_endian_instructions = list(little_endian_md.disasm(code,0)) 37 | 38 | if big_endian_instructions: 39 | print("BE: Big endian") 40 | elif little_endian_instructions: 41 | print("LE: Little endian") 42 | else: 43 | print("UNKWNOWN: Couldn't determine endianess") 44 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/find_loading_addr.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | from typing import Any, Dict, List 3 | 4 | 5 | from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32, CS_MODE_BIG_ENDIAN, CS_MODE_LITTLE_ENDIAN 6 | 7 | 8 | class FindEndiannessCommand(Command): 9 | 10 | @staticmethod 11 | def name() -> str: 12 | return "find_loading_addr" 13 | 14 | @staticmethod 15 | def description() -> str: 16 | return "Find the address where the RTOS if loaded with the first jump instruction" 17 | 18 | @staticmethod 19 | def args() -> List[Dict[str, Any]]: 20 | return [ 21 | {"flags": ["rtos"], "kwargs": {"type": str, "help": "Path to the rtos"}}, 22 | ] 23 | 24 | @staticmethod 25 | def execute(args) -> None: 26 | rtos = args.rtos 27 | 28 | with open(rtos, "rb") as f: 29 | # Peek 4, tratar de desensamblar y con eso determinar endianness 30 | md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN) 31 | code = f.read(50 * 4) 32 | instructions = [i for i in md.disasm(code, 0)] 33 | 34 | if not instructions: 35 | f.seek(0) 36 | md = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_LITTLE_ENDIAN) 37 | code = f.read(50 * 4) 38 | instructions = [i for i in md.disasm(code, 0)] 39 | 40 | # Encontrar el primer salto, que es a la funcion que descomprime el kernel 41 | # Determinar que registro se usa para saltar 42 | # Calcular el valor de ese registro 43 | # Redondear a 0x1000 (valor & 0xfffff000) 44 | mipsEmu = MIPSEmu() 45 | for i in instructions: 46 | ins = MIPSInstruction(i) 47 | if ins.mnemonic() != "jalr": 48 | mipsEmu.parseInstruction(ins) 49 | else: 50 | jmpReg = ins.arg(0) 51 | jmpAddr = mipsEmu.register(jmpReg) 52 | lAddr = jmpAddr & 0xfffff000 53 | print("[*] First jump found") 54 | print("[*] Kernel decompression function at 0x{:x}".format(jmpAddr)) 55 | print("[+] Loading address is 0x{:x}".format(lAddr)) 56 | exit(0) 57 | print("No jump found...") 58 | exit(1) 59 | 60 | 61 | 62 | # Modified from https://github.com/infobyte/cve-2022-27255/blob/main/analysis/firmware_base_address_finder.py by Octavio Galland 63 | class MIPSInstruction(): 64 | def __init__(self, capstoneInstruction): 65 | self.__args = capstoneInstruction.op_str.split(", ") 66 | self.__mnemonic = capstoneInstruction.mnemonic 67 | def argCount(self): 68 | return len(self.__args) 69 | def arg(self, i): 70 | assert i < self.argCount() 71 | return self.__args[i] 72 | def args(self): 73 | return self.__args 74 | def mnemonic(self): 75 | return self.__mnemonic 76 | 77 | class MIPSEmu(): 78 | def __init__(self): 79 | self.__regs = { 80 | "$zero":0, 81 | "$at": 0, 82 | "$v0": 0, 83 | "$v1": 0, 84 | "$a0": 0, 85 | "$a1": 0, 86 | "$a2": 0, 87 | "$a3": 0, 88 | "$t0": 0, 89 | "$t1": 0, 90 | "$t2": 0, 91 | "$t3": 0, 92 | "$t4": 0, 93 | "$t5": 0, 94 | "$t6": 0, 95 | "$t7": 0, 96 | "$s0": 0, 97 | "$s1": 0, 98 | "$s2": 0, 99 | "$s3": 0, 100 | "$s4": 0, 101 | "$s5": 0, 102 | "$s6": 0, 103 | "$s7": 0, 104 | "$t8": 0, 105 | "$t9": 0, 106 | "$k0": 0, 107 | "$k1": 0, 108 | "$gp": 0, 109 | "$sp": 0, 110 | "$s8": 0, 
111 | "$ra": 0, 112 | "$sr": 0, 113 | "$lo": 0, 114 | "$hi": 0, 115 | "$bad": 0, 116 | "$cause": 0, 117 | "$pc": 0, 118 | "$fsr": 0, 119 | "$fir": 0, 120 | "$fp": 0 121 | } 122 | 123 | def parseInstruction(self, instruction): 124 | # opcode dstReg, val1, val2 125 | opcode = instruction.mnemonic() 126 | if opcode in ["mtc0", "mfc0", "ehb"]: 127 | # ignore coprocessor related instructions 128 | return 129 | if opcode in ["sw", "lw", "bne", "beq", "sync", "ins", "jal", "nop", "j"]: 130 | # ignore load, store adn branches for now 131 | return 132 | if opcode == "move": 133 | dstReg = instruction.arg(0) 134 | val1 = self.register(instruction.arg(1)) 135 | self.register(dstReg, val1) 136 | return 137 | if opcode == "lui": 138 | dstReg = instruction.arg(0) 139 | val1 = int(instruction.arg(1), 0) 140 | self.register(dstReg, val1 << 16) 141 | return 142 | if opcode in ["addi", "addiu", "ori", "xori", "andi"]: 143 | dstReg = instruction.arg(0) 144 | val1 = self.register(instruction.arg(1)) 145 | val2 = int(instruction.arg(2), 0) 146 | if opcode == "ori": 147 | self.register(dstReg, (val1 | val2) & 0xffffffff) 148 | elif opcode == "xori": 149 | self.register(dstReg, (val1 ^ val2) & 0xffffffff) 150 | elif opcode == "andi": 151 | self.register(dstReg, (val1 & val2) & 0xffffffff) 152 | else: 153 | self.register(dstReg, (val1 + val2) & 0xffffffff) 154 | return 155 | if opcode == "or": 156 | dstReg = instruction.arg(0) 157 | val1 = self.register(instruction.arg(1)) 158 | val2 = self.register(instruction.arg(2)) 159 | self.register(dstReg, (val1 | val2) & 0xffffffff) 160 | return 161 | if opcode == "and": 162 | dstReg = instruction.arg(0) 163 | val1 = self.register(instruction.arg(1)) 164 | val2 = self.register(instruction.arg(2)) 165 | self.register(dstReg, (val1 & val2) & 0xffffffff) 166 | return 167 | raise Exception("opcode not implemented...") 168 | 169 | 170 | def register(self, regId, value=None): 171 | if value: 172 | assert regId in self.__regs 173 | self.__regs[regId] = value 174 | else: 175 | assert regId in self.__regs 176 | return self.__regs[regId] 177 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/mips_compile.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | from typing import Any, Dict, List 3 | import docker 4 | import os 5 | 6 | MIPS_TOOLS_IMAGE = "draytek-arsenal" 7 | 8 | class MipsCompileCommand(Command): 9 | 10 | @staticmethod 11 | def name() -> str: 12 | return "mips_compile" 13 | 14 | @staticmethod 15 | def description() -> str: 16 | return "Compile MIPS relocatable binary (used for DLMs)" 17 | 18 | @staticmethod 19 | def args() -> List[Dict[str, Any]]: 20 | return [ 21 | {"flags": ["output"], "kwargs": {"type": str, "help": "Output file"}}, 22 | {"flags": ["input"], "kwargs": {"type": str, "nargs": "*", "help": "Output file"}}, 23 | ] 24 | 25 | 26 | @staticmethod 27 | def execute(args): 28 | client = docker.from_env() 29 | 30 | try: 31 | # Check if the image exists 32 | _ = client.images.get(MIPS_TOOLS_IMAGE) 33 | print(f"[+] Image '{MIPS_TOOLS_IMAGE}' is present.") 34 | 35 | container_arg = " ".join(["compile", args.output] + args.input) 36 | 37 | print(f"[*] Running mips_tools with: '{container_arg}'") 38 | # Run a container from the image with the specified argument 39 | client.containers.run( 40 | MIPS_TOOLS_IMAGE, 41 | container_arg, 42 | volumes=[f"{os.getcwd()}:/shared"] 43 | ) 44 | 45 | print("[+] 
Compiled with success. Bye.") 46 | 47 | except docker.errors.ImageNotFound: 48 | print(f"[x] Image '{MIPS_TOOLS_IMAGE}' not found. Please build or download the image.") 49 | 50 | except docker.errors.ContainerError as e: 51 | print(f"[x] Conteiner returns with an error:\n{e}") 52 | 53 | except docker.errors.APIError as e: 54 | print(f"[x] {str(e)}") 55 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/mips_merge.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | from typing import Any, Dict, List 3 | import docker 4 | import os 5 | 6 | MIPS_TOOLS_IMAGE = "draytek-arsenal" 7 | 8 | class MipsMergeCommand(Command): 9 | 10 | @staticmethod 11 | def name() -> str: 12 | return "mips_merge" 13 | 14 | @staticmethod 15 | def description() -> str: 16 | return "Merge two ELF MIPS relocatable files" 17 | 18 | @staticmethod 19 | def args() -> List[Dict[str, Any]]: 20 | return [ 21 | {"flags": ["first_input"], "kwargs": {"type": str, "help": "First input file"}}, 22 | {"flags": ["second_input"], "kwargs": {"type": str, "help": "Second input file"}}, 23 | {"flags": ["output"], "kwargs": {"type": str, "help": "Output file"}}, 24 | ] 25 | 26 | 27 | @staticmethod 28 | def execute(args): 29 | client = docker.from_env() 30 | 31 | try: 32 | # Check if the image exists 33 | _ = client.images.get(MIPS_TOOLS_IMAGE) 34 | print(f"[+] Image '{MIPS_TOOLS_IMAGE}' is present.") 35 | 36 | container_arg = " ".join(["merge", args.first_input, args.second_input, args.output]) 37 | 38 | print(f"[*] Running mips_tools with: '{container_arg}'") 39 | # Run a container from the image with the specified argument 40 | client.containers.run( 41 | MIPS_TOOLS_IMAGE, 42 | container_arg, 43 | volumes=[f"{os.getcwd()}:/shared"] 44 | ) 45 | 46 | print("[+] Compiled with success. Bye.") 47 | 48 | except docker.errors.ImageNotFound: 49 | print(f"[x] Image '{MIPS_TOOLS_IMAGE}' not found. 
Please build or download the image.") 50 | 51 | except docker.errors.ContainerError as e: 52 | print(f"[x] Conteiner returns with an error:\n{e}") 53 | 54 | except docker.errors.APIError as e: 55 | print(f"[x] {str(e)}") 56 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/parse.py: -------------------------------------------------------------------------------- 1 | from draytek_arsenal.commands.base import Command 2 | import yaml 3 | from typing import Any, Dict, List 4 | from draytek_arsenal.format import parse_firmware 5 | from draytek_arsenal.draytek_format import Draytek 6 | 7 | class ParseCommand(Command): 8 | 9 | @staticmethod 10 | def name() -> str: 11 | return "parse_firmware" 12 | 13 | @staticmethod 14 | def description() -> str: 15 | return "Parse and show information of a Draytec firmware" 16 | 17 | @staticmethod 18 | def args() -> List[Dict[str, Any]]: 19 | return [{"flags": ["firmware"], "kwargs": {"type": str, "help": "Path to the firmware"}}] 20 | 21 | 22 | @staticmethod 23 | def execute(args): 24 | 25 | struct = parse_firmware(args.firmware) 26 | 27 | if isinstance(struct, Draytek): 28 | object = { 29 | "type": "RTOS", 30 | "bin": { 31 | "header": { 32 | "size": hex(struct.bin.header.size), 33 | "version_info": hex(struct.bin.header.version_info), 34 | "next_section": hex(struct.bin.header.next_section.value), 35 | "adjusted_size": hex(struct.bin.header.adj_size), 36 | "bootloader_version": struct.bin.header.bootloader_version, 37 | "product_number": struct.bin.header.product_number 38 | }, 39 | "rtos": { 40 | "size": hex(struct.bin.rtos.rtos_size) 41 | }, 42 | "checksum": hex(struct.bin.checksum) 43 | 44 | }, 45 | "web": { 46 | "header": { 47 | "size": hex(struct.web.header.size), 48 | "adjusted_size": hex(struct.web.header.adj_size), 49 | "next_section": hex(struct.web.header.next_section) 50 | }, 51 | "checksum": hex(struct.web.checksum) 52 | } 53 | } 54 | 55 | else: 56 | object = { 57 | "type": "Linux", 58 | "nonce": struct.nonce.decode(), 59 | "image_start": struct.image_start, 60 | "image_len": struct.image_len 61 | } 62 | 63 | print("[+] Firmware information:\n" + yaml.safe_dump(object)) 64 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/commands/patch.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Dict, List 2 | from draytek_arsenal.commands.base import Command 3 | from draytek_arsenal.format import parse_firmware 4 | from draytek_arsenal.compression import Lz4 5 | from draytek_arsenal.fs import PFSExtractor 6 | from os import path 7 | from struct import pack, unpack 8 | import tempfile 9 | import os 10 | from capstone import Cs, CS_ARCH_MIPS, CS_MODE_MIPS32, CS_MODE_BIG_ENDIAN 11 | 12 | class PatchCommand(Command): 13 | @staticmethod 14 | def name() -> str: 15 | return "patch" 16 | 17 | 18 | @staticmethod 19 | def args() -> List[Dict[str, Any]]: 20 | return [ 21 | {"flags": ["firmware"], "kwargs": {"type": str, "help": "Path to the firmware"}}, 22 | # {"flags": ["patches"], "kwargs": {"type": str, "help": "Path to the patches file"}}, 23 | {"flags": ["output"], "kwargs": {"type": str, "help": "Path to the output file"}}, 24 | ] 25 | 26 | @staticmethod 27 | def description() -> str: 28 | return "Command used to patch Draytek firmwares" 29 | 30 | @staticmethod 31 | def disasm(code): 32 | capmd = Cs(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN) 33 | 
instructions = [i for i in capmd.disasm(code, 0)] 34 | for i in instructions: 35 | print(f" {i.mnemonic} {i.op_str}") 36 | 37 | @staticmethod 38 | def patch(data, addr, patch): 39 | patch_offset = addr - 0x8002d478 40 | patch_size = len(patch) 41 | print("[+] Patching code @ 0x{:x}".format(patch_offset)) 42 | PatchCommand.disasm(data[patch_offset:patch_offset+patch_size]) 43 | print(" With:") 44 | PatchCommand.disasm(patch) 45 | data = data[:patch_offset] + patch + data[patch_offset+patch_size:] 46 | 47 | @staticmethod 48 | def v2kCheckSum(data): 49 | c = 0 50 | for i in range(0, len(data), 4): 51 | c+=unpack(">I", data[i:i + 4])[0] 52 | c = (c & 0xffffffff) 53 | return c 54 | 55 | @staticmethod 56 | def repack_fw(fw_struct, patched_rtos): 57 | ############################################### 58 | # No chequee si anda bien en el caso con DLMs # 59 | ############################################### 60 | 61 | # BIN 62 | ## Header w/o size ni version_info | next_section 63 | bin_data = fw_struct.bin.header.rest 64 | ## Bootloader 65 | for w in fw_struct.bin.bootloader.data: 66 | bin_data += pack(">I", w) 67 | ## RTOS 68 | bin_data += pack(">I", len(patched_rtos)) 69 | bin_data += patched_rtos 70 | bin_data += b"\x00" * (4 - (len(patched_rtos) & 0x3)) # Checksum needs len(bin_data) % 4 == 0 71 | ## Easier to calculate next_section here 72 | next_section = len(bin_data) 73 | ## DLMs 74 | if fw_struct.has_dlm: 75 | bin_data += fw_struct.bin.dlm.magic.encode('utf-8') 76 | bin_data += fw_struct.bin.dlm.data 77 | ## Header size and version_info | next_section 78 | bin_data = pack(">I", len(bin_data) + 12) + pack(">I", fw_struct.bin.header.version_info << 24 | next_section) + bin_data 79 | ## Checksum 80 | bin_data += pack(">I", PatchCommand.v2kCheckSum(bin_data) ^ 0xffffffff) 81 | 82 | # WEB 83 | web_data = pack(">I", fw_struct.web.header.size) 84 | web_data += pack(">I", fw_struct.web.header.next_section) 85 | web_data += fw_struct.web.data 86 | web_data += fw_struct.web.padding 87 | web_data += pack(">I", fw_struct.web.not_checksum) 88 | 89 | return bin_data + web_data 90 | 91 | @staticmethod 92 | def execute(args) -> None: 93 | fw_struct = parse_firmware(args.firmware) 94 | 95 | if not args.output: 96 | print("[x] Need output filename") 97 | 98 | elif fw_struct.bin.rtos.rtos_size != len(fw_struct.bin.rtos.data): 99 | print(f"[x] Data length ({len(fw_struct.bin.rtos.data)}) doesn't match with the header length ({fw_struct.bin.rtos.rtos_size})") 100 | 101 | else: 102 | lz4 = Lz4() 103 | decompressed_rtos = lz4.decompress(fw_struct.bin.rtos.data) 104 | 105 | patched_rtos = lz4.compress(decompressed_rtos) 106 | 107 | repacked_fw = PatchCommand.repack_fw(fw_struct, patched_rtos) 108 | 109 | with open(args.output, "wb") as output_file: 110 | output_file.write(repacked_fw) 111 | 112 | 113 | print(f"[*] RTOS patched in {args.output}") 114 | 115 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/compression.py: -------------------------------------------------------------------------------- 1 | import struct 2 | import lz4.block 3 | 4 | class Lz4(): 5 | """Draytek's modified Lz4 implementation""" 6 | magic = b"\xaa\x1d\x7f\x50" 7 | max_decompressed_block_size = 0x10000 8 | 9 | def decompress(self, input, last_block: int | None = None) -> bytes: 10 | if self.magic != input[:4]: 11 | print("[LZ4] Bad block magic: 0x{:x}".format(struct.unpack(">I", input[:4])[0])) 12 | block_offset = 4 13 | output = b"" 14 | while block_offset < len(input) 
and (last_block is None or block_offset != last_block): 15 | block_data_size = struct.unpack(" 0: 18 | print(f"[LZ4] Uncrompressed block") 19 | 20 | if block_data_size > 2 * self.max_decompressed_block_size: 21 | print( 22 | f"[LZ4] Wrong block size: {hex(block_data_size)} at {hex(block_offset)} with {input[block_offset:block_offset + 4]}" 23 | ) 24 | exit(1) 25 | block_data_offset = block_offset + 4 26 | block = input[block_data_offset:block_data_offset+block_data_size] 27 | try: 28 | output += lz4.block.decompress(block, uncompressed_size=self.max_decompressed_block_size) 29 | except Exception as e: 30 | print(e) 31 | exit(1) 32 | 33 | block_offset += block_data_size + 4 34 | return output 35 | 36 | def compress(self, input): 37 | output = self.magic 38 | 39 | i = 0 40 | for i in range(0,len(input), self.max_decompressed_block_size): 41 | compressed_block = lz4.block.compress(input[i:i + self.max_decompressed_block_size], store_size=False) 42 | output += struct.pack(" None: 13 | self._key1 = key1 14 | self._key2 = key2 15 | 16 | def generate_digest(self, k1, k2): 17 | if len(k2) > 0x41: 18 | k2 = md5(k2).digest() 19 | k2 = k2 + b'\x00' * (0x40 - len(k2)) 20 | k2_1 = bytes(a ^ b for a, b in zip(k2, b'\x36' * 0x40)) 21 | k2_2 = bytes(a ^ b for a, b in zip(k2, b'\x5c' * 0x40)) 22 | d1 = md5(k2_1 + k1).digest() 23 | d2 = md5(k2_2 + d1).digest() 24 | return d2 25 | 26 | def xtea_decrypt(self, v, k, endianness = ">"): 27 | assert(len(v) == 8) 28 | assert(len(k) == 16) 29 | v0 = struct.unpack(endianness + "I", v[0:4])[0] 30 | v1 = struct.unpack(endianness + "I", v[4:8])[0] 31 | ks = struct.unpack(endianness + "IIII", k) 32 | sum = 0xc6ef3720 33 | delta = 0x61c88647 34 | while sum != 0: 35 | # print("sum: 0x{:x}, v0: 0x{:x}, v1: 0x{:x}".format(sum, v0, v1)) 36 | rk = (sum + ks[3 & (sum >> 11)]) & 0xffffffff 37 | # print("a1_1: 0x{:x}".format(a1_1)) 38 | sum = (sum + delta) & 0xffffffff 39 | v1 = (v1 - (rk ^ ((v0 >> 5 ^ v0 << 4) + v0) & 0xffffffff)) & 0xffffffff 40 | v0 = (v0 - ((sum + ks[sum & 3]) & 0xffffffff ^ ((v1 >> 5 ^ v1 << 4) + v1) & 0xffffffff)) & 0xffffffff 41 | return struct.pack(endianness + "I", v0) + struct.pack(endianness + "I", v1) 42 | 43 | def xtea_encrypt(self, v, k, endianness = ">"): 44 | assert(len(v) == 8) 45 | assert(len(k) == 16) 46 | v0 = struct.unpack(endianness + "I", v[0:4])[0] 47 | v1 = struct.unpack(endianness + "I", v[4:8])[0] 48 | ks = struct.unpack(endianness + "IIII", k) 49 | sum = 0x0 50 | delta = 0x61c88647 51 | while sum != 0xc6ef3720: 52 | # print("sum: 0x{:x}, v0: 0x{:x}, v1: 0x{:x}".format(sum, v0, v1)) 53 | v0 = (v0 + ((sum + ks[sum & 3]) & 0xffffffff ^ ((v1 >> 5 ^ v1 << 4) + v1) & 0xffffffff)) & 0xffffffff 54 | sum = (sum - delta) & 0xffffffff 55 | if sum < 0: 56 | sum = ((sum ^ 0xffffffff) + 1) & 0xffffffff 57 | rk = (sum + ks[3 & (sum >> 11)]) & 0xffffffff 58 | v1 = (v1 + (rk ^ ((v0 >> 5 ^ v0 << 4) + v0) & 0xffffffff)) & 0xffffffff 59 | # print("a1_1: 0x{:x}".format(a1_1)) 60 | return struct.pack(endianness + "I", v0) + struct.pack(endianness + "I", v1) 61 | 62 | def restore(self, data): 63 | dlm_header = data[:0x34] 64 | print("[*] Parsing header") 65 | size, main_id_0, main_id_1, main_id_2, main_id_3, padding, signature = struct.unpack(self.dlm_header_format, dlm_header) 66 | print(f"\tsize: {size}\n\tsignature: {hex(signature)}") 67 | 68 | dlm_data = data[0x34:] 69 | dlm_data_len = len(dlm_data) 70 | print(f"\tdata_len: {len(dlm_data)}") 71 | 72 | if main_id_0 != 0: 73 | print("[x] Main ID 0: 0x{:x}".format(main_id_0)) 74 | exit(1) 75 | 76 | if 
main_id_2 != 1: 77 | print("[x] Main ID 2: 0x{:x}".format(main_id_2)) 78 | 79 | if main_id_3 != 0xffff: 80 | print("[x] Main ID 3: 0x{:x}".format(main_id_3)) 81 | exit(1) 82 | 83 | if signature != 0x12345678: 84 | print("[x] Signature: 0x{:x}".format(signature)) 85 | exit(1) 86 | 87 | if dlm_data_len != size - 0x34: 88 | print("[x] DLM data length error") 89 | exit(1) 90 | 91 | print("[*] Generating digest") 92 | key = self.generate_digest(self._key1, self._key2) 93 | 94 | print("[*] Decrypting data") 95 | dlm_data_decrypted = b'' 96 | idx = 0 97 | while dlm_data_len - idx >= 8: 98 | dlm_data_decrypted += self.xtea_decrypt(dlm_data[idx:idx + 8], key) 99 | idx += 8 100 | print(f"[*] Process: {((100 / dlm_data_len) * idx):.1f} of 100.0", end="\r") 101 | 102 | dlm_data_decrypted = dlm_data_decrypted + dlm_data[idx:] 103 | # Remove padding (xtea decrypts 8 byte blocks and the last one might have padding) 104 | dlm_data_decrypted = dlm_data_decrypted.rstrip(b"\x00") 105 | lz4 = Lz4() 106 | print("[*] Decryption header:") 107 | hexdump(dlm_data_decrypted[:0x18]) 108 | # Add checks for compression_header size and digest 109 | print("[*] Decompressing") 110 | dlm_data_decrypted_decompressed = lz4.decompress(dlm_data_decrypted[0x18:]) 111 | return dlm_data_decrypted_decompressed 112 | 113 | def pack(self, data): 114 | lz4 = Lz4() 115 | digest = self.generate_digest(data, self._key2) 116 | compressed_data = lz4.compress(data) 117 | compressed_data_len = len(compressed_data) 118 | # Possible place for IoC 119 | compression_header = struct.pack(self.compression_header_format, 0, compressed_data_len, digest) 120 | compressed_data = compression_header + compressed_data 121 | compressed_data_len = len(compressed_data) 122 | 123 | key = self.generate_digest(self._key1, self._key2) 124 | encrypted_compressed_data = b"" 125 | idx = 0 126 | while compressed_data_len - idx >= 8: 127 | encrypted_compressed_data += self.xtea_encrypt(compressed_data[idx:idx + 8], key) 128 | idx += 8 129 | encrypted_compressed_data = encrypted_compressed_data + compressed_data[idx:] 130 | 131 | size = compressed_data_len + 0x34 132 | main_id_0 = 0 133 | main_id_1 = 1 134 | main_id_2 = 1 135 | main_id_3 = 0xffff 136 | # Possible place for IoC 137 | padding = b"\x00" * 36 138 | dlm_header = struct.pack(self.dlm_header_format, size, main_id_0, main_id_1, main_id_2, main_id_3, padding, self.dlm_signature) 139 | dlm = dlm_header + encrypted_compressed_data 140 | 141 | return dlm 142 | -------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/draytek_format.py: -------------------------------------------------------------------------------- 1 | # This is a generated file! 
2 | 
3 | import kaitaistruct
4 | from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
5 | 
6 | 
7 | if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
8 |     raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
9 | 
10 | class Draytek(KaitaiStruct):
11 |     def __init__(self, has_dlm, _io, _parent=None, _root=None):
12 |         self._io = _io
13 |         self._parent = _parent
14 |         self._root = _root if _root else self
15 |         self.has_dlm = has_dlm
16 |         self._read()
17 | 
18 |     def _read(self):
19 |         self.bin = Draytek.BinSection(self._io, self, self._root)
20 |         self.web = Draytek.WebSection(self._io, self, self._root)
21 | 
22 |     class BinSection(KaitaiStruct):
23 |         def __init__(self, _io, _parent=None, _root=None):
24 |             self._io = _io
25 |             self._parent = _parent
26 |             self._root = _root if _root else self
27 |             self._read()
28 | 
29 |         def _read(self):
30 |             self.header = Draytek.BinHeader(self._io, self, self._root)
31 |             self.bootloader = Draytek.Bootloader(self._io, self, self._root)
32 |             self.rtos = Draytek.Rtos(self._io, self, self._root)
33 |             if self._parent.has_dlm:
34 |                 self.dlm = Draytek.Dlm(self._io, self, self._root)
35 | 
36 |             self.not_checksum = self._io.read_u4be()
37 | 
38 |         @property
39 |         def checksum(self):
40 |             if hasattr(self, '_m_checksum'):
41 |                 return self._m_checksum
42 | 
43 |             self._m_checksum = (self.not_checksum ^ 4294967295)
44 |             return getattr(self, '_m_checksum', None)
45 | 
46 | 
47 |     class Dlm(KaitaiStruct):
48 |         def __init__(self, _io, _parent=None, _root=None):
49 |             self._io = _io
50 |             self._parent = _parent
51 |             self._root = _root if _root else self
52 |             self._read()
53 | 
54 |         def _read(self):
55 |             self.magic = self._io.read_bytes(7)
56 |             if not self.magic == b"\x44\x4C\x4D\x2F\x31\x2E\x30":
57 |                 raise kaitaistruct.ValidationNotEqualError(b"\x44\x4C\x4D\x2F\x31\x2E\x30", self.magic, self._io, u"/types/dlm/seq/0")
58 |             self.data = self._io.read_bytes((self._parent.header.adj_size - self._io.pos()))
59 | 
60 | 
61 |     class Bootloader(KaitaiStruct):
62 |         def __init__(self, _io, _parent=None, _root=None):
63 |             self._io = _io
64 |             self._parent = _parent
65 |             self._root = _root if _root else self
66 |             self._read()
67 | 
68 |         def _read(self):
69 |             self.data = []
70 |             i = 0
71 |             while True:
72 |                 _ = self._io.read_u4be()
73 |                 self.data.append(_)
74 |                 if _ == 2774181210:
75 |                     break
76 |                 i += 1
77 | 
78 | 
79 |     class Rtos(KaitaiStruct):
80 |         def __init__(self, _io, _parent=None, _root=None):
81 |             self._io = _io
82 |             self._parent = _parent
83 |             self._root = _root if _root else self
84 |             self._read()
85 | 
86 |         def _read(self):
87 |             self.rtos_size = self._io.read_u4be()
88 |             self.data = self._io.read_bytes(self.rtos_size)
89 |             if (self._io.pos() % 4) != 0:
90 |                 self.padding = self._io.read_bytes((4 - (self._io.pos() % 4)))
91 | 
92 | 
93 | 
94 |     class U3(KaitaiStruct):
95 |         def __init__(self, _io, _parent=None, _root=None):
96 |             self._io = _io
97 |             self._parent = _parent
98 |             self._root = _root if _root else self
99 |             self._read()
100 | 
101 |         def _read(self):
102 |             self.b12 = self._io.read_u2be()
103 |             self.b3 = self._io.read_u1()
104 | 
105 |         @property
106 |         def value(self):
107 |             if hasattr(self, '_m_value'):
108 |                 return self._m_value
109 | 
110 |             self._m_value = ((self.b12 << 12) | self.b3)
111 |             return getattr(self, '_m_value', None)
112 | 
113 | 
114 |     class BinHeader(KaitaiStruct):
115 |         def __init__(self, _io, _parent=None, _root=None):
116 |             self._io = _io
117 |             self._parent = _parent
118 |             self._root = _root if _root else self
119 |             self._read()
120 | 
121 |         def _read(self):
122 |             self.size = self._io.read_u4be()
123 |             self.version_info = self._io.read_u1()
124 |             self.next_section = Draytek.U3(self._io, self, self._root)
125 |             self.rest = self._io.read_bytes(248)
126 | 
127 |         @property
128 |         def adj_size(self):
129 |             if hasattr(self, '_m_adj_size'):
130 |                 return self._m_adj_size
131 | 
132 |             self._m_adj_size = ((((self.size + 3) >> 2) - 1) << 2)
133 |             return getattr(self, '_m_adj_size', None)
134 | 
135 |         @property
136 |         def bootloader_version(self):
137 |             if hasattr(self, '_m_bootloader_version'):
138 |                 return self._m_bootloader_version
139 | 
140 |             self._m_bootloader_version = (self.version_info >> 4)
141 |             return getattr(self, '_m_bootloader_version', None)
142 | 
143 |         @property
144 |         def product_number(self):
145 |             if hasattr(self, '_m_product_number'):
146 |                 return self._m_product_number
147 | 
148 |             self._m_product_number = (self.version_info & 15)
149 |             return getattr(self, '_m_product_number', None)
150 | 
151 | 
152 |     class WebSection(KaitaiStruct):
153 |         def __init__(self, _io, _parent=None, _root=None):
154 |             self._io = _io
155 |             self._parent = _parent
156 |             self._root = _root if _root else self
157 |             self._read()
158 | 
159 |         def _read(self):
160 |             self.header = Draytek.WebHeader(self._io, self, self._root)
161 |             self.data = self._io.read_bytes(self.header.next_section)
162 |             self.padding = self._io.read_bytes(((self.header.size - self.header.next_section) - 12))
163 |             self.not_checksum = self._io.read_u4be()
164 | 
165 |         @property
166 |         def checksum(self):
167 |             if hasattr(self, '_m_checksum'):
168 |                 return self._m_checksum
169 | 
170 |             self._m_checksum = (self.not_checksum ^ 4294967295)
171 |             return getattr(self, '_m_checksum', None)
172 | 
173 | 
174 |     class WebHeader(KaitaiStruct):
175 |         def __init__(self, _io, _parent=None, _root=None):
176 |             self._io = _io
177 |             self._parent = _parent
178 |             self._root = _root if _root else self
179 |             self._read()
180 | 
181 |         def _read(self):
182 |             self.size = self._io.read_u4be()
183 |             self.next_section = self._io.read_u4be()
184 | 
185 |         @property
186 |         def adj_size(self):
187 |             if hasattr(self, '_m_adj_size'):
188 |                 return self._m_adj_size
189 | 
190 |             self._m_adj_size = ((((self.size + 3) >> 2) - 1) << 2)
191 |             return getattr(self, '_m_adj_size', None)
192 | 
193 | 
194 | 
195 | 
-------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/format.py: --------------------------------------------------------------------------------
1 | from draytek_arsenal.linux import DraytekLinux
2 | from draytek_arsenal.draytek_format import Draytek
3 | from kaitaistruct import KaitaiStream
4 | from io import BytesIO
5 | 
6 | 
7 | def parse_firmware(filename: str) -> Draytek | DraytekLinux:
8 |     # Read the whole image up front so the file handle is always closed
9 |     with open(filename, 'rb') as f:
10 |         data = f.read()
11 | 
12 |     if b"nonce" in data and b"enc_Image" in data:
13 |         return DraytekLinux(data)
14 | 
15 |     has_dlm = b"DLM/1.0" in data
16 | 
17 |     return Draytek(has_dlm, KaitaiStream(BytesIO(data)))
18 | 
-------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/fs.py: --------------------------------------------------------------------------------
1 | import os
2 | import errno
3 | import struct
4 | from draytek_arsenal.compression import Lz4
5 | from draytek_arsenal.dlm import DLM
6 | 
7 | class PFSCommon(object):
8 |     def _make_short(self, data, endianness):
9 |         """Returns a 2 byte integer."""
10 |         # data = binwalk.core.compat.str2bytes(data)
11 |         return struct.unpack('%sH' % endianness, data)[0]
12 | 
13 |     def _make_int(self, data, endianness):
14 |         """Returns a 4 byte integer."""
15 |         # data = binwalk.core.compat.str2bytes(data)
16 |         return struct.unpack('%sI' % endianness, data)[0]
17 | 
18 | class PFS(PFSCommon):
19 |     """Class for accessing PFS meta-data."""
20 |     HEADER_SIZE = 16
21 |     def __init__(self, fname, endianness='<'):
22 |         self.endianness = endianness
23 |         self.meta = open(fname, 'rb')
24 |         header = self.meta.read(self.HEADER_SIZE)
25 |         self.file_list_start = self.meta.tell()
26 |         self.dlm_fs = header[:7] == b"DLM/1.0"
27 |         self.num_files = self._make_short(header[-2:], endianness)
28 |         self.node_size = self._get_fname_len() + 12
29 | 
30 |     def _get_fname_len(self, bufflen=128):
31 |         """Returns the number of bytes designated for the filename."""
32 |         buff = self.meta.peek(bufflen)
33 |         strlen = buff.find(b'\x00')
34 |         for i, b in enumerate(buff[strlen:]):
35 |             if b != 0:
36 |                 return strlen+i
37 |         return bufflen
38 | 
39 |     def _get_node(self):
40 |         """Reads a chunk of meta data from file and returns a PFSNode."""
41 |         data = self.meta.read(self.node_size)
42 |         return PFSNode(data, self.endianness)
43 | 
44 |     def get_end_of_meta_data(self):
45 |         """Returns integer indicating the end of the file system meta data."""
46 |         return self.HEADER_SIZE + self.node_size * self.num_files
47 | 
48 |     def entries(self):
49 |         """Returns file meta-data entries one by one."""
50 |         self.meta.seek(self.file_list_start)
51 |         for i in range(0, self.num_files):
52 |             yield self._get_node()
53 | 
54 |     def __enter__(self):
55 |         return self
56 | 
57 |     def __exit__(self, type, value, traceback):
58 |         self.meta.close()
59 | 
60 | class PFSNode(PFSCommon):
61 |     """A node in the PFS Filesystem containing meta-data about a single file."""
62 |     def __init__(self, data, endianness):
63 |         self.fname, data = data[:-12], data[-12:]
64 |         self._decode_fname()
65 |         self.inode_no = self._make_int(data[:4], endianness)
66 |         self.foffset = self._make_int(data[4:8], endianness)
67 |         self.fsize = self._make_int(data[8:], endianness)
68 | 
69 |     def _decode_fname(self):
70 |         """Extracts the actual string from the available bytes."""
71 |         null_pos = self.fname.find(b'\x00')
72 |         if null_pos != -1:
73 |             self.fname = self.fname[:null_pos]
74 |         self.fname = self.fname.replace(b'\\', b'/').decode()
75 | 
76 | class PFSExtractor():
77 |     """
78 |     Extractor for Draytek PFS File System Formats.
79 |     Adapted from https://github.com/ReFirmLabs/binwalk/blob/master/src/binwalk/plugins/unpfs.py
80 |     """
81 | 
82 |     def __init__(self, key1: bytes = b"", key2: bytes = b"") -> None:
83 |         self._key1 = key1
84 |         self._key2 = key2
85 | 
86 |     def _create_dir_from_fname(self, fname):
87 |         try:
88 |             os.makedirs(os.path.dirname(fname))
89 |         except OSError as e:
90 |             if e.errno != errno.EEXIST:
91 |                 raise e
92 | 
93 |     def extract(self, fname, out_dir):
94 |         fname = os.path.abspath(fname)
95 |         lz4 = Lz4()
96 |         dlm = DLM(self._key1, self._key2)
97 | 
98 |         print("[*] Extracting PFS filesystem to: {}".format(out_dir))
99 | 
100 |         with PFS(fname) as fs:
101 |             # The end of PFS meta data is the start of the actual data
102 |             data = open(fname, 'rb')
103 |             data.seek(fs.get_end_of_meta_data())
104 |             for entry in fs.entries():
105 |                 print(f"[*] FS entry found: '{entry.fname}'")
106 |                 outfile_path = os.path.abspath(os.path.join(out_dir, entry.fname))
107 | 
108 |                 self._create_dir_from_fname(outfile_path)
109 | 
110 |                 with open(outfile_path, 'wb') as outfile:
111 |                     file_content = data.read(entry.fsize)
112 |                     if fs.dlm_fs:
113 |                         print("[*] Restoring file as DLM")
114 |                         file_content = dlm.restore(file_content)
115 |                     elif lz4.magic == file_content[:4]:
116 |                         file_content = lz4.decompress(file_content)
117 |                     outfile.write(file_content)
118 | 
119 |             data.close()
120 | 
121 |         return out_dir
122 | 
-------------------------------------------------------------------------------- /draytek_arsenal/src/draytek_arsenal/linux.py: --------------------------------------------------------------------------------
1 | import struct
2 | 
3 | 
4 | class DraytekLinux:
5 |     def __init__(self, data: bytes) -> None:
6 |         self._data = data
7 | 
8 |         self.nonce: bytes
9 |         self.image_start: int
10 |         self.image_end: int
11 | 
12 | 
13 |         nonce_magic = data.find(b"nonce")
14 |         if nonce_magic == -1:
15 |             raise AttributeError("[x] Couldn't find the 'nonce' magic")
16 | 
17 |         self.nonce = data[nonce_magic + 9: nonce_magic + 9 + 0xC]
18 | 
19 |         image_magic = data.find(b"enc_Image")
20 |         if image_magic == -1:
21 |             raise AttributeError("[x] Couldn't find the 'enc_Image' magic")
22 | 
23 |         len_offset = image_magic + 9
24 |         self.image_start = image_magic + 13
25 |         self.image_len = struct.unpack("<I", data[len_offset:len_offset + 4])[0]  # NOTE: the format string and the rest of this file were lost in extraction; "<I" and line 26 are an assumption
26 |         self.image_end = self.image_start + self.image_len
-------------------------------------------------------------------------------- /draytek_fw.ksy: --------------------------------------------------------------------------------
1 | # NOTE: the beginning of this file (meta, params, seq and the u3/bin_header definitions) was lost in extraction; lines 1-37 are reconstructed from the generated parser in draytek_format.py and are an assumption
2 | meta:
3 |   id: draytek
4 |   endian: be
5 | params:
6 |   - id: has_dlm
7 |     type: bool
8 | 
9 | seq:
10 |   - id: bin
11 |     type: bin_section
12 |   - id: web
13 |     type: web_section
14 | 
15 | types:
16 |   u3:
17 |     seq:
18 |       - id: b12
19 |         type: u2
20 |       - id: b3
21 |         type: u1
22 |     instances:
23 |       value:
24 |         value: (b12 << 12) | b3
25 | 
26 |   bin_header:
27 |     seq:
28 |       - id: size
29 |         type: u4
30 |       - id: version_info
31 |         type: u1
32 |       - id: next_section
33 |         type: u3
34 |       - id: rest
35 |         size: 248
36 |     instances:
37 |       adj_size:
38 |         value: (((size + 3) >> 2) -1 ) << 2
39 |       bootloader_version:
40 |         value: version_info >> 4
41 |       product_number:
42 |         value: version_info & 0xf
43 | 
44 |   bootloader:
45 |     seq:
46 |       - id: data
47 |         type: u4
48 |         repeat: until
49 |         repeat-until: _ == 0xa55aa55a
50 | 
51 |   rtos:
52 |     seq:
53 |       - id: rtos_size
54 |         type: u4
55 |       - id: data
56 |         size: rtos_size
57 |       - id: padding
58 |         size: 4 - _io.pos % 4 # Check if there is no other data
59 |         if: _io.pos % 4 != 0
60 | 
61 |   dlm:
62 |     seq:
63 |       - id: magic
64 |         contents: DLM/1.0
65 |       - id: data
66 |         size: _parent.header.adj_size - _io.pos
67 | 
68 |   bin_section:
69 |     seq:
70 |       - id: header
71 |         type: bin_header
72 |       - id: bootloader
73 |         type: bootloader
74 |       - id: rtos
75 |         type: rtos
76 |       - id: dlm
77 |         type: dlm
78 |         if: _parent.has_dlm
79 |       - id: not_checksum
80 |         type: u4
81 |     instances:
82 |       checksum:
83 |         value: not_checksum ^ 0xffffffff
84 | 
85 |   web_header:
86 |     seq:
87 |       - id: size
88 |         type: u4
89 |       - id: next_section
90 |         type: u4
91 |     instances:
92 |       adj_size:
93 |         value: (((size + 3) >> 2) -1 ) << 2
94 | 
95 |   web_section:
96 |     seq:
97 |       - id: header
98 |         type: web_header
99 |       - id: data
100 |         size: header.next_section
101 |       - id: padding
102 |         size: header.size - header.next_section - 12
103 |       - id: not_checksum
104 |         type: u4
105 |     instances:
106 |       checksum:
107 |         value: not_checksum ^ 0xffffffff
108 | 
-------------------------------------------------------------------------------- /mips-tools/Dockerfile: --------------------------------------------------------------------------------
1 | FROM ubuntu
2 | RUN apt update
3 | RUN apt install -y gcc-mips-linux-gnu
4 | 
5 | WORKDIR /opt/
6 | COPY entry.sh .
7 | 
8 | ENTRYPOINT ["bash", "./entry.sh"]
9 | 
-------------------------------------------------------------------------------- /mips-tools/entry.sh: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | 
4 | # Check if the correct number of arguments is provided
5 | if [ "$#" -lt 1 ]; then
6 |     echo "Usage: $0 <command> [arguments...]" >&2
7 |     exit 1
8 | fi
9 | 
10 | # Determine which command to execute
11 | case "$1" in
12 |     "compile")
13 |         # Check if the correct number of arguments is provided for compile
14 |         if [ "$#" -lt 3 ]; then
15 |             echo "Usage: $0 compile <output_file> <input_file> [<input_file> ...]" >&2
16 |             exit 1
17 |         fi
18 |         # Extract output file path
19 |         output_file="/shared/$2"
20 |         # Shift arguments to get input files
21 |         shift 2
22 |         input_files=()
23 |         for file in "$@"; do
24 |             input_files+=("/shared/$file")
25 |         done
26 |         # Execute the compile command
27 |         echo "Compiling ${input_files[*]} to $output_file..."
28 | 
29 |         mips-linux-gnu-gcc -nostdlib -r -mno-abicalls -ffreestanding -msoft-float \
30 |             -o "$output_file" "${input_files[@]}"
31 |         ;;
32 |     "merge")
33 |         # Check if the correct number of arguments is provided for merge
34 |         if [ "$#" -ne 4 ]; then
35 |             echo "Usage: $0 merge <input_1> <input_2> <output_file>" >&2
36 |             exit 1
37 |         fi
38 |         # Execute the merge command
39 |         echo "Merging $2 and $3 to $4..."
40 | 
41 |         mips-linux-gnu-ld -relocatable "/shared/$2" "/shared/$3" -o "/shared/$4"
42 |         ;;
43 |     "shell")
44 |         # Execute the shell command
45 |         echo "Entering interactive shell..."
46 |         # Actual shell command goes here
47 |         bash
48 |         ;;
49 |     "run")
50 |         # Check if the correct number of arguments is provided for run
51 |         if [ "$#" -lt 2 ]; then
52 |             echo "Usage: $0 run <executable> [<argument> ...]" >&2
53 |             exit 1
54 |         fi
55 |         # Extract executable
56 |         executable="$2"
57 |         # Shift arguments to get the arguments for the executable
58 |         shift 2
59 |         # Execute the run command
60 |         bash -c "$executable $@"
61 |         # bash -c "$executable $@"
62 |         # Actual run command goes here
63 |         ;;
64 |     *)
65 |         # Invalid command
66 |         echo "Unknown command: $1" >&2
67 |         exit 1
68 |         ;;
69 | esac
70 | 
-------------------------------------------------------------------------------- /mips-tools/mips-tools.sh: --------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | 
4 | # Check if the mounting path is provided
5 | if [ -z "$1" ]; then
6 |     echo "Usage: $0 <mounting_path> [command...]"
7 |     exit 1
8 | fi
9 | 
10 | # Extract the mounting path
11 | mounting_path="$1"
12 | 
13 | # Check if the mounting path exists
14 | if [ ! -d "$mounting_path" ]; then
15 |     echo "Mounting path '$mounting_path' does not exist."
16 |     exit 1
17 | fi
18 | 
19 | echo "Mounting path: $mounting_path"
20 | 
21 | # Check if Docker image 'mips-tools' exists
22 | if ! docker image inspect mips-tools &> /dev/null; then
23 |     echo "Docker image 'mips-tools' not found, building it in $(dirname "$0")"
24 |     docker build -t mips-tools "$(dirname "$0")"
25 | fi
26 | 
27 | shift 1
28 | 
29 | echo "Running $@"
30 | 
31 | # Run Docker container named 'mips-tools'
32 | docker run -it -v "$mounting_path":/shared mips-tools "$@"
33 | 
--------------------------------------------------------------------------------
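
For orientation, here is a minimal sketch of how the Python pieces in this repository can be driven directly, as an alternative to the project's own CLI. The firmware path, output directory and DLM key values below are hypothetical placeholders, and the PFS blob passed to PFSExtractor is assumed to have been carved out of an image beforehand:

# Hedged usage sketch; file names and key values are placeholders, not part of the repository.
from draytek_arsenal.format import parse_firmware
from draytek_arsenal.draytek_format import Draytek
from draytek_arsenal.fs import PFSExtractor

# parse_firmware returns either a Draytek (RTOS image) or a DraytekLinux object
firmware = parse_firmware("vigor_firmware.all")

if isinstance(firmware, Draytek):
    # The Kaitai-generated parser exposes the inverted checksums of both sections
    print(f"bin checksum: {hex(firmware.bin.checksum)}")
    print(f"web checksum: {hex(firmware.web.checksum)}")

# Extract a previously carved PFS blob; key1/key2 are only needed when the entries are DLM-wrapped
extractor = PFSExtractor(key1=b"key1-placeholder", key2=b"key2-placeholder")
extractor.extract("carved_pfs.bin", "extracted_fs/")

The commands under src/draytek_arsenal/commands presumably expose the same functionality from the command line.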