├── package.json
├── config.json.example
├── requirements.txt
├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── security.yml
├── stat_file.js
├── README.md
├── download_file.js
└── process_opens.py
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" 12 | 13 | -------------------------------------------------------------------------------- /stat_file.js: -------------------------------------------------------------------------------- 1 | const { Corellium } = require("corellium-api"); 2 | const process = require('process'); 3 | 4 | const config_data = require('./config.json'); 5 | 6 | async function main() { 7 | // Configure the API. 8 | let corellium = new Corellium({ 9 | endpoint: config_data.endpoint, 10 | username: config_data.username, 11 | password: config_data.password 12 | }); 13 | 14 | await corellium.login(); 15 | 16 | let projects = await corellium.projects(); 17 | let project = projects.find((project) => project.name === config_data.project); 18 | let instances = await project.instances(); 19 | 20 | let instance = instances.find( 21 | (instance) => instance.id === config_data.instance, 22 | ); 23 | 24 | let agent = await instance.newAgent(); 25 | await agent.ready(); 26 | 27 | try { 28 | let result = await agent.stat(process.argv[2]); 29 | console.log(JSON.stringify(result)); 30 | } catch(err) { 31 | console.log('NotFound') 32 | } 33 | 34 | await agent.disconnect(); 35 | } 36 | 37 | main().catch((err) => { 38 | console.error(err); 39 | }); 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Mapping iOS Persistence Attack Surface using Corellium 2 | 3 | Accompanying code for a [blog post](https://www.corellium.com/blog/mapping-ios-persistence). 
4 | 5 | This repository contains a `uniq_opens.txt` file as generated in the blog post. Replace this file if new data is generated. 6 | 7 | ## Setup 8 | 9 | 1. Install Neo4J Desktop or create [AuraDB](https://neo4j.com/cloud/platform/aura-graph-database/) instance 10 | 2. Run `brew install libmagic` 11 | 3. Run `python3 -m pip install -r requirements.txt` in the project root 12 | 4. Run `npm install` in the project root 13 | 5. Download and unpack the target IPSW and mount the root filesystem 14 | 6. Run `diskutil enableOwnership /Volumes//Volumes/SkyF19F77.D10D101D20D201OS` (or whatever the actual mount point is) 15 | 7. Edit the `process_opens.py` script so that `ROOT_FS_PATH` is the mount point of the filesystem 16 | 8. Edit the `process_opens.py` script to fill in the Neo4J credentials/host 17 | 9. Copy `config.json.example` to `config.json` and fill in the Corellium credentials, project name, and device UUID 18 | 10. Invoke the script: `sudo python3 process_opens.py` (as root because we enable permissions on the FS) 19 | 20 | Once complete, validate that the data imported successfully by running this Cypher query in the Neo4J Browser: 21 | 22 | ``` 23 | MATCH (p:Process) RETURN p 24 | ``` 25 | -------------------------------------------------------------------------------- /download_file.js: -------------------------------------------------------------------------------- 1 | const { Corellium } = require("corellium-api"); 2 | const fs = require('fs'); 3 | const process = require('process'); 4 | 5 | const config_data = require('./config.json'); 6 | 7 | async function downloadFile(agent, remote_path, local_path) { 8 | return new Promise(resolve => { 9 | const dl = agent.download(remote_path); 10 | let b = fs.createWriteStream(local_path); 11 | b.on('finish', resolve); 12 | dl.pipe(b); 13 | }); 14 | } 15 | 16 | async function main() { 17 | // Configure the API. 
18 | let corellium = new Corellium({ 19 | endpoint: config_data.endpoint, 20 | username: config_data.username, 21 | password: config_data.password 22 | }); 23 | 24 | await corellium.login(); 25 | let projects = await corellium.projects(); 26 | let project = projects.find((project) => project.name === config_data.project); 27 | 28 | let instances = await project.instances(); 29 | let instance = instances.find( 30 | (instance) => instance.id === config_data.instance, 31 | ); 32 | 33 | let agent = await instance.newAgent(); 34 | await agent.ready(); 35 | 36 | await downloadFile(agent, process.argv[2], process.argv[3]); 37 | 38 | await agent.disconnect(); 39 | } 40 | 41 | main().catch((err) => { 42 | console.error(err); 43 | }); 44 | -------------------------------------------------------------------------------- /.github/workflows/security.yml: -------------------------------------------------------------------------------- 1 | name: Security Compliance 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - master 8 | 9 | permissions: read-all 10 | 11 | jobs: 12 | check-quality: 13 | runs-on: ubuntu-latest 14 | name: Datadog Static Analyzer 15 | env: 16 | DD_API_KEY: ${{ secrets.DD_API_KEY }} 17 | DD_APP_KEY: ${{ secrets.DD_APP_KEY }} 18 | DD_SERVICE: ios_persistence_mapping 19 | DD_ENV: ci 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 23 | - name: Check code meets quality standards 24 | id: datadog-static-analysis 25 | run: | 26 | sudo apt update 27 | sudo apt install nodejs 28 | # Download Datadog static analyzer v0.6.4: 29 | # https://github.com/DataDog/datadog-static-analyzer/releases 30 | DATADOG_STATIC_ANALYZER_URL=https://github.com/DataDog/datadog-static-analyzer/releases/download/0.6.4/datadog-static-analyzer-x86_64-unknown-linux-gnu.zip 31 | curl -L $DATADOG_STATIC_ANALYZER_URL > /tmp/ddog-static-analyzer.zip 32 | unzip /tmp/ddog-static-analyzer.zip -d /tmp 33 | sudo mv 
/tmp/datadog-static-analyzer /usr/local/datadog-static-analyzer 34 | # Run Static Analysis 35 | /usr/local/datadog-static-analyzer -i . -o report.sarif -f sarif 36 | # Upload results 37 | npx @datadog/datadog-ci sarif upload report.sarif 38 | software-composition-analysis: 39 | runs-on: ubuntu-latest 40 | name: Datadog SBOM Generation and Upload 41 | env: 42 | DD_API_KEY: ${{ secrets.DD_API_KEY }} 43 | DD_APP_KEY: ${{ secrets.DD_APP_KEY }} 44 | steps: 45 | - name: Checkout 46 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 47 | - name: Check imported libraries are secure and compliant 48 | id: datadog-software-composition-analysis 49 | run: | 50 | sudo apt update 51 | sudo apt install nodejs 52 | # Download the Datadog OSV Scanner v0.14.0: 53 | # https://github.com/DataDog/osv-scanner/releases 54 | DATADOG_OSV_SCANNER_URL=https://github.com/DataDog/osv-scanner/releases/download/v0.14.0/osv-scanner_linux_amd64.zip 55 | # Install OSV Scanner 56 | sudo mkdir /osv-scanner 57 | sudo curl -L -o /osv-scanner/osv-scanner.zip $DATADOG_OSV_SCANNER_URL 58 | sudo unzip /osv-scanner/osv-scanner.zip -d /osv-scanner 59 | sudo chmod 755 /osv-scanner/osv-scanner 60 | # Run OSV Scanner and scan your dependencies 61 | /osv-scanner/osv-scanner --skip-git -r --experimental-only-packages --format=cyclonedx-1-5 --paths-relative-to-scan-dir --output=sbom.json . 
62 | # Upload results to Datadog 63 | npx @datadog/datadog-ci sbom upload sbom.json -------------------------------------------------------------------------------- /process_opens.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import json 3 | import os 4 | 5 | import magic 6 | import delegator 7 | from py2neo import Graph 8 | 9 | 10 | ROOT_FS_PATH = '/Volumes/SkyF19F77.D10D101D20D201OS' 11 | LOGFILE = 'uniq_opens.txt' 12 | 13 | NEO4J_USERNAME = 'neo4j' 14 | NEO4J_PASSWORD = 'hunter2' 15 | 16 | # For AuraDB: 17 | NEO4J_HOST = 'neo4j+s://randomly-generated.databases.neo4j.io' 18 | 19 | # For local Neo4J Desktop: 20 | # NEO4J_HOST = "bolt://127.0.0.1:7687" 21 | 22 | 23 | 24 | class Process(): 25 | def __init__(self, name): 26 | self.name = name 27 | 28 | if len(name) >= 15: 29 | # Could be a partial name 30 | search_name = name + '*' 31 | else: 32 | search_name = name 33 | 34 | cmd = 'find "{}" -name "{}" -type f 2>/dev/null'.format(ROOT_FS_PATH, search_name) 35 | r = delegator.run(cmd) 36 | try: 37 | full_path = r.out.splitlines()[0] 38 | except IndexError: 39 | print('Error finding local file "{}": "{}"'.format(search_name, r.out)) 40 | raise 41 | 42 | self.path = '/' + os.path.relpath(full_path, ROOT_FS_PATH) 43 | self.name = os.path.basename(full_path) 44 | 45 | def create(self): 46 | return "CREATE (:Process {{name:'{}', path:'{}'}})".format(self.name, self.path) 47 | 48 | 49 | class Filename(): 50 | def __init__(self, opened_path): 51 | self.opened_path = opened_path 52 | self.is_device_file = False 53 | self.is_missing = False 54 | self.is_dir = False 55 | self.file_type = None 56 | self.owner = None 57 | self.group = None 58 | self.perms = 0 59 | self.size = 0 60 | 61 | # Find the file on the filesystem 62 | if opened_path.startswith('/dev/'): 63 | self.is_device_file = True 64 | return 65 | 66 | # First try local, then remote 67 | try: 68 | self._get_local_stat(opened_path) 69 | except 
FileNotFoundError: 70 | print('Unable to find "{}" locally, trying remote...'.format(opened_path)) 71 | self._get_remote_stat(opened_path) 72 | 73 | def _get_local_stat(self, opened_path): 74 | if opened_path.startswith('/'): 75 | opened_path = opened_path[1:] 76 | 77 | final_path = os.path.join(ROOT_FS_PATH, opened_path) 78 | if not os.path.exists(final_path): 79 | raise FileNotFoundError() 80 | 81 | s = os.stat(final_path) 82 | self.owner = s.st_uid 83 | self.group = s.st_gid 84 | self.perms = s.st_mode 85 | self.size = s.st_size 86 | 87 | if os.path.isdir(final_path): 88 | self.is_dir = True 89 | return 90 | 91 | self.file_type = magic.from_file(final_path) 92 | 93 | def _get_remote_stat(self, opened_path): 94 | # Stat the file on the remote device 95 | cmd = 'node stat_file.js "{}"'.format(opened_path) 96 | r = delegator.run(cmd) 97 | 98 | if 'NotFound' in r.out: 99 | print('Unable to find "{}" remotely!'.format(opened_path)) 100 | self.is_missing = True 101 | return 102 | 103 | try: 104 | s = json.loads(r.out) 105 | except json.decoder.JSONDecodeError: 106 | print(opened_path) 107 | print(r.out) 108 | print(r.err) 109 | raise 110 | 111 | self.owner = s['uid'] 112 | self.group = s['gid'] 113 | self.perms = s['mode'] 114 | self.size = s['size'] 115 | 116 | if 'entries' in s.keys(): 117 | # This is a directory 118 | self.is_dir = True 119 | return 120 | 121 | # Download the file and check its type 122 | cmd = 'node download_file.js "{}" ./tmp'.format(opened_path) 123 | print("Downloading {}...".format(opened_path)) 124 | r = delegator.run(cmd) 125 | print("Done downloading") 126 | self.file_type = magic.from_file('./tmp') 127 | os.unlink('./tmp') 128 | 129 | def create(self): 130 | attributes = '' 131 | if self.is_device_file: 132 | attributes += ':DEVICE_FILE' 133 | if self.is_missing: 134 | attributes += ':MISSING' 135 | if self.is_dir: 136 | attributes += ':DIRECTORY' 137 | return "CREATE (:File{} {{path:'{}', type:'{}', owner:'{}', group:'{}', 
permissions:'{}', size:'{}'}})".format(attributes, self.opened_path, self.file_type, self.owner, self.group, oct(self.perms), self.size) 138 | 139 | 140 | processes = {} 141 | files = {} 142 | 143 | graph = Graph(NEO4J_HOST, auth=(NEO4J_USERNAME, NEO4J_PASSWORD)) 144 | 145 | with open(LOGFILE, 'r') as f: 146 | for line in f.read().splitlines(): 147 | tx = graph.begin() 148 | 149 | count = line.split()[0] 150 | 151 | try: 152 | process = line.split('Process: "')[1].split('", Path:')[0] 153 | except IndexError: 154 | print("Invalid line:\n\t{}\n".format(line)) 155 | continue 156 | 157 | opened_path = os.path.normpath(line.split('Path: "')[1].split('"')[0]) 158 | 159 | print('Process: "{}", Path: "{}"'.format(process, opened_path)) 160 | 161 | if process not in processes.keys(): 162 | p = Process(process) 163 | processes[process] = p 164 | # print(p.create()) 165 | tx.run(p.create()) 166 | else: 167 | p = processes[process] 168 | 169 | if opened_path not in files.keys(): 170 | the_f = Filename(opened_path) 171 | files[opened_path] = the_f 172 | # print(the_f.create()) 173 | tx.run(the_f.create()) 174 | else: 175 | the_f = files[opened_path] 176 | 177 | tx.run("MATCH (p:Process {{name: '{}'}}), (f:File {{path: '{}'}}) CREATE (p)-[:OPENED {{count:{}}}]->(f)".format(p.name, the_f.opened_path, count)) 178 | graph.commit(tx) 179 | --------------------------------------------------------------------------------