├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── config.yml │ ├── feature_request.md │ └── question.md └── pull_request_template.md ├── .gitignore ├── .gitlab-ci.yml ├── LICENSE ├── README.md ├── app ├── __init__.py ├── arsenal_svc.py └── parsers │ ├── __init__.py │ ├── endpoint.py │ ├── ipaddr.py │ ├── nmap.py │ ├── ss.py │ └── torchserve_api.py ├── conf └── default.yml ├── data ├── abilities │ ├── collection │ │ ├── 15f1db6a-6cb2-40f0-9651-4ee90bab949e.yml │ │ ├── 60aeed61-6ea9-4dbb-afe5-2c674820de35.yml │ │ └── 7491aaeb-284c-4f1d-ad1e-db73d539071d.yml │ ├── command-and-control │ │ ├── 244d2f34-7b6f-4c46-8555-3038f483936f.yml │ │ ├── 4aa55a9e-b37d-4e41-9a4d-f67415d03e34.yml │ │ ├── 8a1913ed-4ddf-497c-8f95-ebf1eb93b518.yml │ │ └── d5406943-15ef-4441-86c0-595490471d83.yml │ ├── discovery │ │ ├── 18708a19-4dc3-4cbc-9ba8-49582f186d72.yml │ │ ├── 8a8d3faa-5989-4491-accc-8ffe53f0fe8d.yml │ │ └── 8bbada08-d5ae-4000-bc22-92c1e35820c1.yml │ ├── ml-attack-staging │ │ └── 5e437f42-cd5f-400f-b65d-d78821f31c69.yml │ ├── ml-model-access │ │ └── 92071bdd-83f6-4945-b41b-1e2835b8ff23.yml │ └── reconnaissance │ │ ├── 03b2fc99-62fb-4fc9-98fd-3717ed6cecd2.yml │ │ ├── 189afbfb-3874-401e-a2fb-9bfafb822f35.yml │ │ ├── 8316e1bf-9158-4604-aa91-007c74d13b6e.yml │ │ └── e84a8a0e-25a6-4ec9-98d9-23bcf42b842d.yml ├── adversaries │ ├── 463fa6a5-3f3c-461c-81fa-ad048c61de10.yml │ ├── 869ffd59-31f7-479d-a59d-1d8aadf9042b.yml │ └── a0a26dc8-d285-4bfa-9516-9dfb2fde2302.yml └── sources │ └── e1d863de-24cc-4937-876c-6d1b12e6b4e8.yml ├── default.yml ├── docs ├── Makefile ├── assets │ ├── A.png │ ├── access.png │ ├── access_2.png │ ├── arsenal_diagram.png │ ├── ml_attack_staging_example_output.png │ └── ml_attack_staging_new_potential_link.png ├── make.bat └── source │ ├── adversary.md │ ├── conf.py │ ├── dev.md │ ├── index.rst │ ├── intro.md │ └── victim.md ├── hook.py ├── images └── overview.png ├── payloads ├── build_and_attack_counterfit_target.py ├── coco_index_to_name.json ├── 
counterfit_install.sh ├── discover_prediction_endpoint.sh ├── discover_torchserve_api.sh ├── image_classifier.py ├── imagenet_name_to_index.json ├── kitten.jpg ├── network_svc_info.sh ├── persons.jpg ├── proto_addr_info.sh ├── socket_info.sh └── tensorflow_file_search.sh ├── requirements.txt ├── templates └── arsenal.html └── tests ├── __init__.py └── parsers ├── __init__.py ├── test_endpoint.py ├── test_ipaddr.py ├── test_nmap.py ├── test_ss.py └── test_torchserve_api.py /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F41E Bug report" 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: wbooth 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. 16 | 17 | **Expected behavior** 18 | A clear and concise description of what you expected to happen. 19 | 20 | **Screenshots** 21 | If applicable, add screenshots to help explain your problem. 22 | 23 | **Desktop (please complete the following information):** 24 | - OS: [e.g. Mac, Windows, Kali] 25 | - Browser [e.g. chrome, safari] 26 | - Version [e.g. 2.8.0] 27 | 28 | **Additional context** 29 | Add any other context about the problem here. 
30 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | contact_links: 2 | - name: Documentation 3 | url: https://caldera.readthedocs.io/en/latest/ 4 | about: Your question may be answered in the documentation 5 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U0001F680 New Feature Request" 3 | about: Propose a new feature 4 | title: '' 5 | labels: feature 6 | assignees: 'wbooth' 7 | 8 | --- 9 | 10 | **What problem are you trying to solve? Please describe.** 11 | > Eg. I'm always frustrated when [...] 12 | 13 | 14 | **The ideal solution: What should the feature should do?** 15 | > a clear and concise description 16 | 17 | 18 | **What category of feature is this?** 19 | 20 | - [ ] UI/UX 21 | - [ ] API 22 | - [ ] Other 23 | 24 | **If you have code or pseudo-code please provide:** 25 | 26 | 27 | ```python 28 | 29 | ``` 30 | 31 | - [ ] Willing to submit a pull request to implement this feature? 32 | 33 | **Additional context** 34 | Add any other context or screenshots about the feature request here. 35 | 36 | Thank you for your contribution! 
37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: "\U00002753 Question" 3 | about: Support questions 4 | title: '' 5 | labels: question 6 | assignees: '' 7 | 8 | --- 9 | 10 | 11 | 12 | 13 | 18 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | ## Description 2 | 3 | (insert summary) 4 | 5 | ## Type of change 6 | 7 | Please delete options that are not relevant. 8 | 9 | - [ ] Bug fix (non-breaking change which fixes an issue) 10 | - [ ] New feature (non-breaking change which adds functionality) 11 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 12 | - [ ] This change requires a documentation update 13 | 14 | ## How Has This Been Tested? 15 | 16 | Please describe the tests that you ran to verify your changes. 
17 | 18 | 19 | ## Checklist: 20 | 21 | - [ ] My code follows the style guidelines of this project 22 | - [ ] I have performed a self-review of my own code 23 | - [ ] I have made corresponding changes to the documentation 24 | - [ ] I have added tests that prove my fix is effective or that my feature works 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.crt 3 | *.pyd 4 | *.DS_Store 5 | *.spec 6 | *.pstat 7 | *.tokens 8 | *__pycache__* 9 | .idea/* 10 | .logs/* 11 | !.logs/.gitkeep 12 | conf/secrets.yml 13 | venv/ -------------------------------------------------------------------------------- /.gitlab-ci.yml: -------------------------------------------------------------------------------- 1 | image: "python:3.9" 2 | 3 | stages: 4 | - Documentation 5 | 6 | # GitLab Pages # 7 | ################ 8 | .pages: 9 | script: 10 | - pip install -U sphinx sphinx-autoapi sphinx-rtd-theme myst-parser furo 11 | - sphinx-build -d docs/build/doctrees docs/source docs/build/html 12 | - mv docs/build/html public/ 13 | 14 | test-pages: 15 | stage: Documentation 16 | tags: 17 | - docker 18 | extends: .pages 19 | needs: [] 20 | artifacts: 21 | expose_as: 'HTML Documentation' 22 | paths: 23 | - public/ 24 | expire_in: 1 week 25 | only: 26 | - merge_requests 27 | 28 | pages: 29 | stage: Documentation 30 | tags: 31 | - pages 32 | extends: .pages 33 | artifacts: 34 | paths: 35 | - public 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 
8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. 
For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # CALDERA plugin: **Arsenal** 2 | 3 | Arsenal is a plugin developed for adversary emulation of AI-enabled systems. This plugin will provide TTPs defined in [`MITRE ATLAS`](https://atlas.mitre.org/) to interface with [`CALDERA`](https://github.com/mitre/caldera). 
4 | 5 | [`Read the full documentation`](https://mitre-atlas.github.io/arsenal/intro.html#arsenal) 6 | 7 | For ml-attack-staging and ml-model-access abilities (see list below), additional information and [`examples`](https://mitre-atlas.github.io/arsenal/adversary.html#adversary-arsenal) on using these abilities are detailed in the arsenal/docs/ folder. 8 | 9 | 10 | *JUNE 2023 included abilities:* 11 | 12 | - Discover remote services 13 | - Discover local services 14 | - Discover available network services 15 | - Search and stage Tensorflow model files/checkpoints 16 | - Discover ML specific services - Torchserve 17 | - Discover GPUs on a system 18 | - Stage a local image for classification 19 | - Install ML-related tools (on C2-server or victim system): Python, [`Microsoft Counterfit`](https://github.com/Azure/counterfit), Tensorflow-CPU, Tesorflow-GPU 20 | - Gain API access to a served model (Torchserve) 21 | - Build a custom Microsoft Counterfit target and stage an attack 22 | 23 | *JUNE 2023 included Adversaries:* 24 | - "Tensormancer" - Discover a Tensorflow model or checkpoint and stage an image for classification 25 | - Exfiltrate a model or checkpoint file 26 | - Stage an adversarial ML attack on a discovered ML model or service using Microsoft Counterfit library 27 | 28 | # Usage 29 | 30 | ## System requirements: 31 | - **Ubuntu 18.04** or **20.04** 32 | - **Python version 3.7+** 33 | 34 | ### Plugin Dependencies: 35 | - [`Caldera Stockpile`](https://github.com/mitre/stockpile): Some Arsenal abilities and adversaries require addition TTPs and requirements include in the Caldera Stockpile. A version more recent than this commit is required for these capabilities: [Stockpile](https://github.com/mitre/stockpile/tree/d128da223aa93f71841bb160ccb09fb9cb590345). 36 | - [`Microsoft Counterfit`](https://github.com/Azure/counterfit): a required dependency to create and run adversarial machine learning attacks. 

import os

from aiohttp_jinja2 import template

from app.utility.base_service import BaseService


class ArsenalService(BaseService):
    """Service backing the Arsenal plugin: holds handles to core CALDERA services and serves the GUI splash page."""

    def __init__(self, services):
        # Grab the core-service handles this plugin relies on.
        self.auth_svc = services.get('auth_svc')
        self.file_svc = services.get('file_svc')
        self.data_svc = services.get('data_svc')
        self.contact_svc = services.get('contact_svc')
        self.arsenal_dir = os.path.join('plugins', 'arsenal')
        # Registering the service returns the plugin logger.
        self.log = self.add_service('arsenal_svc', self)

    @template('arsenal.html')
    async def splash(self, request):
        """Render the plugin's GUI landing page (no template context is needed)."""
        return {}
from app.objects.secondclass.c_fact import Fact
from app.objects.secondclass.c_relationship import Relationship
from app.utility.base_parser import BaseParser


class Parser(BaseParser):
    """
    Parser that connects the inference binding address to a prediction
    endpoint for each served model name.

    Parser expects a newline-separated list of model names, e.g.:
        resnet-18
        fasterrcnn
        densenet
    """

    def parse(self, blob):
        """Create one relationship per mapper for every model name in blob."""
        inference_address = self._get_inference_address()

        relationships = []
        for model_name in self.line(blob):

            model_name = model_name.strip()
            # TorchServe reports 'null' when no model is registered; skip it.
            if model_name == 'null':
                continue

            for mp in self.mappers:
                if 'prediction_endpoint' not in mp.source:
                    # BUGFIX: message previously referred to the wrong fact
                    # ('target.model_server.framework'); the check is on
                    # prediction_endpoint.
                    raise NotImplementedError('only creation of a prediction_endpoint fact is supported')

                pred_endpoint = inference_address + '/predictions/' + model_name
                relationships.append(
                    Relationship(source=Fact(mp.source, pred_endpoint),
                                 edge=mp.edge,
                                 target=Fact(mp.target, None))
                )
        return relationships

    def _get_inference_address(self) -> str:
        """
        Retrieve the single inference IP address from facts used in execution
        of the ability.

        Raises:
            NotImplementedError: if zero or multiple inference facts were supplied.
        """
        address_facts = [used_fact.value for used_fact in self.used_facts
                         if 'inference' in used_fact.name.split('.')[-1]]

        if len(address_facts) != 1:
            raise NotImplementedError(f"Only allow one inference fact to be passed to the ability: {address_facts}")

        # Exactly one fact is guaranteed by the check above (the original
        # 'else None' branch was unreachable).
        return address_facts[0]
from app.objects.secondclass.c_fact import Fact
from app.objects.secondclass.c_relationship import Relationship
from app.utility.base_parser import BaseParser

from ipaddress import ip_address, ip_interface


class Parser(BaseParser):
    """
    Functionality that parses network and IP addresses and stores the pairs as
    a fact if they are a valid address.


    # TODO: remove the '.1' subnet once there is another method to parse dev 'docker0'
    """

    # Addresses never used to bind a reachable service.
    exclude = ['0.0.0.0', '127.0.0.1']
    # Suffixes of broadcast/network/gateway-style addresses to skip.
    subnet_exclude = ['.255', '.0', '.1']

    def parse(self, blob):
        """Create multiple fact mappings, one per mapper, for each network dev-ipaddr pair."""
        relationships = []

        for line in self.line(blob):
            # parser expects "<dev> <cidr>" (ex: eth0 10.X.Y.Y/16)
            dev, dev_cidr_range = line.split()

            # Bare IP is the CIDR string minus any '/prefix' suffix; split is a
            # no-op when there is no '/', so no separate containment check needed.
            dev_raw_ip = dev_cidr_range.split("/")[0]

            if self._is_valid_ip(dev_raw_ip):
                relationships.extend(self._apply_mappers(
                    dev=dev,
                    dev_cidr_range=dev_cidr_range
                ))

        return relationships

    def _apply_mappers(self, dev: str, dev_cidr_range: str):
        """Build one relationship per mapper, converting the CIDR via the target-specific parse option."""
        relationships = [Relationship(
            source=Fact(mp.source, dev),
            edge=mp.edge,
            # The last dotted component of the target fact name selects the converter.
            target=Fact(mp.target, self.parse_opts[mp.target.split('.').pop()](dev_cidr_range))
        ) for mp in self.mappers]

        return relationships

    @property
    def parse_opts(self):
        """Map target fact suffixes to their CIDR-conversion functions."""
        return dict(
            IPv4_address=self.parse_ip_address,
            IPv4_network=self.parse_ip_network
        )

    @staticmethod
    def parse_ip_address(dev_cidr_range: str):
        """Return the bare IP portion of a CIDR string (ex: '10.1.2.3/16' -> '10.1.2.3')."""
        ip_addr = ip_interface(dev_cidr_range).ip
        return str(ip_addr)

    @staticmethod
    def parse_ip_network(dev_cidr_range: str):
        """Return the network of a CIDR string (ex: '10.1.0.3/16' -> '10.1.0.0/16')."""
        ip_network = ip_interface(dev_cidr_range).network
        return str(ip_network)

    def _is_valid_ip(self, raw_ip):
        """Return True if raw_ip parses as an IP address and is not excluded."""
        if raw_ip in self.exclude:
            return False
        # The following hardcoded address suffixes are not used to bind to an
        # interface: ['.255', '.0', '.1']  (str.endswith accepts a tuple).
        if raw_ip.endswith(tuple(self.subnet_exclude)):
            return False
        try:
            ip_address(raw_ip)
        except ValueError:
            # ip_address raises ValueError for malformed input; do not mask
            # unrelated errors with a broad Exception catch.
            return False
        return True
from app.objects.secondclass.c_fact import Fact
from app.objects.secondclass.c_relationship import Relationship
from app.utility.base_parser import BaseParser

import re


class Parser(BaseParser):
    """
    Functionality to parse incoming IPv4 addresses (nmap grepable output) and
    create Fact Sources for binding addresses and associated IPv4 addresses
    if valid.

    Excludes common ports from endpoint discovery.
    """
    # Well-known service ports to ignore when hunting for API endpoints.
    exclude = ['21', '22', '23', '25', '53', '111', '139', '445']

    def parse(self, blob):
        """Create one relationship per mapper holding the discovered binding addresses."""
        disc_bind_addrs = self._parse_to_binding_addresses(blob)

        relationships = []
        for mp in self.mappers:
            if 'binding_address' not in mp.source:
                raise NotImplementedError('only creation of target.api.binding_address fact is supported')
            relationships.append(
                Relationship(source=Fact(mp.source, disc_bind_addrs),
                             edge=mp.edge,
                             target=Fact(mp.target, None))
            )
        return relationships

    def _parse_to_binding_addresses(self, blob):
        """Return a ', '-joined string of '<ip>:<port>' for each open, non-excluded port."""
        bind_addrs = []
        for line in self.line(blob):
            # grepable line: "Host: <ip> (...)\tPorts: <port-list>\t..."
            host_data, ports_data = line.split('\t')[:2]

            host_ip_addr = self.ip(host_data)[0]
            # BUGFIX: str.strip('Ports: ') strips a *character set* from both
            # ends, not the literal prefix; remove the prefix explicitly.
            ports_data = ports_data.strip()
            if ports_data.startswith('Ports:'):
                ports_data = ports_data[len('Ports:'):].strip()
            if len(ports_data) == 0:
                continue

            for port_info in ports_data.split():
                # port entry format: <port>/<state>/<proto>//<svc>/// -> keep port/state
                port, state = re.split('/{1,3}', port_info)[:-3]

                if port in self.exclude or state != 'open':
                    continue

                bind_addrs.append(':'.join([host_ip_addr, port]))

        # join() replaces the original incremental string concatenation.
        return ', '.join(bind_addrs)
from app.objects.secondclass.c_fact import Fact
from app.objects.secondclass.c_relationship import Relationship
from app.utility.base_parser import BaseParser

from ipaddress import ip_address


class Parser(BaseParser):
    """

    Functionality to parse incoming IPv4 addresses (from `ss` socket output)
    and create Fact Sources for binding addresses and associated IPv4
    addresses if valid.

    Excludes common ports from endpoint discovery.

    """
    # specify ports to exclude from API endpoint discovery
    exclude = ['21', '22', '23', '25', '53', '111', '139', '445']

    def parse(self, blob):
        """Create one relationship per mapper holding all discovered binding addresses."""
        collected_address = self._get_collected_address()
        binding_addresses = self._parse_to_binding_addresses(blob, collected_address)

        relationships = []
        for mp in self.mappers:
            if 'binding_address' not in mp.source:
                raise NotImplementedError('only creation of target.api.binding_address fact is supported')
            relationships.append(
                Relationship(
                    source=Fact(mp.source, binding_addresses),
                    edge=mp.edge,
                    target=Fact(mp.target, None)
                )
            )
        return relationships

    def _get_collected_address(self) -> str:
        """
        Retrieves the IP address from facts used in execution of the ability.

        Raises:
            NotImplementedError: if more than one IPv4_address fact was supplied.
        """
        address_facts = [used_fact.value for used_fact in self.used_facts if 'IPv4_address' in used_fact.name]

        if len(address_facts) > 1:
            raise NotImplementedError(f"Only allow one IPv4_address fact to be passed to the ability: {address_facts}")

        collected_address = address_facts[0] if len(address_facts) == 1 else None

        return collected_address

    def _parse_to_binding_addresses(self, blob: str, collected_address: str) -> str:
        """
        Main parsing method. Turns the text blob into a single ', '-joined
        string of '<address>:<port>' binding addresses.
        """
        binding_addresses = []
        for line in self.line(blob):

            sock, _ = line.split()  # parser expects "<local_sock> <peer_sock>"
            local_address, port = sock.rsplit(':', 1)  # split the sock into <address>, <port>
            if port in self.exclude:
                continue

            bind_address = None

            try:
                local_address_obj = ip_address(local_address)
            except ValueError:
                # BUGFIX: was a bare 'except:'; ip_address raises ValueError
                # for non-literal addresses (e.g. the '*' wildcard).
                local_address_obj = None

            if local_address_obj and local_address_obj.version == 4 and not local_address_obj.is_loopback:
                if local_address_obj.is_unspecified:
                    # '0.0.0.0' listens on all interfaces; substitute the
                    # address collected from prior facts.
                    bind_address = collected_address
                else:
                    bind_address = local_address
            elif local_address_obj is None:
                if local_address == '*':
                    bind_address = collected_address

            if bind_address is not None:
                binding_addresses.append(':'.join([bind_address, port]))

        # join() replaces the original incremental string concatenation.
        return ', '.join(binding_addresses)
When TorchServe (TS) starts 16 | it starts two web services: 17 | - Inference API 18 | - Management API 19 | """ 20 | 21 | def parse(self, blob): 22 | relationships = [] 23 | for line in self.line(blob): 24 | api_type, bind_addr = line.split() 25 | for mp in self.mappers: 26 | if 'model_server.framework' not in mp.source: 27 | raise NotImplementedError('only creation of target.model_server.framework fact is supported') 28 | target_type = self._map_target_type(mp) 29 | if target_type in api_type.lower(): 30 | relationships.append( 31 | Relationship( 32 | source=Fact(mp.source, 'TorchServe'), 33 | edge=mp.edge, 34 | target=Fact(mp.target, bind_addr) 35 | ) 36 | ) 37 | return relationships 38 | 39 | def _map_target_type(self, map): 40 | """ 41 | Funtionality to extract api endpoint type from target 42 | """ 43 | 44 | targets = map.target.split('.').pop() 45 | target_type = targets.split('_')[0] 46 | 47 | return target_type 48 | -------------------------------------------------------------------------------- /conf/default.yml: -------------------------------------------------------------------------------- 1 | --- 2 | gist_api_key: {} -------------------------------------------------------------------------------- /data/abilities/collection/15f1db6a-6cb2-40f0-9651-4ee90bab949e.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 15f1db6a-6cb2-40f0-9651-4ee90bab949e 4 | name: Find Tensorflow model checkpoint files 5 | description: Locate Tensorflow model checkpoint files 6 | tactic: collection 7 | technique_name: Data from local system 8 | technique_id: AML.T0037 9 | platforms: 10 | darwin: 11 | sh: 12 | command: | 13 | find /Users -name '*.ckpt.*' -type f -not -path '*/\.*' 2>&1 | grep -v "Permission denied" | grep -v "Operation not permitted" 14 | parsers: 15 | plugins.stockpile.app.parsers.find: 16 | - source: host.file.path 17 | edge: tensorflow 18 | linux: 19 | sh: 20 | command: | 21 | find /home -name 
'*.ckpt.*' -type f -not -path '*/\.*' 2>&1 | grep -v "Permission denied" | grep -v "Operation not permitted" 22 | -------------------------------------------------------------------------------- /data/abilities/collection/60aeed61-6ea9-4dbb-afe5-2c674820de35.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 60aeed61-6ea9-4dbb-afe5-2c674820de35 4 | name: Search and Stage Tensorflow model files 5 | description: Searches for Tensorflow directories and checkpoint files, and then stages 6 | tactic: collection 7 | technique_id: AML.T0035 8 | technique_name: ML Artifact Collection 9 | platforms: 10 | darwin: 11 | sh: 12 | command: | 13 | chmod +x ./tensorflow_file_search.sh; ./tensorflow_file_search.sh /Users '#{host.dir.staged}' 14 | timeout: 180 15 | payloads: 16 | - tensorflow_file_search.sh 17 | cleanup: | 18 | if [ -d '#{host.dir.staged}' ]; then rm -rf '#{host.dir.staged}/.s'; fi; 19 | parsers: 20 | plugins.stockpile.app.parsers.basic: 21 | - source: host.dir.staged 22 | linux: 23 | sh: 24 | command: | 25 | chmod +x ./tensorflow_file_search.sh; ./tensorflow_file_search.sh /home '#{host.dir.staged}' 26 | timeout: 180 27 | payloads: 28 | - tensorflow_file_search.sh 29 | cleanup: | 30 | if [ -d '#{host.dir.staged}' ]; then rm -rf '#{host.dir.staged}/.s'; fi; 31 | parsers: 32 | plugins.stockpile.app.parsers.basic: 33 | - source: host.dir.staged -------------------------------------------------------------------------------- /data/abilities/collection/7491aaeb-284c-4f1d-ad1e-db73d539071d.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 7491aaeb-284c-4f1d-ad1e-db73d539071d 4 | name: CNN Image Classifier 5 | description: Searches for images and applies an image classifier 6 | tactic: collection 7 | technique: 8 | attack_id: T1074.001 9 | name: "Data Staged: Local Data Staging" 10 | platforms: 11 | windows: 12 | psh: 13 | command: | 14 | python 
./image_classifier.py --dir C:/Users --stage '#{host.dir.staged}' 15 | timeout: 180 16 | payloads: 17 | - image_classifier.py 18 | parsers: 19 | plugins.stockpile.app.parsers.tensorflow_filter_out: 20 | - source: host.dir.staged 21 | darwin: 22 | sh: 23 | command: | 24 | python ./image_classifier.py --dir /Users --stage '#{host.dir.staged}' 25 | timeout: 180 26 | payloads: 27 | - image_classifier.py 28 | parsers: 29 | plugins.stockpile.app.parsers.tensorflow_filter_out: 30 | - source: host.dir.staged 31 | linux: 32 | sh: 33 | command: | 34 | python3 ./image_classifier.py --dir /home --stage '#{host.dir.staged}' 35 | timeout: 180 36 | payloads: 37 | - image_classifier.py 38 | parsers: 39 | plugins.stockpile.app.parsers.tensorflow_filter_out: 40 | - source: host.dir.staged 41 | requirements: 42 | - plugins.stockpile.app.requirements.existential: 43 | - source: host.install.python 44 | - plugins.stockpile.app.requirements.existential: 45 | - source: host.install.tensorflow 46 | -------------------------------------------------------------------------------- /data/abilities/command-and-control/244d2f34-7b6f-4c46-8555-3038f483936f.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 244d2f34-7b6f-4c46-8555-3038f483936f 4 | name: PIP Install Tensorflow-CPU 5 | description: Use pip to install Tensorflow-CPU 6 | tactic: command-and-control 7 | technique: 8 | attack_id: T1105 9 | name: "Ingress Tool Transfer" 10 | platforms: 11 | windows: 12 | psh: 13 | command: | 14 | python -m pip install tensorflow-cpu --quiet --exists-action ignore; 15 | if ($LASTEXITCODE -eq 0) { 16 | echo "cpu"; 17 | } 18 | parsers: 19 | plugins.stockpile.app.parsers.basic: 20 | - source: host.install.tensorflow 21 | darwin: 22 | sh: 23 | command: | 24 | python -m pip install tensorflow-cpu --quiet --exists-action i; 25 | if [[ $LASTEXITCODE -eq 0 ]] ; then 26 | echo "cpu"; 27 | fi 28 | parsers: 29 | plugins.stockpile.app.parsers.basic: 30 | - 
source: host.install.tensorflow 31 | linux: 32 | sh: 33 | command: | 34 | python3 -m pip install tensorflow-cpu --quiet --exists-action i; 35 | if [ $? -eq 0 ]; then 36 | echo "cpu"; 37 | fi 38 | parsers: 39 | plugins.stockpile.app.parsers.basic: 40 | - source: host.install.tensorflow 41 | requirements: 42 | - plugins.stockpile.app.requirements.existential: 43 | - source: host.install.python 44 | -------------------------------------------------------------------------------- /data/abilities/command-and-control/4aa55a9e-b37d-4e41-9a4d-f67415d03e34.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 4aa55a9e-b37d-4e41-9a4d-f67415d03e34 4 | name: Install Python 5 | description: Download and install Python 6 | tactic: command-and-control 7 | technique: 8 | attack_id: T1105 9 | name: "Ingress Tool Transfer" 10 | platforms: 11 | windows: 12 | psh: 13 | command: | 14 | $wc=New-Object System.Net.WebClient; 15 | $output="C:\temp\Python310Installer.exe"; 16 | $wc.DownloadFile("https://www.python.org/ftp/python/3.10.0/python-3.10.0-amd64.exe", $output); 17 | C:\temp\Python310Installer.exe /quiet InstallAllUsers=0 PrependPath=1 Include_test=0; 18 | if ($LASTEXITCODE -eq 0) { 19 | echo "3.10.0"; 20 | } 21 | cleanup: | 22 | rm Python310Installer.exe; 23 | parsers: 24 | plugins.stockpile.app.parsers.basic: 25 | - source: host.install.python 26 | -------------------------------------------------------------------------------- /data/abilities/command-and-control/8a1913ed-4ddf-497c-8f95-ebf1eb93b518.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 8a1913ed-4ddf-497c-8f95-ebf1eb93b518 4 | name: Install Counterfit 5 | description: Creates a virtual environment and installs counterfit into it. 
6 | tactic: command-and-control 7 | technique: 8 | attack_id: T1105 9 | name: "Ingress Tool Transfer" 10 | platforms: 11 | linux: 12 | sh: 13 | command: | 14 | chmod +x ./counterfit_install.sh; ./counterfit_install.sh 15 | payloads: 16 | - counterfit_install.sh 17 | parsers: 18 | plugins.stockpile.app.parsers.basic: 19 | - source: host.install.counterfit 20 | timeout: 2400 21 | -------------------------------------------------------------------------------- /data/abilities/command-and-control/d5406943-15ef-4441-86c0-595490471d83.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: d5406943-15ef-4441-86c0-595490471d83 4 | name: PIP Install Tensorflow-GPU 5 | description: Use pip to install Tensorflow-GPU 6 | tactic: command-and-control 7 | technique: 8 | attack_id: T1105 9 | name: "Ingress Tool Transfer" 10 | platforms: 11 | windows: 12 | psh: 13 | command: | 14 | python -m pip install tensorflow<2.11 --quiet --exists-action ignore; 15 | if ($LASTEXITCODE -eq 0) { 16 | echo "gpu"; 17 | } 18 | parsers: 19 | plugins.stockpile.app.parsers.basic: 20 | - source: host.install.tensorflow 21 | darwin: 22 | sh: 23 | command: | 24 | python -m pip install tensorflow --quiet --exists-action i; 25 | if [[ $LASTEXITCODE -eq 0 ]]; then 26 | echo "gpu"; 27 | fi 28 | parsers: 29 | plugins.stockpile.app.parsers.basic: 30 | - source: host.install.tensorflow 31 | linux: 32 | sh: 33 | command: | 34 | python3 -m pip install tensorflow --quiet --exists-action i; 35 | if [ $? 
-eq 0 ]; then 36 | echo "gpu"; 37 | fi 38 | parsers: 39 | plugins.stockpile.app.parsers.basic: 40 | - source: host.install.tensorflow 41 | requirements: 42 | - plugins.stockpile.app.requirements.existential: 43 | - source: host.gpu.name 44 | - plugins.stockpile.app.requirements.existential: 45 | - source: host.install.python 46 | -------------------------------------------------------------------------------- /data/abilities/discovery/18708a19-4dc3-4cbc-9ba8-49582f186d72.yml: -------------------------------------------------------------------------------- 1 | - id: 18708a19-4dc3-4cbc-9ba8-49582f186d72 2 | name: Discover TorchServe API 3 | description: discover TorchServe API Inference and Management Endpoints 4 | tactic: discovery 5 | technique_name: Discover ML Artifacts 6 | technique_id: AML.T0007 7 | executors: 8 | - platform: linux 9 | name: sh 10 | command: | 11 | chmod +x discover_torchserve_api.sh && ./discover_torchserve_api.sh "#{target.api.binding_address_list}" 12 | parsers: 13 | - module: plugins.arsenal.app.parsers.torchserve_api 14 | parserconfigs: 15 | - source: target.model_server.framework 16 | edge: has_inference_address 17 | target: target.model_server.inference_address 18 | - source: target.model_server.framework 19 | edge: has_management_address 20 | target: target.model_server.management_address 21 | payloads: [discover_torchserve_api.sh] 22 | cleanup: [rm discover_torchserve_api.sh] 23 | timeout: 60 24 | requirements: 25 | - module: plugins.stockpile.app.requirements.paw_provenance 26 | relationship_match: 27 | - source: target.api.binding_address_list -------------------------------------------------------------------------------- /data/abilities/discovery/8a8d3faa-5989-4491-accc-8ffe53f0fe8d.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 8a8d3faa-5989-4491-accc-8ffe53f0fe8d 4 | name: Determine Python3 version 5 | description: Determine Python3 is installed and version 6 | tactic: 
discovery 7 | technique: 8 | attack_id: T1518 9 | name: "Software Discovery" 10 | platforms: 11 | windows: 12 | psh: 13 | command: python --version 14 | parsers: 15 | plugins.stockpile.app.parsers.basic: 16 | - source: host.install.python 17 | linux: 18 | sh: 19 | command: python3 --version 20 | parsers: 21 | plugins.stockpile.app.parsers.basic: 22 | - source: host.install.python 23 | darwin: 24 | sh: 25 | command: python --version 26 | parsers: 27 | plugins.stockpile.app.parsers.basic: 28 | - source: host.install.python -------------------------------------------------------------------------------- /data/abilities/discovery/8bbada08-d5ae-4000-bc22-92c1e35820c1.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | - id: 8bbada08-d5ae-4000-bc22-92c1e35820c1 4 | name: Discover GPUs present 5 | description: Discover GPUs present 6 | tactic: discovery 7 | technique: 8 | attack_id: T1082 9 | name: System Information Discovery 10 | platforms: 11 | windows: 12 | psh: 13 | command: | 14 | (Get-WmiObject Win32_VideoController).Name 15 | parsers: 16 | plugins.stockpile.app.parsers.json: 17 | - source: host.gpu.name 18 | darwin: 19 | sh: 20 | command: | 21 | system_profiler SPDisplaysDataType 22 | parsers: 23 | plugins.stockpile.app.parsers.system_profiler_spdisplaysdatatype: 24 | - source: host.gpu.name 25 | linux: 26 | sh: 27 | command: | 28 | lspci | grep VGA 29 | parsers: 30 | plugins.stockpile.app.parsers.lspci: 31 | - source: host.gpu.name 32 | -------------------------------------------------------------------------------- /data/abilities/ml-attack-staging/5e437f42-cd5f-400f-b65d-d78821f31c69.yml: -------------------------------------------------------------------------------- 1 | - id: 5e437f42-cd5f-400f-b65d-d78821f31c69 2 | name: Build and Attack a Custom CFTarget 3 | description: Creates an interface between a target model and the attacks included in 4 | a framework. 
This is done by creating a sub-class of counterfit.core.targets.CFTarget, 5 | where the collected target.model_server.prediction_endpoint fact will be used to 6 | specify the target_endpoint. Once the respective target is built, the attack is 7 | executed against the target. 8 | tactic: ml-attack-staging 9 | technique_name: craft-adversarial-data 10 | technique_id: AML.T0043 11 | executors: 12 | - platform: linux 13 | name: sh 14 | command: | 15 | $HOME/venv/cf_venv/bin/python3 build_and_attack_counterfit_target.py \ 16 | --endpoint #{target.model_server.prediction_endpoint} 17 | payloads: [ 18 | build_and_attack_counterfit_target.py, 19 | kitten.jpg, 20 | persons.jpg, 21 | imagenet_name_to_index.json, 22 | coco_index_to_name.json 23 | ] 24 | cleanup: [rm build_and_attack_counterfit_target.py \ 25 | kitten.jpg \ 26 | persons.jpg \ 27 | imagenet_name_to_index.json \ 28 | coco_index_to_name.json 29 | ] 30 | timeout: 2400 # setting timeout to be 45 minutes; just in case the attack takes a while 31 | requirements: 32 | - module: plugins.stockpile.app.requirements.paw_provenance 33 | relationship_match: 34 | - source: target.model_server.prediction_endpoint 35 | - module: plugins.stockpile.app.requirements.existential 36 | relationship_match: 37 | - source: host.install.counterfit -------------------------------------------------------------------------------- /data/abilities/ml-model-access/92071bdd-83f6-4945-b41b-1e2835b8ff23.yml: -------------------------------------------------------------------------------- 1 | - id: 92071bdd-83f6-4945-b41b-1e2835b8ff23 2 | name: ML Model Inference API Access 3 | description: Gain access to TorchServe prediction endpoint 4 | tactic: ml-model-access 5 | technique_name: ML Model Inference API Access 6 | technique_id: AML.T0040 7 | executors: 8 | - platform: linux 9 | name: sh 10 | command: | 11 | echo #{target.model_server.inference_address} >/dev/null 2>&1 && chmod +x discover_prediction_endpoint.sh && 
./discover_prediction_endpoint.sh "#{target.model_server.management_address}" 12 | parsers: 13 | - module: plugins.arsenal.app.parsers.endpoint 14 | parserconfigs: 15 | - source: target.model_server.prediction_endpoint 16 | payloads: [discover_prediction_endpoint.sh] 17 | cleanup: [rm discover_prediction_endpoint.sh] 18 | timeout: 300 19 | repeatable: false 20 | requirements: 21 | - module: plugins.stockpile.app.requirements.paw_provenance 22 | relationship_match: 23 | - source: target.model_server.inference_address 24 | - module: plugins.stockpile.app.requirements.paw_provenance 25 | relationship_match: 26 | - source: target.model_server.management_address -------------------------------------------------------------------------------- /data/abilities/reconnaissance/03b2fc99-62fb-4fc9-98fd-3717ed6cecd2.yml: -------------------------------------------------------------------------------- 1 | - id: 03b2fc99-62fb-4fc9-98fd-3717ed6cecd2 2 | name: Gather Information for Protocol Addresses 3 | description: 4 | Gathers the IP address and netmask (in CIDR notation) for each IP address 5 | available on the machine. Only IPs with "scope global" (valid everywhere) are 6 | considered. 
7 | tactic: reconnaissance 8 | technique_name: Active Scanning 9 | technique_id: AML.T0006 10 | executors: 11 | - platform: linux 12 | name: sh 13 | command: | 14 | chmod +x proto_addr_info.sh && ./proto_addr_info.sh 15 | parsers: 16 | - module: plugins.arsenal.app.parsers.ipaddr 17 | parserconfigs: 18 | - source: host.network_interface.name 19 | edge: has_IPv4_address 20 | target: host.network_interface.IPv4_address 21 | - source: host.network_interface.name 22 | edge: has_IPv4_network 23 | target: host.network_interface.IPv4_network 24 | payloads: [proto_addr_info.sh] 25 | cleanup: [rm proto_addr_info.sh] 26 | singleton: true -------------------------------------------------------------------------------- /data/abilities/reconnaissance/189afbfb-3874-401e-a2fb-9bfafb822f35.yml: -------------------------------------------------------------------------------- 1 | - id: 189afbfb-3874-401e-a2fb-9bfafb822f35 2 | name: Gather Information for Remote Services 3 | description: Gathers information on the status ("Up", "Down", etc.) of other hosts on the victim's 4 | network by executing a "ping scan". Then, a "port scan" is executed on "Up" hosts to gather 5 | information for remote services and collect values for the target.api.binding_address fact. 
6 | tactic: reconnaissance 7 | technique_id: AML.T0006 8 | technique_name: Active Scanning 9 | executors: 10 | - platform: linux 11 | name: sh 12 | command: | 13 | chmod +x network_svc_info.sh && ./network_svc_info.sh #{host.network_interface.IPv4_network} 14 | parsers: 15 | - module: plugins.arsenal.app.parsers.nmap 16 | parserconfigs: 17 | - source: target.api.binding_address_list 18 | payloads: [network_svc_info.sh] 19 | cleanup: [rm network_svc_info.sh] 20 | timeout: '600' # setting timeout to 10minutes for now (scan may take time) 21 | requirements: 22 | - module: plugins.stockpile.app.requirements.paw_provenance 23 | relationship_match: 24 | - source: host.network_interface.IPv4_network -------------------------------------------------------------------------------- /data/abilities/reconnaissance/8316e1bf-9158-4604-aa91-007c74d13b6e.yml: -------------------------------------------------------------------------------- 1 | - id: 8316e1bf-9158-4604-aa91-007c74d13b6e 2 | name: Gather Information for TCP Sockets 3 | description: 4 | Show TCP connections (-t) in listening (-l) state, without resolving the IP addresses 5 | and the port number (-n). 
6 | tactic: reconnaissance 7 | technique_name: Active Scanning 8 | technique_id: AML.T0006 9 | executors: 10 | - platform: linux 11 | name: sh 12 | command: | 13 | echo #{host.network_interface.IPv4_address} >/dev/null 2>&1 && chmod +x socket_info.sh && ./socket_info.sh 14 | parsers: 15 | - module: plugins.arsenal.app.parsers.ss 16 | parserconfigs: 17 | - source: target.api.binding_address_list 18 | payloads: [socket_info.sh] 19 | cleanup: [rm socket_info.sh] 20 | requirements: 21 | - module: plugins.stockpile.app.requirements.paw_provenance 22 | relationship_match: 23 | - source: host.network_interface.IPv4_address 24 | singleton: true -------------------------------------------------------------------------------- /data/abilities/reconnaissance/e84a8a0e-25a6-4ec9-98d9-23bcf42b842d.yml: -------------------------------------------------------------------------------- 1 | - id: e84a8a0e-25a6-4ec9-98d9-23bcf42b842d 2 | name: List Network Interfaces 3 | description: Uses the "kernel and system information virtual filesystem" (/sys) 4 | to quickly list the available (physical or virtual) network interfaces. 5 | tactic: reconnaissance 6 | technique_name: Active Scanning 7 | technique_id: AML.T0006 8 | executors: 9 | - platform: linux 10 | name: sh 11 | command: ls -1 /sys/class/net 2> /dev/null 12 | parsers: 13 | - module: plugins.stockpile.app.parsers.basic 14 | parserconfigs: 15 | - source: host.network_interface.name 16 | singleton: true -------------------------------------------------------------------------------- /data/adversaries/463fa6a5-3f3c-461c-81fa-ad048c61de10.yml: -------------------------------------------------------------------------------- 1 | adversary_id: 463fa6a5-3f3c-461c-81fa-ad048c61de10 2 | name: ML Model Evader 3 | description: The adversary's end goal is to evade a downstream task where machine 4 | learning is utilized. 
The adversary will attempt to discover any endpoint (s) that 5 | provide legitimate access to the ML Model Inference API by simultaneously performing 6 | remote service discovery (on victim's network) and local service discovery (on victim's 7 | host). 8 | atomic_ordering: 9 | - 8a1913ed-4ddf-497c-8f95-ebf1eb93b518 10 | - 03b2fc99-62fb-4fc9-98fd-3717ed6cecd2 11 | - 8316e1bf-9158-4604-aa91-007c74d13b6e 12 | - 189afbfb-3874-401e-a2fb-9bfafb822f35 13 | - 18708a19-4dc3-4cbc-9ba8-49582f186d72 14 | - 92071bdd-83f6-4945-b41b-1e2835b8ff23 15 | - 5e437f42-cd5f-400f-b65d-d78821f31c69 16 | objective: 495a9828-cab1-44dd-a0ca-66e58177d8cc 17 | tags: [] 18 | -------------------------------------------------------------------------------- /data/adversaries/869ffd59-31f7-479d-a59d-1d8aadf9042b.yml: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | id: 869ffd59-31f7-479d-a59d-1d8aadf9042b 4 | name: Tensormancer 5 | description: Adversary used for demoing Tensorflow neural network abilities 6 | atomic_ordering: 7 | - 6469befa-748a-4b9c-a96d-f191fde47d89 # Create staging directory 8 | - 8bbada08-d5ae-4000-bc22-92c1e35820c1 # Find GPU 9 | - 15f1db6a-6cb2-40f0-9651-4ee90bab949e # Locate Tensorflow checkpoint files 10 | - 60aeed61-6ea9-4dbb-afe5-2c674820de35 # Find Tensorflow model and checkpoint files, and stage 11 | - 4aa55a9e-b37d-4e41-9a4d-f67415d03e34 # Install Python 12 | - 8a8d3faa-5989-4491-accc-8ffe53f0fe8d # Determine Python version (for MacOS and Linux as python is already installed) 13 | - d5406943-15ef-4441-86c0-595490471d83 # Install Tensorflow-GPU 14 | - 244d2f34-7b6f-4c46-8555-3038f483936f # Install Tensorflow-CPU 15 | - 7491aaeb-284c-4f1d-ad1e-db73d539071d # Classify images with CNN, stage matches 16 | - 300157e5-f4ad-4569-b533-9d1fa0e74d74 # Compress stage directory 17 | - ea713bc4-63f0-491c-9a6f-0b01d560b87e # Exfil staged directory 18 | -------------------------------------------------------------------------------- 
/data/adversaries/a0a26dc8-d285-4bfa-9516-9dfb2fde2302.yml: -------------------------------------------------------------------------------- 1 | adversary_id: a0a26dc8-d285-4bfa-9516-9dfb2fde2302 2 | name: ML Model Thief 3 | description: Searches for machine learning model files, stages, and exfiltrates them. 4 | atomic_ordering: 5 | - 90c2efaa-8205-480d-8bb6-61d90dbaf81b 6 | - 6469befa-748a-4b9c-a96d-f191fde47d89 7 | - 4e97e699-93d7-4040-b5a3-2e906a58199e 8 | - 300157e5-f4ad-4569-b533-9d1fa0e74d74 9 | - ea713bc4-63f0-491c-9a6f-0b01d560b87e 10 | objective: 495a9828-cab1-44dd-a0ca-66e58177d8cc 11 | tags: [] 12 | -------------------------------------------------------------------------------- /data/sources/e1d863de-24cc-4937-876c-6d1b12e6b4e8.yml: -------------------------------------------------------------------------------- 1 | facts: 2 | - trait: file.sensitive.extension 3 | value: pkl 4 | - trait: file.sensitive.extension 5 | value: pth 6 | - trait: file.sensitive.extension 7 | value: pt 8 | - trait: file.sensitive.extension 9 | value: onnx 10 | - trait: file.sensitive.extension 11 | value: tflite 12 | - trait: file.sensitive.extension 13 | value: pb 14 | - trait: file.sensitive.extension 15 | value: h5 16 | - trait: file.sensitive.extension 17 | value: hdf5 18 | - trait: file.sensitive.extension 19 | value: mar 20 | rules: [] 21 | relationships: [] 22 | plugin: 'arsenal' 23 | id: e1d863de-24cc-4937-876c-6d1b12e6b4e8 24 | name: arsenal 25 | -------------------------------------------------------------------------------- /default.yml: -------------------------------------------------------------------------------- 1 | ability_refresh: 1 2 | api_key_blue: BLUEADMIN123 3 | api_key_red: ADMIN123 4 | app.contact.dns.domain: mycaldera.caldera 5 | app.contact.dns.socket: 0.0.0.0:8853 6 | app.contact.ftp.host: 0.0.0.0 7 | app.contact.ftp.port: 2222 8 | app.contact.ftp.pword: caldera 9 | app.contact.ftp.server.dir: ftp_dir 10 | app.contact.ftp.user: caldera_user 11 | 
app.contact.gist: API_KEY 12 | app.contact.html: /weather 13 | app.contact.http: http://0.0.0.0:8888 14 | app.contact.slack.api_key: SLACK_TOKEN 15 | app.contact.slack.bot_id: SLACK_BOT_ID 16 | app.contact.slack.channel_id: SLACK_CHANNEL_ID 17 | app.contact.tcp: 0.0.0.0:7010 18 | app.contact.tunnel.ssh.host_key_file: REPLACE_WITH_KEY_FILE_PATH 19 | app.contact.tunnel.ssh.host_key_passphrase: REPLACE_WITH_KEY_FILE_PASSPHRASE 20 | app.contact.tunnel.ssh.socket: 0.0.0.0:8022 21 | app.contact.tunnel.ssh.user_name: sandcat 22 | app.contact.tunnel.ssh.user_password: s4ndc4t! 23 | app.contact.udp: 0.0.0.0:7011 24 | app.contact.websocket: 0.0.0.0:7012 25 | auth.login.handler.module: default 26 | crypt_salt: REPLACE_WITH_RANDOM_VALUE 27 | encryption_key: ADMIN123 28 | exfil_dir: /tmp/caldera 29 | host: 0.0.0.0 30 | objects.planners.default: atomic 31 | plugins: 32 | - access 33 | - arsenal 34 | - almanac 35 | - atomic 36 | - compass 37 | - debrief 38 | - fieldmanual 39 | - manx 40 | - response 41 | - sandcat 42 | - training 43 | - stockpile 44 | port: 8888 45 | reports_dir: /tmp 46 | requirements: 47 | go: 48 | command: go version 49 | type: installed_program 50 | version: 1.11 51 | python: 52 | attr: version 53 | module: sys 54 | type: python_module 55 | version: 3.7.0 56 | users: 57 | blue: 58 | blue: admin 59 | red: 60 | admin: admin 61 | red: admin 62 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 
12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/assets/A.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/A.png -------------------------------------------------------------------------------- /docs/assets/access.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/access.png -------------------------------------------------------------------------------- /docs/assets/access_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/access_2.png -------------------------------------------------------------------------------- /docs/assets/arsenal_diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/arsenal_diagram.png -------------------------------------------------------------------------------- /docs/assets/ml_attack_staging_example_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/ml_attack_staging_example_output.png 
-------------------------------------------------------------------------------- /docs/assets/ml_attack_staging_new_potential_link.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/docs/assets/ml_attack_staging_new_potential_link.png -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | 4 | pushd %~dp0 5 | 6 | 7 | REM Command file for Sphinx documentation 8 | 9 | 10 | if "%SPHINXBUILD%" == "" ( 11 | 12 | set SPHINXBUILD=sphinx-build 13 | 14 | ) 15 | 16 | set SOURCEDIR=source 17 | 18 | set BUILDDIR=build 19 | 20 | 21 | if "%1" == "" goto help 22 | 23 | 24 | %SPHINXBUILD% >NUL 2>NUL 25 | 26 | if errorlevel 9009 ( 27 | 28 | echo. 29 | 30 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 31 | 32 | echo.installed, then set the SPHINXBUILD environment variable to point 33 | 34 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 35 | 36 | echo.may add the Sphinx directory to PATH. 37 | 38 | echo. 39 | 40 | echo.If you don't have Sphinx installed, grab it from 41 | 42 | echo.https://www.sphinx-doc.org/ 43 | 44 | exit /b 1 45 | 46 | ) 47 | 48 | 49 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 50 | 51 | goto end 52 | 53 | 54 | :help 55 | 56 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 57 | 58 | 59 | :end 60 | 61 | popd 62 | -------------------------------------------------------------------------------- /docs/source/adversary.md: -------------------------------------------------------------------------------- 1 | # Autonomous Adversary Profiles 2 | 3 | This plugin focuses on using **both** traditional cybersecurity TTPs with AI/ML specific TTPs to Red-Team a system that is AI-enabled. 
It is our collected understanding that AI/ML algorithms and artifacts are rarely stand-alone, and adversaries in-the-wild will use traditional cybersecurity tactics and techniques alongside AI/ML tactics and techniques in order to achieve their goals. For more examples of these attacks on real-world systems, please navigate to the [`MITRE ATLAS Case Studies`](https://atlas.mitre.org/studies) page. 4 | 5 | All `arsenal` adversaries use a combination of TTPs from MITRE ATT&CK (plugin: [`stockpile`](https://github.com/mitre/stockpile)) and MITRE ATLAS (plugin: arsenal). 6 | 7 | Below is a description and example of the current autonomous adversary profiles currently implemented: 8 | 9 | # Tensormancer 10 | An adversary profile to demo neural network abilities using the Tensorflow library. 11 | 12 | *Abilities*: 13 | 14 | 1. [`Create a staging directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/collection/6469befa-748a-4b9c-a96d-f191fde47d89.yml) for exfiltration. 15 | 2. Discover GPUs present 16 | 3. Find Tensorflow model checkpoint files with the extension: `.ckpt` 17 | 4. Search and Stage Tensorflow model files 18 | - Searches for Tensorflow directories and checkpoint files, and then stages for exfiltration. 19 | 5. Install Python 20 | - Download and install Python and its dependencies (`Python 3.7+`) where the agent is deployed. 21 | 6. Determine Python3 version 22 | - Determine Python3 is installed and version (`Python 3.7+`) where the agent is deployed. 23 | 7. PIP Install Tensorflow-GPU 24 | - Use pip to install Tensorflow-GPU 25 | 8. PIP Install Tensorflow-CPU 26 | - Use pip to install Tensorflow-CPU 27 | 9. CNN Image Classifier 28 | - Searches for images and applies an image classifier 29 | 10. [`Compress staged directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/exfiltration/300157e5-f4ad-4569-b533-9d1fa0e74d74.yml) 30 | - Compress a directory on the file system 31 | 11.
[`Exfil staged directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/exfiltration/ea713bc4-63f0-491c-9a6f-0b01d560b87e.yml) 32 | - exfiltrate over the C2 channel 33 | 34 | 35 | # ML Model Thief 36 | An adversary profile to find any hosted ML algorithms with file extensions matching: `.mar`, `.pth`, `.pt`, `.onnx`, `.pkl`, `.tflite`, `.pb`, `.hdf5` and exfiltrate the algorithm back to the C2 server. 37 | 38 | 1. [`Find Files`](https://github.com/mitre/stockpile/blob/master/data/abilities/collection/90c2efaa-8205-480d-8bb6-61d90dbaf81b.yml) 39 | 2. [`Create staging directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/collection/6469befa-748a-4b9c-a96d-f191fde47d89.yml) 40 | 3. [`Stage sensitive files`](https://github.com/mitre/stockpile/blob/master/data/abilities/collection/4e97e699-93d7-4040-b5a3-2e906a58199e.yml) 41 | 4. [`Compress staged directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/exfiltration/300157e5-f4ad-4569-b533-9d1fa0e74d74.yml) 42 | 5. [`Exfil staged directory`](https://github.com/mitre/stockpile/blob/master/data/abilities/exfiltration/ea713bc4-63f0-491c-9a6f-0b01d560b87e.yml) 43 | - exfiltrate over the C2 channel 44 | 45 | # ML Model Evader 46 | An adversary profile to evade correction classification or detection of a machine learning algorithm using the Microsoft Counterfit library. It is recommended to use the [`ML-Vulhub Example-00`](https://github.com/mitre-atlas/ml-vulhub/tree/main/envs/example-00-ml-dev) or the [`ML-Vulhub Example-01`](https://github.com/mitre-atlas/ml-vulhub/tree/main/envs/example-01-ml-dev) example environments in conjuction with this adversary profile. The set-up of this example vulnerable environment is detailed in [`caldera-atlas`](https://github.com/mitre-atlas/caldera-atlas). 47 | 48 | *Abilities*: 49 | 50 | 1. 
Find or Install Microsoft Counterfit package 51 | - This package is installed as a python-venv within the C2 server at installation via [`caldera-atlas`](https://github.com/mitre-atlas/caldera-atlas), this ability checks for the virtual environment and packages installed, and if not found will install the necessary dependencies. This is esspecially useful for installation on a victim system for processing externally to the C2 server. 52 | 2. Gather Information for Protocol Addresses 53 | - Gathers the IP address and netmask (in CIDR notation) for each IP address 54 | available on system where the agent is deployed. Only IPs with "scope global" (valid everywhere) are considered. 55 | 3. Gather Information for TCP sockets 56 | - Show TCP connections (-t) in listening (-l) state, without resolving the IP addresses and the port number (-n). 57 | 4. Gather Information for Remote Services 58 | - Gathers information on the status ("Up", "Down", etc.) of other hosts on the network where the agent is deployed by executing a "ping scan". Then, a "port scan" is executed on "Up" hosts to gather information for remote services. 59 | 5. Discover Torchserve API 60 | - This ability specifically looks for any Torshserve API endpoints that may be hosting models. If found, it will create Facts with the API endpoint and algorithm for future API access. Torchserve has two API endpoints, an Inference API and a Management API, both are needed information for gaining information about hosted algorithms and sending data for inference. A single API may host multiple algorithms. 61 | 6. ML Model Inference API Access 62 | - This ability maps the algorithms hosted via the Torchserve Management API to the prediction endpoint for the Torchserve Inference API. 63 | 7. Build and Attack a Custom CFTarget (Counterfit Target) 64 | - This ability creates Counterfit Targets from any and all found algorithms and API endpoints. 
From these targets, users can choose which endpoints and data (images) they would like to use to generate Adversarial Attacks using the Counterfit library. Current imagery attacks that are autonomously generated are the black-box optimizers: hop-skip-jump, boundary, and copycat-cnn. 65 | 66 | - **If you are using the [`ML-Vulhub Example-01`](https://github.com/mitre-atlas/ml-vulhub/tree/main/envs/example-01-ml-dev) example environment, you will need to **manuall** add this ability to select which found model you would like to craft adversarial data for. You can run this ability multiple times, selecting new models for each new Link Command, as shown here:** 67 | 68 | ![ml_attack_staging](../assets/ml_attack_staging_new_potential_link.png) 69 | 70 | An example output of Adding this potential Link Command is: 71 | 72 | ![ml_attack_staging_2](../assets/ml_attack_staging_example_output.png) -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/main/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
12 | # 13 | import os 14 | from datetime import date 15 | 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('../../')) 18 | 19 | # -- Project information ----------------------------------------------------- 20 | 21 | project = "Arsenal" 22 | copyright = f"{date.today().year}, ATLAS" 23 | author = ( 24 | "Marissa Dotter, Keith Manville, Aidan Fennelly" 25 | ) 26 | 27 | # The full version, including alpha/beta/rc tags 28 | if os.environ.get("CI_COMMIT_TAG"): 29 | release = os.environ["CI_COMMIT_TAG"] 30 | else: 31 | release = "latest" 32 | 33 | # -- General configuration --------------------------------------------------- 34 | 35 | # Add any Sphinx extension module names here, as strings. They can be 36 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 37 | # ones. 38 | extensions = [ 39 | "sphinx.ext.autodoc", 40 | "sphinx.ext.napoleon", 41 | "autoapi.extension", 42 | "myst_parser", 43 | "sphinx.ext.intersphinx", 44 | ] 45 | 46 | 47 | intersphinx_mapping = { 48 | "python": ("https://docs.python.org/", None), 49 | "PIL": ("https://pillow.readthedocs.io/en/stable/", None) 50 | } 51 | tls_verify = False 52 | 53 | # Autodoc settings 54 | autodoc_typehints = "description" 55 | 56 | # Autoapi settings 57 | autoapi_options = ["members", "undoc-members", "show-inheritance", "show-module-summary"] 58 | autoapi_python_class_content = "both" 59 | autoapi_type = "python" 60 | autoapi_dirs = ["../"] 61 | 62 | # Add any paths that contain templates here, relative to this directory. 63 | templates_path = ["_templates"] 64 | 65 | # List of patterns, relative to source directory, that match files and 66 | # directories to ignore when looking for source files. 67 | # This pattern also affects html_static_path and html_extra_path. 68 | exclude_patterns = ["**/_tests"] 69 | 70 | # -- Options for HTML output ------------------------------------------------- 71 | 72 | # The theme to use for HTML and HTML Help pages. 
See the documentation for 73 | # a list of builtin themes. 74 | # 75 | html_theme = "furo" 76 | html_logo = "../assets/A.png" 77 | html_theme_options = { 78 | "logo_only": False, 79 | "display_version": True, 80 | } 81 | # Add any paths that contain custom static files (such as style sheets) here, 82 | # relative to this directory. They are copied after the builtin static files, 83 | # so a file named "default.css" will overwrite the builtin "default.css". 84 | html_static_path = ["_static"] 85 | 86 | # favicon location 87 | html_favicon = "../assets/A.png" 88 | -------------------------------------------------------------------------------- /docs/source/dev.md: -------------------------------------------------------------------------------- 1 | # Developers 2 | 3 | There are multiple ways to run the **arsenal** plugin within **caldera**. The recommended way is mounting the plugin as a *volume* in a Docker container, which allows for active development and modification of the plugin without restarting the CALDERA server. 4 | 5 | *Alternative deployment requires re-starting the `caldera` server when changes to the plugin are made.* 6 | 7 | ## Recommended Installation and Quick Start 8 | Navigate to the [`caldera-atlas`](https://github.com/mitre-atlas/caldera-atlas) repository and follow steps for installing the required repos for development, or use the quick start script. 9 | 10 | 11 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to Arsenal 2 | ============================================== 3 | 4 | Running Arsenal with CALDERA 5 | ---------------------------- 6 | :doc:`intro` 7 | Introduction to Arsenal and instructions on installing the repo and using it with CALDERA. 8 | 9 | 10 | Development 11 | ----------- 12 | 13 | :doc:`dev` 14 | How to use Arsenal and contribute. 
15 | 16 | Red-Teaming 17 | ------------ 18 | :doc:`adversary` 19 | What is an adversary in Arsenal. What actions can they take. 20 | 21 | :doc:`victim` 22 | What is a victim in Arsenal. What actions can they take. 23 | -------------------------------------------------------------------------------- /docs/source/intro.md: -------------------------------------------------------------------------------- 1 | # ARSENAL 2 | 3 | ## **arsenal** is a Machine Learning (ML) plugin to [`CALDERA`](https://github.com/mitre/caldera), that implements Tactics, Techniques, and Procedures (TTPs) specific to ML operations described in [`MITRE ATLAS`](https://atlas.mitre.org/). 4 | 5 | --- 6 | 7 | ## Included ML Libraries 8 | 9 | --- 10 | ### Arsenal implements the following libraries for emulating AI/ML adversarial behavior: 11 | 12 | - [`Counterfit`](https://github.com/Azure/counterfit) - which wraps [`Adversarial Robustness Toolbox (ART)`](https://github.com/Trusted-AI/adversarial-robustness-toolbox), [`TextAttack`](https://github.com/QData/TextAttack), and [`Augly`](https://github.com/facebookresearch/AugLy) 13 | 14 | - [`Vulhub`](https://github.com/vulhub/vulhub) in conjunction with [`ML-Vulhub`](https://github.com/mitre-atlas/ml-vulhub) - deploy vulnerable environments via docker 15 | - [`torch-serve`](https://pytorch.org/serve/) - serve any type of model to emulate / red-team 16 | - [`MinIO`](https://github.com/minio/minio) - deploy object stores and S3 buckets to immitate a victim system hosting data and/or models 17 | - [`Almanac`](https://github.com/mitre-atlas/almanac) - generate adversary layers (sequences of operations) based on MITRE ATLAS TTPs. 
18 | 19 | --- 20 | Implemented Abilities 21 | --- 22 | 23 | - [`Reconnaissance: Remote Services`](https://atlas.mitre.org/techniques/AML.T0006) 24 | 25 | This ability enables users to first scan a vicitm's system and collect information about IP addresses that are in use 26 | 27 | - [`Reconnaissance: List Network Interfaces`](https://atlas.mitre.org/techniques/AML.T0006) 28 | 29 | This ability allows users to list available (physical or virtual) network interfaces 30 | 31 | - [`Reconnaissance: Gather Information for Protocol Addresses`](https://atlas.mitre.org/techniques/AML.T0006) 32 | 33 | This ability allows users gather the IP address and netmask (in CIDR notation) for each IP address available on the machine. Only IPs with "scope global" (valid everywhere) are 34 | considered. 35 | 36 | - [`Reconnaissance: Gather Information for TCP Sockets`](https://atlas.mitre.org/techniques/AML.T0006) 37 | 38 | Show TCP connections in listening state, without resolving the IP addresses and the port number. 
39 | 40 | - [`Collection: Find Tensorflow model checkpoint files`](https://atlas.mitre.org/techniques/AML.T0037) 41 | 42 | Locate Tensorflow model checkpoint files 43 | 44 | - [`Collection: Search and Stage Tensorflow model files`](https://atlas.mitre.org/techniques/AML.T0035) 45 | 46 | Searches for Tensorflow directories and checkpoint files, and then stages 47 | 48 | - [`Collection: CNN Image Classifier`](https://attack.mitre.org/techniques/T1074/001/) 49 | 50 | Searches for images and applies an image classifier 51 | 52 | - [`Command and Control: Install Python`](https://attack.mitre.org/techniques/T1105/) 53 | 54 | 55 | - [`Command and Control: Install Counterfit`](https://attack.mitre.org/techniques/T1105/) 56 | 57 | 58 | - [`Command and Control: PIP Install Tensorflow-CPU`](https://attack.mitre.org/techniques/T1105/) 59 | 60 | 61 | - [`Command and Control: PIP Install Tensorflow-GPU`](https://attack.mitre.org/techniques/T1105/) 62 | 63 | - [`Discovery: Determine Python3 version`](https://attack.mitre.org/techniques/T1518/) 64 | 65 | - [`Discovery: Discover GPUs present`](https://attack.mitre.org/techniques/T1082/) 66 | 67 | - [`Discovery: Discover TorchServe API`](https://atlas.mitre.org/techniques/AML.T0007) 68 | 69 | This ability allows a user to discover IP address that host ML services (PyTorchServe) and the model file(s) they are serving. 70 | 71 | - [`ML Model Access: Inference API Access`](https://atlas.mitre.org/techniques/AML.T0040) 72 | 73 | Gain access to TorchServe prediction endpoint 74 | 75 | - [`ML Attack Staging: Build and Attack a Custom CFTarget`](https://atlas.mitre.org/techniques/AML.T0043) 76 | 77 | Creates an interface between a target model and the attacks included in a framework. This is done by creating a sub-class of counterfit.core.targets.CFTarget, where the collected target.model_server.prediction_endpoint fact will be used to specify the target_endpoint. 
Once the respective target is built, the attack is executed against the target. 78 | 79 | 80 | --- 81 | Implemented Autonoumous Adversaries 82 | --- 83 | For implemented autonomous adversaries, navigate to our [`page`](https://mitre-atlas.github.io/arsenal/adversary.html) for more details! 84 | 85 | --- 86 | ## Developers 87 | 88 | If you are a developer and looking to contribute to `arsenal` or modify it for your needs, please navigate to the [`developers page`](https://mitre-atlas.github.io/arsenal/dev.html). 89 | 90 | *Currently, `arsenal` is not a default plugin within `caldera`, and therefore additional setup to integrate with the app is required.* 91 | -------------------------------------------------------------------------------- /docs/source/victim.md: -------------------------------------------------------------------------------- 1 | # Arsenal: Victim 2 | 3 | Arsenal requires a victim system to Red Team (preferrably) hosting a Machine Learning algorithm or API endpoint. 4 | 5 | However, hosting an algorithm is not required, an adversary may be interested in other AI-realted artifacts such as discovering hardware/compute capabilities of an adversary, exfiltrating data hosted on the system, etc. regardless of a hosted algorithm. 6 | 7 | --- 8 | 9 | ### Currently, `arsenal` uses the package [`ml-vulhub`](https://github.com/mitre-atlas/ml-vulhub) to deploy vulnerable environments. 10 | --- 11 | 12 | An example use-case is an adversary that does reconnaissance on a victim's system in order to gather information for targeting. Using `arsenal` within `caldera`, we can use the ability `Gather Information for Remote Services` to first investigate a victim system before deploying an agent to that system. 

from app.utility.base_world import BaseWorld
from plugins.arsenal.app.arsenal_svc import ArsenalService


# Plugin metadata read by the CALDERA core when it loads this plugin.
name = 'Arsenal'
description = 'A plugin of abilities, adversaries, payloads and planners for the ATLAS framework'
address = '/plugin/arsenal/gui'
# APP-level access: the GUI route is available to application users.
access = BaseWorld.Access.APP


async def enable(services):
    """Hook invoked by CALDERA at startup to activate the plugin.

    Instantiates the Arsenal service and registers its splash page with the
    app's HTTP router at the plugin GUI address.

    :param services: CALDERA service registry; must provide 'app_svc'.
    """
    arsenal_svc = ArsenalService(services)
    services.get('app_svc').application.router.add_route('GET', '/plugin/arsenal/gui', arsenal_svc.splash)
def get_output_classes(model_type: str) -> List[str]:
    """Return the ordered output class names for a model task type.

    Maps the task type to its benchmark dataset via DEFAULT_DATASET_MAP and
    loads the class-name -> index map from disk for that dataset.

    Args:
        model_type: task type string, e.g. "Image Classification".

    Returns:
        Class-name strings in index order.

    Raises:
        ValueError: if the task type is unknown or its dataset is unsupported.
    """
    # .get() instead of [] so an unknown task type raises the documented
    # ValueError below rather than a raw KeyError.
    dataset = DEFAULT_DATASET_MAP.get(model_type)
    if dataset == "ImageNet":
        # Use a context manager so the file handle is closed (the original
        # json.load(open(...)) leaked it).
        with open("imagenet_name_to_index.json") as fh:
            labels_map = json.load(fh)
        return list(labels_map.keys())
    raise ValueError(f"Unknown model type: {model_type}")
def get_model_name_from_endpoint(endpoint):
    """Extract the model name from a TorchServe prediction endpoint URL.

    Supports both ".../predictions/{model_name}" and
    ".../predictions/{model_name}/{version}" route shapes.

    Args:
        endpoint: full prediction URL, e.g. "http://host:8080/predictions/resnet-18".

    Returns:
        The model-name path segment, or None if the path matches neither shape.
    """
    segments = endpoint.split('/')
    if segments[-2] == "predictions":
        # endpoint == ".../predictions/{model_name}"
        return segments[-1]
    elif segments[-3] == "predictions":
        # endpoint == ".../predictions/{model_name}/{version}"
        # BUG FIX: was segments[2], which returned the host portion of the
        # URL instead of the model name for versioned routes.
        return segments[-2]
def image_classification(attack_list, endpoint, model_name):
    """Build a TorchServe ImageNet CFTarget and run each requested attack on it.

    Args:
        attack_list: names of Counterfit attacks to execute (e.g. "boundary").
        endpoint: prediction-endpoint URL the target POSTs images to.
        model_name: name used to label the Counterfit target.
    """
    target = TorchServeImageNetClassifier(target_name=model_name, endpoint=endpoint)
    target.load()

    # set num_iter to 60% of default value to speed up attack
    iteration_overrides = {"boundary": 2000, "hop_skip_jump": 5}

    for attack_name in attack_list:
        try:
            print(f"Building attack: {attack_name}...")
            built_attack = Counterfit.build_attack(target, attack_name)
            if attack_name in iteration_overrides:
                built_attack.options.attack_parameters["max_iter"]["current"] = iteration_overrides[attack_name]
            print(f"Running attack on the {target.target_name} CFTarget...")
            Counterfit.run_attack(built_attack)
            print(f"Initial labels: {built_attack.initial_labels}")
            print(f"Final labels: {built_attack.final_labels}")
        except Exception as error:
            CFPrint.failed(f"Failed to run attack {attack_name} with error: {error}")
def main():
    """Entry point: parse args, derive model metadata, and launch attacks."""
    args = setup_args()

    # TODO(afennelly) error checks for correct usage, ie handle bad endpoint
    # NOTE: below will break for Windows OS
    prediction_endpoint = f"http://{args.endpoint}"

    model_name = get_model_name_from_endpoint(prediction_endpoint)
    model_type = get_model_type_from_model_name(model_name)

    # Only image-classification architectures are currently attackable.
    if model_type != "Image Classification":
        return
    image_classification(
        attack_list=args.attacks,
        endpoint=prediction_endpoint,
        model_name=model_name,
    )
meter", 17 | "15": "bench", 18 | "16": "bird", 19 | "17": "cat", 20 | "18": "dog", 21 | "19": "horse", 22 | "20": "sheep", 23 | "21": "cow", 24 | "22": "elephant", 25 | "23": "bear", 26 | "24": "zebra", 27 | "25": "giraffe", 28 | "26": "N/A", 29 | "27": "backpack", 30 | "28": "umbrella", 31 | "29": "N/A", 32 | "30": "N/A", 33 | "31": "handbag", 34 | "32": "tie", 35 | "33": "suitcase", 36 | "34": "frisbee", 37 | "35": "skis", 38 | "36": "snowboard", 39 | "37": "sports ball", 40 | "38": "kite", 41 | "39": "baseball bat", 42 | "40": "baseball glove", 43 | "41": "skateboard", 44 | "42": "surfboard", 45 | "43": "tennis racket", 46 | "44": "bottle", 47 | "45": "N/A", 48 | "46": "wine glass", 49 | "47": "cup", 50 | "48": "fork", 51 | "49": "knife", 52 | "50": "spoon", 53 | "51": "bowl", 54 | "52": "banana", 55 | "53": "apple", 56 | "54": "sandwich", 57 | "55": "orange", 58 | "56": "broccoli", 59 | "57": "carrot", 60 | "58": "hot dog", 61 | "59": "pizza", 62 | "60": "donut", 63 | "61": "cake", 64 | "62": "chair", 65 | "63": "couch", 66 | "64": "potted plant", 67 | "65": "bed", 68 | "66": "N/A", 69 | "67": "dining table", 70 | "68": "N/A", 71 | "69": "N/A", 72 | "70": "toilet", 73 | "71": "N/A", 74 | "72": "tv", 75 | "73": "laptop", 76 | "74": "mouse", 77 | "75": "remote", 78 | "76": "keyboard", 79 | "77": "cell phone", 80 | "78": "microwave", 81 | "79": "oven", 82 | "80": "toaster", 83 | "81": "sink", 84 | "82": "refrigerator", 85 | "83": "N/A", 86 | "84": "book", 87 | "85": "clock", 88 | "86": "vase", 89 | "87": "scissors", 90 | "88": "teddy bear", 91 | "89": "hair drier", 92 | "90": "toothbrush" 93 | } -------------------------------------------------------------------------------- /payloads/counterfit_install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ ! -d "$HOME/venv/cf_venv" ]; then 4 | if [ ! 
#!/bin/bash
# Query a TorchServe Management API base address ($1) for registered models
# and print the modelName, if any. Output is consumed as a CALDERA fact.

if command -v curl &> /dev/null; then
    models_endpoint="$1/models"
    # echo $models_endpoint
    # this is only tested for one model in ip_addr/models
    # Fetch once and reuse the response (the original issued the same
    # request twice); quote expansions to survive unusual characters.
    response="$(curl -s "$models_endpoint")"
    if echo "$response" | grep -q "modelName"; then
        model_name="$(echo "$response" | grep "modelName" | awk -F '"' '{print $4}')"
        echo "$model_name"
    fi
fi
def retrieve_model(model_name: str):
    """Instantiate a pretrained ImageNet Keras classifier by short name.

    Args:
        model_name: one of 'resnet', 'vgg', or 'mobilenet'.

    Returns:
        A tf.keras Model with ImageNet weights (downloaded on first use).
        (Fixed: the original annotated the return as ``dict``, but the
        selected constructor returns a Keras model instance.)

    Raises:
        KeyError: if model_name is not a supported architecture.
    """
    # Constructors are referenced, not called, in the table below; only the
    # selected architecture is instantiated (and its weights downloaded).
    model_constructors = {
        'resnet': tf.keras.applications.resnet50.ResNet50,
        'vgg': tf.keras.applications.vgg16.VGG16,
        'mobilenet': tf.keras.applications.mobilenet_v2.MobileNetV2
    }
    return model_constructors[model_name](weights='imagenet')
def process_file(file: str, model, target_class) -> bool:
    """Return True if the model's top-1 prediction for the image matches target_class.

    Args:
        file: path to an image file; unreadable or non-image files yield False.
        model: a loaded Keras classifier (see retrieve_model).
        target_class: ImageNet class label to match, e.g. "mask".
    """
    try:
        image = read_image(file)
    except Exception:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; unreadable files are still skipped.
        return False
    preds = predict_class(image, model)
    # preds[0][0] is the top-1 (class_id, class_name, score) tuple.
    return preds[0][0][1] == target_class
"electric_ray": 5, 8 | "stingray": 6, 9 | "cock": 7, 10 | "hen": 8, 11 | "ostrich": 9, 12 | "brambling": 10, 13 | "goldfinch": 11, 14 | "house_finch": 12, 15 | "junco": 13, 16 | "indigo_bunting": 14, 17 | "robin": 15, 18 | "bulbul": 16, 19 | "jay": 17, 20 | "magpie": 18, 21 | "chickadee": 19, 22 | "water_ouzel": 20, 23 | "kite": 21, 24 | "bald_eagle": 22, 25 | "vulture": 23, 26 | "great_grey_owl": 24, 27 | "European_fire_salamander": 25, 28 | "common_newt": 26, 29 | "eft": 27, 30 | "spotted_salamander": 28, 31 | "axolotl": 29, 32 | "bullfrog": 30, 33 | "tree_frog": 31, 34 | "tailed_frog": 32, 35 | "loggerhead": 33, 36 | "leatherback_turtle": 34, 37 | "mud_turtle": 35, 38 | "terrapin": 36, 39 | "box_turtle": 37, 40 | "banded_gecko": 38, 41 | "common_iguana": 39, 42 | "American_chameleon": 40, 43 | "whiptail": 41, 44 | "agama": 42, 45 | "frilled_lizard": 43, 46 | "alligator_lizard": 44, 47 | "Gila_monster": 45, 48 | "green_lizard": 46, 49 | "African_chameleon": 47, 50 | "Komodo_dragon": 48, 51 | "African_crocodile": 49, 52 | "American_alligator": 50, 53 | "triceratops": 51, 54 | "thunder_snake": 52, 55 | "ringneck_snake": 53, 56 | "hognose_snake": 54, 57 | "green_snake": 55, 58 | "king_snake": 56, 59 | "garter_snake": 57, 60 | "water_snake": 58, 61 | "vine_snake": 59, 62 | "night_snake": 60, 63 | "boa_constrictor": 61, 64 | "rock_python": 62, 65 | "Indian_cobra": 63, 66 | "green_mamba": 64, 67 | "sea_snake": 65, 68 | "horned_viper": 66, 69 | "diamondback": 67, 70 | "sidewinder": 68, 71 | "trilobite": 69, 72 | "harvestman": 70, 73 | "scorpion": 71, 74 | "black_and_gold_garden_spider": 72, 75 | "barn_spider": 73, 76 | "garden_spider": 74, 77 | "black_widow": 75, 78 | "tarantula": 76, 79 | "wolf_spider": 77, 80 | "tick": 78, 81 | "centipede": 79, 82 | "black_grouse": 80, 83 | "ptarmigan": 81, 84 | "ruffed_grouse": 82, 85 | "prairie_chicken": 83, 86 | "peacock": 84, 87 | "quail": 85, 88 | "partridge": 86, 89 | "African_grey": 87, 90 | "macaw": 88, 91 | 
"sulphur-crested_cockatoo": 89, 92 | "lorikeet": 90, 93 | "coucal": 91, 94 | "bee_eater": 92, 95 | "hornbill": 93, 96 | "hummingbird": 94, 97 | "jacamar": 95, 98 | "toucan": 96, 99 | "drake": 97, 100 | "red-breasted_merganser": 98, 101 | "goose": 99, 102 | "black_swan": 100, 103 | "tusker": 101, 104 | "echidna": 102, 105 | "platypus": 103, 106 | "wallaby": 104, 107 | "koala": 105, 108 | "wombat": 106, 109 | "jellyfish": 107, 110 | "sea_anemone": 108, 111 | "brain_coral": 109, 112 | "flatworm": 110, 113 | "nematode": 111, 114 | "conch": 112, 115 | "snail": 113, 116 | "slug": 114, 117 | "sea_slug": 115, 118 | "chiton": 116, 119 | "chambered_nautilus": 117, 120 | "Dungeness_crab": 118, 121 | "rock_crab": 119, 122 | "fiddler_crab": 120, 123 | "king_crab": 121, 124 | "American_lobster": 122, 125 | "spiny_lobster": 123, 126 | "crayfish": 124, 127 | "hermit_crab": 125, 128 | "isopod": 126, 129 | "white_stork": 127, 130 | "black_stork": 128, 131 | "spoonbill": 129, 132 | "flamingo": 130, 133 | "little_blue_heron": 131, 134 | "American_egret": 132, 135 | "bittern": 133, 136 | "crane": 517, 137 | "limpkin": 135, 138 | "European_gallinule": 136, 139 | "American_coot": 137, 140 | "bustard": 138, 141 | "ruddy_turnstone": 139, 142 | "red-backed_sandpiper": 140, 143 | "redshank": 141, 144 | "dowitcher": 142, 145 | "oystercatcher": 143, 146 | "pelican": 144, 147 | "king_penguin": 145, 148 | "albatross": 146, 149 | "grey_whale": 147, 150 | "killer_whale": 148, 151 | "dugong": 149, 152 | "sea_lion": 150, 153 | "Chihuahua": 151, 154 | "Japanese_spaniel": 152, 155 | "Maltese_dog": 153, 156 | "Pekinese": 154, 157 | "Shih-Tzu": 155, 158 | "Blenheim_spaniel": 156, 159 | "papillon": 157, 160 | "toy_terrier": 158, 161 | "Rhodesian_ridgeback": 159, 162 | "Afghan_hound": 160, 163 | "basset": 161, 164 | "beagle": 162, 165 | "bloodhound": 163, 166 | "bluetick": 164, 167 | "black-and-tan_coonhound": 165, 168 | "Walker_hound": 166, 169 | "English_foxhound": 167, 170 | "redbone": 168, 171 | 
"borzoi": 169, 172 | "Irish_wolfhound": 170, 173 | "Italian_greyhound": 171, 174 | "whippet": 172, 175 | "Ibizan_hound": 173, 176 | "Norwegian_elkhound": 174, 177 | "otterhound": 175, 178 | "Saluki": 176, 179 | "Scottish_deerhound": 177, 180 | "Weimaraner": 178, 181 | "Staffordshire_bullterrier": 179, 182 | "American_Staffordshire_terrier": 180, 183 | "Bedlington_terrier": 181, 184 | "Border_terrier": 182, 185 | "Kerry_blue_terrier": 183, 186 | "Irish_terrier": 184, 187 | "Norfolk_terrier": 185, 188 | "Norwich_terrier": 186, 189 | "Yorkshire_terrier": 187, 190 | "wire-haired_fox_terrier": 188, 191 | "Lakeland_terrier": 189, 192 | "Sealyham_terrier": 190, 193 | "Airedale": 191, 194 | "cairn": 192, 195 | "Australian_terrier": 193, 196 | "Dandie_Dinmont": 194, 197 | "Boston_bull": 195, 198 | "miniature_schnauzer": 196, 199 | "giant_schnauzer": 197, 200 | "standard_schnauzer": 198, 201 | "Scotch_terrier": 199, 202 | "Tibetan_terrier": 200, 203 | "silky_terrier": 201, 204 | "soft-coated_wheaten_terrier": 202, 205 | "West_Highland_white_terrier": 203, 206 | "Lhasa": 204, 207 | "flat-coated_retriever": 205, 208 | "curly-coated_retriever": 206, 209 | "golden_retriever": 207, 210 | "Labrador_retriever": 208, 211 | "Chesapeake_Bay_retriever": 209, 212 | "German_short-haired_pointer": 210, 213 | "vizsla": 211, 214 | "English_setter": 212, 215 | "Irish_setter": 213, 216 | "Gordon_setter": 214, 217 | "Brittany_spaniel": 215, 218 | "clumber": 216, 219 | "English_springer": 217, 220 | "Welsh_springer_spaniel": 218, 221 | "cocker_spaniel": 219, 222 | "Sussex_spaniel": 220, 223 | "Irish_water_spaniel": 221, 224 | "kuvasz": 222, 225 | "schipperke": 223, 226 | "groenendael": 224, 227 | "malinois": 225, 228 | "briard": 226, 229 | "kelpie": 227, 230 | "komondor": 228, 231 | "Old_English_sheepdog": 229, 232 | "Shetland_sheepdog": 230, 233 | "collie": 231, 234 | "Border_collie": 232, 235 | "Bouvier_des_Flandres": 233, 236 | "Rottweiler": 234, 237 | "German_shepherd": 235, 238 | 
"Doberman": 236, 239 | "miniature_pinscher": 237, 240 | "Greater_Swiss_Mountain_dog": 238, 241 | "Bernese_mountain_dog": 239, 242 | "Appenzeller": 240, 243 | "EntleBucher": 241, 244 | "boxer": 242, 245 | "bull_mastiff": 243, 246 | "Tibetan_mastiff": 244, 247 | "French_bulldog": 245, 248 | "Great_Dane": 246, 249 | "Saint_Bernard": 247, 250 | "Eskimo_dog": 248, 251 | "malamute": 249, 252 | "Siberian_husky": 250, 253 | "dalmatian": 251, 254 | "affenpinscher": 252, 255 | "basenji": 253, 256 | "pug": 254, 257 | "Leonberg": 255, 258 | "Newfoundland": 256, 259 | "Great_Pyrenees": 257, 260 | "Samoyed": 258, 261 | "Pomeranian": 259, 262 | "chow": 260, 263 | "keeshond": 261, 264 | "Brabancon_griffon": 262, 265 | "Pembroke": 263, 266 | "Cardigan": 264, 267 | "toy_poodle": 265, 268 | "miniature_poodle": 266, 269 | "standard_poodle": 267, 270 | "Mexican_hairless": 268, 271 | "timber_wolf": 269, 272 | "white_wolf": 270, 273 | "red_wolf": 271, 274 | "coyote": 272, 275 | "dingo": 273, 276 | "dhole": 274, 277 | "African_hunting_dog": 275, 278 | "hyena": 276, 279 | "red_fox": 277, 280 | "kit_fox": 278, 281 | "Arctic_fox": 279, 282 | "grey_fox": 280, 283 | "tabby": 281, 284 | "tiger_cat": 282, 285 | "Persian_cat": 283, 286 | "Siamese_cat": 284, 287 | "Egyptian_cat": 285, 288 | "cougar": 286, 289 | "lynx": 287, 290 | "leopard": 288, 291 | "snow_leopard": 289, 292 | "jaguar": 290, 293 | "lion": 291, 294 | "tiger": 292, 295 | "cheetah": 293, 296 | "brown_bear": 294, 297 | "American_black_bear": 295, 298 | "ice_bear": 296, 299 | "sloth_bear": 297, 300 | "mongoose": 298, 301 | "meerkat": 299, 302 | "tiger_beetle": 300, 303 | "ladybug": 301, 304 | "ground_beetle": 302, 305 | "long-horned_beetle": 303, 306 | "leaf_beetle": 304, 307 | "dung_beetle": 305, 308 | "rhinoceros_beetle": 306, 309 | "weevil": 307, 310 | "fly": 308, 311 | "bee": 309, 312 | "ant": 310, 313 | "grasshopper": 311, 314 | "cricket": 312, 315 | "walking_stick": 313, 316 | "cockroach": 314, 317 | "mantis": 315, 318 | 
"cicada": 316, 319 | "leafhopper": 317, 320 | "lacewing": 318, 321 | "dragonfly": 319, 322 | "damselfly": 320, 323 | "admiral": 321, 324 | "ringlet": 322, 325 | "monarch": 323, 326 | "cabbage_butterfly": 324, 327 | "sulphur_butterfly": 325, 328 | "lycaenid": 326, 329 | "starfish": 327, 330 | "sea_urchin": 328, 331 | "sea_cucumber": 329, 332 | "wood_rabbit": 330, 333 | "hare": 331, 334 | "Angora": 332, 335 | "hamster": 333, 336 | "porcupine": 334, 337 | "fox_squirrel": 335, 338 | "marmot": 336, 339 | "beaver": 337, 340 | "guinea_pig": 338, 341 | "sorrel": 339, 342 | "zebra": 340, 343 | "hog": 341, 344 | "wild_boar": 342, 345 | "warthog": 343, 346 | "hippopotamus": 344, 347 | "ox": 345, 348 | "water_buffalo": 346, 349 | "bison": 347, 350 | "ram": 348, 351 | "bighorn": 349, 352 | "ibex": 350, 353 | "hartebeest": 351, 354 | "impala": 352, 355 | "gazelle": 353, 356 | "Arabian_camel": 354, 357 | "llama": 355, 358 | "weasel": 356, 359 | "mink": 357, 360 | "polecat": 358, 361 | "black-footed_ferret": 359, 362 | "otter": 360, 363 | "skunk": 361, 364 | "badger": 362, 365 | "armadillo": 363, 366 | "three-toed_sloth": 364, 367 | "orangutan": 365, 368 | "gorilla": 366, 369 | "chimpanzee": 367, 370 | "gibbon": 368, 371 | "siamang": 369, 372 | "guenon": 370, 373 | "patas": 371, 374 | "baboon": 372, 375 | "macaque": 373, 376 | "langur": 374, 377 | "colobus": 375, 378 | "proboscis_monkey": 376, 379 | "marmoset": 377, 380 | "capuchin": 378, 381 | "howler_monkey": 379, 382 | "titi": 380, 383 | "spider_monkey": 381, 384 | "squirrel_monkey": 382, 385 | "Madagascar_cat": 383, 386 | "indri": 384, 387 | "Indian_elephant": 385, 388 | "African_elephant": 386, 389 | "lesser_panda": 387, 390 | "giant_panda": 388, 391 | "barracouta": 389, 392 | "eel": 390, 393 | "coho": 391, 394 | "rock_beauty": 392, 395 | "anemone_fish": 393, 396 | "sturgeon": 394, 397 | "gar": 395, 398 | "lionfish": 396, 399 | "puffer": 397, 400 | "abacus": 398, 401 | "abaya": 399, 402 | "academic_gown": 400, 403 | 
"accordion": 401, 404 | "acoustic_guitar": 402, 405 | "aircraft_carrier": 403, 406 | "airliner": 404, 407 | "airship": 405, 408 | "altar": 406, 409 | "ambulance": 407, 410 | "amphibian": 408, 411 | "analog_clock": 409, 412 | "apiary": 410, 413 | "apron": 411, 414 | "ashcan": 412, 415 | "assault_rifle": 413, 416 | "backpack": 414, 417 | "bakery": 415, 418 | "balance_beam": 416, 419 | "balloon": 417, 420 | "ballpoint": 418, 421 | "Band_Aid": 419, 422 | "banjo": 420, 423 | "bannister": 421, 424 | "barbell": 422, 425 | "barber_chair": 423, 426 | "barbershop": 424, 427 | "barn": 425, 428 | "barometer": 426, 429 | "barrel": 427, 430 | "barrow": 428, 431 | "baseball": 429, 432 | "basketball": 430, 433 | "bassinet": 431, 434 | "bassoon": 432, 435 | "bathing_cap": 433, 436 | "bath_towel": 434, 437 | "bathtub": 435, 438 | "beach_wagon": 436, 439 | "beacon": 437, 440 | "beaker": 438, 441 | "bearskin": 439, 442 | "beer_bottle": 440, 443 | "beer_glass": 441, 444 | "bell_cote": 442, 445 | "bib": 443, 446 | "bicycle-built-for-two": 444, 447 | "bikini": 445, 448 | "binder": 446, 449 | "binoculars": 447, 450 | "birdhouse": 448, 451 | "boathouse": 449, 452 | "bobsled": 450, 453 | "bolo_tie": 451, 454 | "bonnet": 452, 455 | "bookcase": 453, 456 | "bookshop": 454, 457 | "bottlecap": 455, 458 | "bow": 456, 459 | "bow_tie": 457, 460 | "brass": 458, 461 | "brassiere": 459, 462 | "breakwater": 460, 463 | "breastplate": 461, 464 | "broom": 462, 465 | "bucket": 463, 466 | "buckle": 464, 467 | "bulletproof_vest": 465, 468 | "bullet_train": 466, 469 | "butcher_shop": 467, 470 | "cab": 468, 471 | "caldron": 469, 472 | "candle": 470, 473 | "cannon": 471, 474 | "canoe": 472, 475 | "can_opener": 473, 476 | "cardigan": 474, 477 | "car_mirror": 475, 478 | "carousel": 476, 479 | "carpenter's_kit": 477, 480 | "carton": 478, 481 | "car_wheel": 479, 482 | "cash_machine": 480, 483 | "cassette": 481, 484 | "cassette_player": 482, 485 | "castle": 483, 486 | "catamaran": 484, 487 | "CD_player": 485, 488 | 
"cello": 486, 489 | "cellular_telephone": 487, 490 | "chain": 488, 491 | "chainlink_fence": 489, 492 | "chain_mail": 490, 493 | "chain_saw": 491, 494 | "chest": 492, 495 | "chiffonier": 493, 496 | "chime": 494, 497 | "china_cabinet": 495, 498 | "Christmas_stocking": 496, 499 | "church": 497, 500 | "cinema": 498, 501 | "cleaver": 499, 502 | "cliff_dwelling": 500, 503 | "cloak": 501, 504 | "clog": 502, 505 | "cocktail_shaker": 503, 506 | "coffee_mug": 504, 507 | "coffeepot": 505, 508 | "coil": 506, 509 | "combination_lock": 507, 510 | "computer_keyboard": 508, 511 | "confectionery": 509, 512 | "container_ship": 510, 513 | "convertible": 511, 514 | "corkscrew": 512, 515 | "cornet": 513, 516 | "cowboy_boot": 514, 517 | "cowboy_hat": 515, 518 | "cradle": 516, 519 | "crash_helmet": 518, 520 | "crate": 519, 521 | "crib": 520, 522 | "Crock_Pot": 521, 523 | "croquet_ball": 522, 524 | "crutch": 523, 525 | "cuirass": 524, 526 | "dam": 525, 527 | "desk": 526, 528 | "desktop_computer": 527, 529 | "dial_telephone": 528, 530 | "diaper": 529, 531 | "digital_clock": 530, 532 | "digital_watch": 531, 533 | "dining_table": 532, 534 | "dishrag": 533, 535 | "dishwasher": 534, 536 | "disk_brake": 535, 537 | "dock": 536, 538 | "dogsled": 537, 539 | "dome": 538, 540 | "doormat": 539, 541 | "drilling_platform": 540, 542 | "drum": 541, 543 | "drumstick": 542, 544 | "dumbbell": 543, 545 | "Dutch_oven": 544, 546 | "electric_fan": 545, 547 | "electric_guitar": 546, 548 | "electric_locomotive": 547, 549 | "entertainment_center": 548, 550 | "envelope": 549, 551 | "espresso_maker": 550, 552 | "face_powder": 551, 553 | "feather_boa": 552, 554 | "file": 553, 555 | "fireboat": 554, 556 | "fire_engine": 555, 557 | "fire_screen": 556, 558 | "flagpole": 557, 559 | "flute": 558, 560 | "folding_chair": 559, 561 | "football_helmet": 560, 562 | "forklift": 561, 563 | "fountain": 562, 564 | "fountain_pen": 563, 565 | "four-poster": 564, 566 | "freight_car": 565, 567 | "French_horn": 566, 568 | "frying_pan": 
567, 569 | "fur_coat": 568, 570 | "garbage_truck": 569, 571 | "gasmask": 570, 572 | "gas_pump": 571, 573 | "goblet": 572, 574 | "go-kart": 573, 575 | "golf_ball": 574, 576 | "golfcart": 575, 577 | "gondola": 576, 578 | "gong": 577, 579 | "gown": 578, 580 | "grand_piano": 579, 581 | "greenhouse": 580, 582 | "grille": 581, 583 | "grocery_store": 582, 584 | "guillotine": 583, 585 | "hair_slide": 584, 586 | "hair_spray": 585, 587 | "half_track": 586, 588 | "hammer": 587, 589 | "hamper": 588, 590 | "hand_blower": 589, 591 | "hand-held_computer": 590, 592 | "handkerchief": 591, 593 | "hard_disc": 592, 594 | "harmonica": 593, 595 | "harp": 594, 596 | "harvester": 595, 597 | "hatchet": 596, 598 | "holster": 597, 599 | "home_theater": 598, 600 | "honeycomb": 599, 601 | "hook": 600, 602 | "hoopskirt": 601, 603 | "horizontal_bar": 602, 604 | "horse_cart": 603, 605 | "hourglass": 604, 606 | "iPod": 605, 607 | "iron": 606, 608 | "jack-o'-lantern": 607, 609 | "jean": 608, 610 | "jeep": 609, 611 | "jersey": 610, 612 | "jigsaw_puzzle": 611, 613 | "jinrikisha": 612, 614 | "joystick": 613, 615 | "kimono": 614, 616 | "knee_pad": 615, 617 | "knot": 616, 618 | "lab_coat": 617, 619 | "ladle": 618, 620 | "lampshade": 619, 621 | "laptop": 620, 622 | "lawn_mower": 621, 623 | "lens_cap": 622, 624 | "letter_opener": 623, 625 | "library": 624, 626 | "lifeboat": 625, 627 | "lighter": 626, 628 | "limousine": 627, 629 | "liner": 628, 630 | "lipstick": 629, 631 | "Loafer": 630, 632 | "lotion": 631, 633 | "loudspeaker": 632, 634 | "loupe": 633, 635 | "lumbermill": 634, 636 | "magnetic_compass": 635, 637 | "mailbag": 636, 638 | "mailbox": 637, 639 | "maillot": 639, 640 | "manhole_cover": 640, 641 | "maraca": 641, 642 | "marimba": 642, 643 | "mask": 643, 644 | "matchstick": 644, 645 | "maypole": 645, 646 | "maze": 646, 647 | "measuring_cup": 647, 648 | "medicine_chest": 648, 649 | "megalith": 649, 650 | "microphone": 650, 651 | "microwave": 651, 652 | "military_uniform": 652, 653 | "milk_can": 653, 
654 | "minibus": 654, 655 | "miniskirt": 655, 656 | "minivan": 656, 657 | "missile": 657, 658 | "mitten": 658, 659 | "mixing_bowl": 659, 660 | "mobile_home": 660, 661 | "Model_T": 661, 662 | "modem": 662, 663 | "monastery": 663, 664 | "monitor": 664, 665 | "moped": 665, 666 | "mortar": 666, 667 | "mortarboard": 667, 668 | "mosque": 668, 669 | "mosquito_net": 669, 670 | "motor_scooter": 670, 671 | "mountain_bike": 671, 672 | "mountain_tent": 672, 673 | "mouse": 673, 674 | "mousetrap": 674, 675 | "moving_van": 675, 676 | "muzzle": 676, 677 | "nail": 677, 678 | "neck_brace": 678, 679 | "necklace": 679, 680 | "nipple": 680, 681 | "notebook": 681, 682 | "obelisk": 682, 683 | "oboe": 683, 684 | "ocarina": 684, 685 | "odometer": 685, 686 | "oil_filter": 686, 687 | "organ": 687, 688 | "oscilloscope": 688, 689 | "overskirt": 689, 690 | "oxcart": 690, 691 | "oxygen_mask": 691, 692 | "packet": 692, 693 | "paddle": 693, 694 | "paddlewheel": 694, 695 | "padlock": 695, 696 | "paintbrush": 696, 697 | "pajama": 697, 698 | "palace": 698, 699 | "panpipe": 699, 700 | "paper_towel": 700, 701 | "parachute": 701, 702 | "parallel_bars": 702, 703 | "park_bench": 703, 704 | "parking_meter": 704, 705 | "passenger_car": 705, 706 | "patio": 706, 707 | "pay-phone": 707, 708 | "pedestal": 708, 709 | "pencil_box": 709, 710 | "pencil_sharpener": 710, 711 | "perfume": 711, 712 | "Petri_dish": 712, 713 | "photocopier": 713, 714 | "pick": 714, 715 | "pickelhaube": 715, 716 | "picket_fence": 716, 717 | "pickup": 717, 718 | "pier": 718, 719 | "piggy_bank": 719, 720 | "pill_bottle": 720, 721 | "pillow": 721, 722 | "ping-pong_ball": 722, 723 | "pinwheel": 723, 724 | "pirate": 724, 725 | "pitcher": 725, 726 | "plane": 726, 727 | "planetarium": 727, 728 | "plastic_bag": 728, 729 | "plate_rack": 729, 730 | "plow": 730, 731 | "plunger": 731, 732 | "Polaroid_camera": 732, 733 | "pole": 733, 734 | "police_van": 734, 735 | "poncho": 735, 736 | "pool_table": 736, 737 | "pop_bottle": 737, 738 | "pot": 738, 739 | 
"potter's_wheel": 739, 740 | "power_drill": 740, 741 | "prayer_rug": 741, 742 | "printer": 742, 743 | "prison": 743, 744 | "projectile": 744, 745 | "projector": 745, 746 | "puck": 746, 747 | "punching_bag": 747, 748 | "purse": 748, 749 | "quill": 749, 750 | "quilt": 750, 751 | "racer": 751, 752 | "racket": 752, 753 | "radiator": 753, 754 | "radio": 754, 755 | "radio_telescope": 755, 756 | "rain_barrel": 756, 757 | "recreational_vehicle": 757, 758 | "reel": 758, 759 | "reflex_camera": 759, 760 | "refrigerator": 760, 761 | "remote_control": 761, 762 | "restaurant": 762, 763 | "revolver": 763, 764 | "rifle": 764, 765 | "rocking_chair": 765, 766 | "rotisserie": 766, 767 | "rubber_eraser": 767, 768 | "rugby_ball": 768, 769 | "rule": 769, 770 | "running_shoe": 770, 771 | "safe": 771, 772 | "safety_pin": 772, 773 | "saltshaker": 773, 774 | "sandal": 774, 775 | "sarong": 775, 776 | "sax": 776, 777 | "scabbard": 777, 778 | "scale": 778, 779 | "school_bus": 779, 780 | "schooner": 780, 781 | "scoreboard": 781, 782 | "screen": 782, 783 | "screw": 783, 784 | "screwdriver": 784, 785 | "seat_belt": 785, 786 | "sewing_machine": 786, 787 | "shield": 787, 788 | "shoe_shop": 788, 789 | "shoji": 789, 790 | "shopping_basket": 790, 791 | "shopping_cart": 791, 792 | "shovel": 792, 793 | "shower_cap": 793, 794 | "shower_curtain": 794, 795 | "ski": 795, 796 | "ski_mask": 796, 797 | "sleeping_bag": 797, 798 | "slide_rule": 798, 799 | "sliding_door": 799, 800 | "slot": 800, 801 | "snorkel": 801, 802 | "snowmobile": 802, 803 | "snowplow": 803, 804 | "soap_dispenser": 804, 805 | "soccer_ball": 805, 806 | "sock": 806, 807 | "solar_dish": 807, 808 | "sombrero": 808, 809 | "soup_bowl": 809, 810 | "space_bar": 810, 811 | "space_heater": 811, 812 | "space_shuttle": 812, 813 | "spatula": 813, 814 | "speedboat": 814, 815 | "spider_web": 815, 816 | "spindle": 816, 817 | "sports_car": 817, 818 | "spotlight": 818, 819 | "stage": 819, 820 | "steam_locomotive": 820, 821 | "steel_arch_bridge": 821, 822 | 
"steel_drum": 822, 823 | "stethoscope": 823, 824 | "stole": 824, 825 | "stone_wall": 825, 826 | "stopwatch": 826, 827 | "stove": 827, 828 | "strainer": 828, 829 | "streetcar": 829, 830 | "stretcher": 830, 831 | "studio_couch": 831, 832 | "stupa": 832, 833 | "submarine": 833, 834 | "suit": 834, 835 | "sundial": 835, 836 | "sunglass": 836, 837 | "sunglasses": 837, 838 | "sunscreen": 838, 839 | "suspension_bridge": 839, 840 | "swab": 840, 841 | "sweatshirt": 841, 842 | "swimming_trunks": 842, 843 | "swing": 843, 844 | "switch": 844, 845 | "syringe": 845, 846 | "table_lamp": 846, 847 | "tank": 847, 848 | "tape_player": 848, 849 | "teapot": 849, 850 | "teddy": 850, 851 | "television": 851, 852 | "tennis_ball": 852, 853 | "thatch": 853, 854 | "theater_curtain": 854, 855 | "thimble": 855, 856 | "thresher": 856, 857 | "throne": 857, 858 | "tile_roof": 858, 859 | "toaster": 859, 860 | "tobacco_shop": 860, 861 | "toilet_seat": 861, 862 | "torch": 862, 863 | "totem_pole": 863, 864 | "tow_truck": 864, 865 | "toyshop": 865, 866 | "tractor": 866, 867 | "trailer_truck": 867, 868 | "tray": 868, 869 | "trench_coat": 869, 870 | "tricycle": 870, 871 | "trimaran": 871, 872 | "tripod": 872, 873 | "triumphal_arch": 873, 874 | "trolleybus": 874, 875 | "trombone": 875, 876 | "tub": 876, 877 | "turnstile": 877, 878 | "typewriter_keyboard": 878, 879 | "umbrella": 879, 880 | "unicycle": 880, 881 | "upright": 881, 882 | "vacuum": 882, 883 | "vase": 883, 884 | "vault": 884, 885 | "velvet": 885, 886 | "vending_machine": 886, 887 | "vestment": 887, 888 | "viaduct": 888, 889 | "violin": 889, 890 | "volleyball": 890, 891 | "waffle_iron": 891, 892 | "wall_clock": 892, 893 | "wallet": 893, 894 | "wardrobe": 894, 895 | "warplane": 895, 896 | "washbasin": 896, 897 | "washer": 897, 898 | "water_bottle": 898, 899 | "water_jug": 899, 900 | "water_tower": 900, 901 | "whiskey_jug": 901, 902 | "whistle": 902, 903 | "wig": 903, 904 | "window_screen": 904, 905 | "window_shade": 905, 906 | "Windsor_tie": 906, 
907 | "wine_bottle": 907, 908 | "wing": 908, 909 | "wok": 909, 910 | "wooden_spoon": 910, 911 | "wool": 911, 912 | "worm_fence": 912, 913 | "wreck": 913, 914 | "yawl": 914, 915 | "yurt": 915, 916 | "web_site": 916, 917 | "comic_book": 917, 918 | "crossword_puzzle": 918, 919 | "street_sign": 919, 920 | "traffic_light": 920, 921 | "book_jacket": 921, 922 | "menu": 922, 923 | "plate": 923, 924 | "guacamole": 924, 925 | "consomme": 925, 926 | "hot_pot": 926, 927 | "trifle": 927, 928 | "ice_cream": 928, 929 | "ice_lolly": 929, 930 | "French_loaf": 930, 931 | "bagel": 931, 932 | "pretzel": 932, 933 | "cheeseburger": 933, 934 | "hotdog": 934, 935 | "mashed_potato": 935, 936 | "head_cabbage": 936, 937 | "broccoli": 937, 938 | "cauliflower": 938, 939 | "zucchini": 939, 940 | "spaghetti_squash": 940, 941 | "acorn_squash": 941, 942 | "butternut_squash": 942, 943 | "cucumber": 943, 944 | "artichoke": 944, 945 | "bell_pepper": 945, 946 | "cardoon": 946, 947 | "mushroom": 947, 948 | "Granny_Smith": 948, 949 | "strawberry": 949, 950 | "orange": 950, 951 | "lemon": 951, 952 | "fig": 952, 953 | "pineapple": 953, 954 | "banana": 954, 955 | "jackfruit": 955, 956 | "custard_apple": 956, 957 | "pomegranate": 957, 958 | "hay": 958, 959 | "carbonara": 959, 960 | "chocolate_sauce": 960, 961 | "dough": 961, 962 | "meat_loaf": 962, 963 | "pizza": 963, 964 | "potpie": 964, 965 | "burrito": 965, 966 | "red_wine": 966, 967 | "espresso": 967, 968 | "cup": 968, 969 | "eggnog": 969, 970 | "alp": 970, 971 | "bubble": 971, 972 | "cliff": 972, 973 | "coral_reef": 973, 974 | "geyser": 974, 975 | "lakeside": 975, 976 | "promontory": 976, 977 | "sandbar": 977, 978 | "seashore": 978, 979 | "valley": 979, 980 | "volcano": 980, 981 | "ballplayer": 981, 982 | "groom": 982, 983 | "scuba_diver": 983, 984 | "rapeseed": 984, 985 | "daisy": 985, 986 | "yellow_lady's_slipper": 986, 987 | "corn": 987, 988 | "acorn": 988, 989 | "hip": 989, 990 | "buckeye": 990, 991 | "coral_fungus": 991, 992 | "agaric": 992, 993 | 
#!/bin/bash

# Discover live hosts on a target IPv4 network, then print their open-port
# information in nmap's grepable format (header/status lines stripped).
#
#   $1 - IPv4 network address/range to scan (e.g. 10.0.0.0/24)

# define what host discovery techniques should be used
# - see https://nmap.org/book/host-discovery-techniques.html
# - privileged users "default" would be "-PE -PS443 -PA80 -PP"
HOST_DISC_OPTS="-PS443 -PA80"
# define input options to improve scan time
# - intuition on selected values: https://nmap.org/book/man-performance.html
TIME_OPTS="-T4 --max-rtt-timeout 100ms --max-retries 0 --min-parallelism 100"
# assume first arg passed will always contain an IPv4 network address
TRG="$1"

# check if command "nmap" is available
if command -v nmap &> /dev/null; then
    # Ping-scan the network and capture "up" hosts in UP_HOSTS_LIST.
    # $HOST_DISC_OPTS/$TIME_OPTS stay unquoted on purpose: each holds several
    # nmap arguments that must word-split.
    UP_HOSTS_LIST=$(nmap -sn $HOST_DISC_OPTS $TIME_OPTS "$TRG" | awk '/Nmap scan/{gsub(/[()]/,"",$NF); print $NF}')
    # Only run the port scan when at least one host answered.
    if [ -n "$UP_HOSTS_LIST" ]; then
        # Use a unique temp file (removed on exit) instead of a fixed name in
        # the CWD, avoiding collisions and stale leftovers from failed runs.
        SCAN_FILE=$(mktemp)
        trap 'rm -f "$SCAN_FILE"' EXIT
        nmap -oG "$SCAN_FILE" $UP_HOSTS_LIST &> /dev/null
        grep -v "Status" "$SCAN_FILE" | grep -v "Nmap"
    fi
fi
#!/bin/bash

# Print "<interface> <IPv4-CIDR>" for every interface in LOWER_UP state, or
# for a single device when its name is passed as $1.

# Shared extraction pipeline: keep devices in LOWER_UP state, then print the
# last field (interface label) and the inet address/prefix ($2).
extract_inet() {
    grep -A 2 "LOWER_UP" | grep inet | awk '{print $NF " " $2}'
}

# check if "ip" command is available
if command -v ip &> /dev/null; then
    # look at protocol addresses of all available devices
    if [ $# -eq 0 ]; then
        ip -4 addr show | extract_inet
    # look at protocol address of a specific device ($1 quoted for safety)
    else
        ip -4 addr show dev "$1" | extract_inet
    fi
fi
#!/bin/bash

# Search SEARCH_DIR recursively for TensorFlow SavedModel directories and
# checkpoint files, copy every match into STAGING_DIR, and print the staging
# path so the calling ability can pick it up.

TF_MODEL_CKPT_EXT=".ckpt"

# Enable recursive '**' globbing; without globstar the '**' pattern below
# silently degrades to a single-level '*'.
shopt -s globstar

# Return 0 when the directory at $1 matches the TensorFlow SavedModel
# signature: an "assets" entry, a "variables" entry, and a "model.pb" graph.
# (The original ls/loop version used `[[ "$i"=="model.pb" ]]` with no spaces
# around ==, a non-empty-string test that is always true, and `for i in
# $contents`, which only visits the first array element.)
function is_tensorflow_dir {
    [[ -e "$1/assets" && -e "$1/variables" && -e "$1/model.pb" ]]
}

SEARCH_DIR=$1
STAGING_DIR=$2

matches=()

# Find Tensorflow directories (based on signature) and record the resolved
# paths of their contents. Note matches+=(...) appends array ELEMENTS; the
# original's `matches+=$(...)` string-appended onto element 0.
for f in "$SEARCH_DIR"/**/* ; do
    if [[ -d "$f" ]] && is_tensorflow_dir "$f"; then
        for g in "$(realpath "$f")"/* ; do
            matches+=("$(realpath "$g")")
        done
    fi
done

# Find Tensorflow checkpoint (.ckpt) files, one array element per line.
while IFS= read -r ckpt_file; do
    [[ -n "$ckpt_file" ]] && matches+=("$ckpt_file")
done < <(find "$SEARCH_DIR" -name "*$TF_MODEL_CKPT_EXT*" -type f -not -path '*/\.*' 2>/dev/null)

# Stage files
if [ ${#matches[@]} -ne 0 ] ; then
    mkdir -p "$STAGING_DIR"
    for m in "${matches[@]}"; do
        cp -r "$m" "$STAGING_DIR" 2>/dev/null
    done
fi

echo "$STAGING_DIR"
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | python3-venv 2 | counterfit[dev] @ git+https://github.com/Azure/counterfit.git@main 3 | -------------------------------------------------------------------------------- /templates/arsenal.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |

Arsenal

4 |

5 | The Arsenal plugin contains a collection of TTPs (tactics, techniques, and procedures), adversary profiles, 6 | data sources and 7 | planners. These can be used to construct dynamic operations against targeted hosts for Machine Learning 8 | Systems in conjunction with other CALDERA plugins. 9 |

10 |
11 |
12 |
13 |
14 |
15 |

16 |

abilities

17 | 19 | Abilities 20 | 21 | 22 |
23 |
24 |

25 |

adversaries

26 | 28 | Adversaries 29 | 30 | 31 |
32 |
33 | 34 |

View or edit these abilities and adversaries on their respective pages.

35 |
36 |
37 | 38 | 59 | 60 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/tests/__init__.py -------------------------------------------------------------------------------- /tests/parsers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mitre-atlas/arsenal/2d80c59a637e32dc5d8bdc06f1b449745330b1ec/tests/parsers/__init__.py -------------------------------------------------------------------------------- /tests/parsers/test_endpoint.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from plugins.arsenal.app.parsers.endpoint import Parser 5 | 6 | 7 | class UsedFact(): 8 | def __init__(self, name, value): 9 | self.name = name 10 | self.value = value 11 | 12 | 13 | class Mapper(): 14 | def __init__(self, source, edge=None, target=None): 15 | self.source = source 16 | self.edge = edge 17 | self.target = target 18 | 19 | 20 | @pytest.fixture 21 | def parser(): 22 | parser_info = { 23 | 'mappers': [ 24 | Mapper(source='target.model_server.prediction_endpoint') 25 | ], 26 | 'used_facts': [UsedFact('target.model_server.inference_address', 'bind_address')], 27 | 'source_facts': [] 28 | } 29 | endpoint = Parser( 30 | parser_info=parser_info 31 | ) 32 | return endpoint 33 | 34 | 35 | @pytest.fixture 36 | def blob(): 37 | cmd_output = """resnet-18 38 | other-model""" 39 | return cmd_output 40 | 41 | 42 | class TestEndpointParser(): 43 | def test_parse_0(self, parser, blob): 44 | relationships = parser.parse(blob) 45 | 46 | assert len(relationships) == 2 47 | assert relationships[0].source.name == 'target.model_server.prediction_endpoint' 48 | assert relationships[0].source.value == 'bind_address/predictions/resnet-18' 49 
| assert relationships[1].source.name == 'target.model_server.prediction_endpoint' 50 | assert relationships[1].source.value == 'bind_address/predictions/other-model' -------------------------------------------------------------------------------- /tests/parsers/test_ipaddr.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from plugins.arsenal.app.parsers.ipaddr import Parser 5 | 6 | 7 | class Mapper(): 8 | def __init__(self, source, edge=None, target=None): 9 | self.source = source 10 | self.edge = edge 11 | self.target = target 12 | 13 | 14 | @pytest.fixture 15 | def parser(): 16 | parser_info = { 17 | 'mappers': [ 18 | Mapper(source='host.network_interface.name', edge='has_IPv4_address', target='host.network_interface.IPv4_address'), 19 | Mapper(source='host.network_interface.name', edge='has_IPv4_network', target='host.network_interface.IPv4_network') 20 | ], 21 | 'used_facts': [], 22 | 'source_facts': [] 23 | } 24 | ipaddr = Parser( 25 | parser_info=parser_info 26 | ) 27 | return ipaddr 28 | 29 | 30 | @pytest.fixture 31 | def blob(): 32 | cmd_output = """lo 127.0.0.1/8 33 | interface0 10.0.0.62/24 34 | interface1 172.0.0.1/16""" 35 | return cmd_output 36 | 37 | 38 | class TestIPAddrParser(): 39 | def test_parse_0(self, parser, blob): 40 | relationships = parser.parse(blob) 41 | 42 | assert len(relationships) == 2 43 | assert relationships[0].source.name == 'host.network_interface.name' 44 | assert relationships[0].source.value == 'interface0' 45 | assert relationships[1].source.name == 'host.network_interface.name' 46 | assert relationships[1].source.value == 'interface0' 47 | assert relationships[0].target.name == 'host.network_interface.IPv4_address' 48 | assert relationships[0].target.value == '10.0.0.62' 49 | assert relationships[1].target.name == 'host.network_interface.IPv4_network' 50 | assert relationships[1].target.value == '10.0.0.0/24' 51 | 
-------------------------------------------------------------------------------- /tests/parsers/test_nmap.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from plugins.arsenal.app.parsers.nmap import Parser 5 | 6 | 7 | class Mapper(): 8 | def __init__(self, source, edge=None, target=None): 9 | self.source = source 10 | self.edge = edge 11 | self.target = target 12 | 13 | 14 | @pytest.fixture 15 | def parser(): 16 | parser_info = { 17 | 'mappers': [ 18 | Mapper(source='target.api.binding_address_list') 19 | ], 20 | 'used_facts': [], 21 | 'source_facts': [] 22 | } 23 | nmap = Parser( 24 | parser_info=parser_info 25 | ) 26 | return nmap 27 | 28 | 29 | @pytest.fixture 30 | def blob(): 31 | cmd_output = """Host: 0.0.0.0 ()\tPorts: 22/open/tcp//ssh///, 630/open/tcp//test///, 3005/open/tcp//test///\tIgnored State: closed (997)""" 32 | return cmd_output 33 | 34 | 35 | class TestNmapParser(): 36 | def test_parse_0(self, parser, blob): 37 | relationships = parser.parse(blob) 38 | 39 | assert len(relationships) == 1 40 | assert relationships[0].source.name == 'target.api.binding_address_list' 41 | assert relationships[0].source.value == '0.0.0.0:630, 0.0.0.0:3005' 42 | -------------------------------------------------------------------------------- /tests/parsers/test_ss.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from plugins.arsenal.app.parsers.ss import Parser 5 | 6 | 7 | class UsedFact(): 8 | def __init__(self, name, value): 9 | self.name = name 10 | self.value = value 11 | 12 | 13 | class Mapper(): 14 | def __init__(self, source, edge=None, target=None): 15 | self.source = source 16 | self.edge = edge 17 | self.target = target 18 | 19 | 20 | @pytest.fixture 21 | def parser(): 22 | parser_info = { 23 | 'mappers': [Mapper(source='target.api.binding_address_list')], 24 | 'used_facts': [UsedFact('IPv4_address', 'bind_address')], 25 | 
'source_facts': [] 26 | } 27 | ss = Parser( 28 | parser_info=parser_info 29 | ) 30 | return ss 31 | 32 | 33 | @pytest.fixture 34 | def blob(): 35 | cmd_output = """127.0.0.53%lo:53 users:(("systemd-resolve",pid=868,fd=13)) 36 | 0.0.0.0:22 users:(("sshd",pid=1611,fd=3)) 37 | 127.0.0.1:631 users:(("cupsd",pid=118744,fd=7)) 38 | 127.0.0.1:34041 users:(("node",pid=124268,fd=18)) 39 | 0.0.0.0:3001 users:(("test",pid=2640,fd=4)) 40 | 0.0.0.0:3005 users:(("test",pid=2641,fd=4)) 41 | 10.2.4.1:3006 users:(("test",pid=2642,fd=4)) 42 | 127.0.0.1:36703 users:(("node",pid=124588,fd=29)) 43 | 127.0.0.1:2947 users:(("systemd",pid=1,fd=171)) 44 | [::]:22 users:(("sshd",pid=1611,fd=4))""" 45 | return cmd_output 46 | 47 | 48 | class TestSSParser(): 49 | def test_parse_0(self, parser, blob): 50 | relationships = parser.parse(blob) 51 | 52 | assert len(relationships) == 1 53 | assert relationships[0].source.name == 'target.api.binding_address_list' 54 | assert relationships[0].source.value == 'bind_address:3001, bind_address:3005, 10.2.4.1:3006' 55 | -------------------------------------------------------------------------------- /tests/parsers/test_torchserve_api.py: -------------------------------------------------------------------------------- 1 | 2 | import pytest 3 | 4 | from plugins.arsenal.app.parsers.torchserve_api import Parser 5 | 6 | 7 | class Mapper(): 8 | def __init__(self, source, edge=None, target=None): 9 | self.source = source 10 | self.edge = edge 11 | self.target = target 12 | 13 | 14 | @pytest.fixture 15 | def parser(): 16 | parser_info = { 17 | 'mappers': [ 18 | Mapper(source='target.model_server.framework', edge='has_inference_address', target='target.model_server.inference_address'), 19 | Mapper(source='target.model_server.framework', edge='has_management_address', target='target.model_server.management_address') 20 | ], 21 | 'used_facts': [], 22 | 'source_facts': [] 23 | } 24 | torchserve = Parser( 25 | parser_info=parser_info 26 | ) 27 | return torchserve 28 
| 29 | 30 | @pytest.fixture 31 | def blob(): 32 | cmd_output = """INFERENCE_API 20.20.20.20:8888 33 | MANAGEMENT_API 10.10.10.10:9999""" 34 | return cmd_output 35 | 36 | 37 | class TestTorchserveAPIParser(): 38 | def test_parse_0(self, parser, blob): 39 | relationships = parser.parse(blob) 40 | 41 | assert len(relationships) == 2 42 | assert relationships[0].source.name == 'target.model_server.framework' 43 | assert relationships[0].source.value == 'TorchServe' 44 | assert relationships[1].source.name == 'target.model_server.framework' 45 | assert relationships[1].source.value == 'TorchServe' 46 | assert relationships[0].target.name == 'target.model_server.inference_address' 47 | assert relationships[0].target.value == '20.20.20.20:8888' 48 | assert relationships[1].target.name == 'target.model_server.management_address' 49 | assert relationships[1].target.value == '10.10.10.10:9999' 50 | --------------------------------------------------------------------------------