├── .gitignore ├── Excavator ├── Excavator.py ├── LICENSE ├── Readme.md └── requirements.txt ├── README.md ├── ausys └── ausys.bat ├── dbc-helper ├── 1-preprocess.ps1 └── 2-execute-custom.ps1 ├── eXir-lin-helper ├── find_interesting.sh └── patterns.txt ├── eXir-win-helper ├── requirements.txt ├── script.py └── utils.py ├── eXir-win ├── exir.bat └── resources │ ├── autoruns │ ├── Autoruns64.dll │ ├── Eula.txt │ ├── autoruns.chm │ ├── autorunsc.exe │ └── autorunsc64.exe │ ├── logonsessions │ ├── Eula.txt │ ├── logonsessions.exe │ └── logonsessions64.exe │ ├── psloggedon │ ├── Eula.txt │ ├── PsLoggedon.exe │ └── PsLoggedon64.exe │ └── winaudit │ └── WinAudit.exe ├── eXir ├── eXir_2.4.sh └── usage-instructions.txt ├── exvt ├── exvt-0.8.py └── usage-instructions.txt ├── lies └── lies-0.2.py └── vetter-py ├── README.md ├── config.ini ├── requirements.txt └── vetter.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | **__pycache__* -------------------------------------------------------------------------------- /Excavator/Excavator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | #Excavator.py 3 | 4 | import os 5 | import re 6 | import sys 7 | import json 8 | import logging 9 | import colorlog 10 | import argparse 11 | import xmltodict 12 | from pprint import pprint 13 | from datetime import datetime 14 | from subprocess import check_output 15 | from elasticsearch import Elasticsearch, helpers 16 | 17 | 18 | ##global vars## 19 | status_details = { 20 | 'time_start': '', 21 | 'time_end': '', 22 | 'files':{ 23 | 'successful': { 24 | 'files': { 25 | #'name': 'error' 26 | }, 27 | 'count': 0 28 | }, 29 | 'failed': { 30 | 'files': { 31 | #'name': 'error' 32 | }, 33 | 'count': 0 34 | } 35 | } 36 | } 37 | default_args = { 38 | 'elasticsearch': { 39 | 'ip': '127.0.0.1', 40 | 'port': 9200 41 | }, 42 | "date_fmts": [ 43 | "%m/%d/%Y %H:%M:%S %p", 44 | "%m/%d/%Y 
%H:%M %p", 45 | "%Y-%m-%dT%H:%M:%S.%fZ" 46 | ] 47 | } 48 | ### 49 | 50 | 51 | def setup_logger(): 52 | formatter = colorlog.ColoredFormatter( 53 | "%(log_color)s%(asctime)s:%(levelname)s:%(message)s", 54 | datefmt='%D' 55 | ) 56 | 57 | log = logging.getLogger() 58 | 59 | handler2 = logging.FileHandler('.output.log') 60 | handler = logging.StreamHandler() 61 | handler.setFormatter(formatter) 62 | log.addHandler(handler) 63 | log.addHandler(handler2) 64 | log.setLevel(logging.DEBUG) 65 | 66 | return log 67 | 68 | 69 | def summarize(): 70 | print("[INFO] Start Time: {}".format(status_details['time_start'])) 71 | status_details["time_end"] = datetime.now() 72 | print("[INFO] End Time: {}".format(status_details['time_end'])) 73 | print("[INFO] Files sucessfully ingested: {}".format(status_details['files']['successful']['count'])) 74 | print("[INFO] Files failed during ingestion: {}".format(status_details['files']['failed']['count'])) 75 | print("[INFO] Following is the list of successful files and logs count") 76 | pprint(status_details['files']['successful']['files']) 77 | print("[INFO] Following is the list of failed files") 78 | pprint(status_details['files']['failed']['files']) 79 | print("[INFO] Start Time: {}".format(status_details['time_start'])) 80 | print("[INFO] End Time: {}".format(status_details['time_end'])) 81 | print("[INFO] Time difference: {}".format(status_details["time_end"]-status_details["time_start"])) 82 | 83 | 84 | #perform a sanity check on OS 85 | def check_os(condition): 86 | if condition == "wevtutil": 87 | if os.name != 'nt': 88 | print('[INFO] OS: Not Windows\n[ERROR] Quitting!\n') 89 | exit() 90 | else: 91 | print('[INFO] OS: Windows\n[SUCCESS] All OK!\n') 92 | elif condition == "slashes": 93 | slash = '\\' 94 | if os.name != 'nt': 95 | slash = '/' 96 | return slash 97 | 98 | 99 | def convert(path,file): 100 | print('[SUCCESS] ' + file) 101 | try: 102 | # check_output('wevtutil qe ' + path + check_os("slashes") + file + ' /lf:true /f:XML 
>> ' + path + check_os("slashes") + file + '.xml', shell=True) 103 | check_output('wevtutil qe "{0}{1}{2}" /lf:true /f:XML >> "{0}{1}{2}.xml"'.format(path, check_os('slashes'), file), shell=True) 104 | return True 105 | except Exception as exception: 106 | print('[INFO] ', exception) 107 | print('[ERROR] Unable to execute command!') 108 | return False 109 | 110 | 111 | #convert evtx files to xml 112 | def evt_to_xml(path,file): 113 | #check if running on windows 114 | conversion_success = False 115 | check_os("wevtutil") 116 | #define scope of files 117 | if file == '*': 118 | for file in os.listdir(path): 119 | if file.endswith('.evtx'): 120 | conversion_success = convert(path,file) 121 | else: 122 | conversion_success = convert(path,file) 123 | return conversion_success 124 | 125 | 126 | def is_date(mstr): 127 | validate = False 128 | for fmt in default_args.get("date_fmts"): 129 | try: 130 | datetime.strptime(mstr, fmt) 131 | validate = True 132 | break 133 | except Exception as e: pass 134 | # print("[Warning] Exception {} occurred in is_date while validating date {}...".format(e, mstr)) 135 | # print("[Warning] But continuing") 136 | # sys.exit() 137 | if not validate: 138 | print("[Warning] Exception occurred in is_date while validating date {}...".format(mstr)) 139 | return validate 140 | 141 | 142 | def get_date(mstr): 143 | valid_date = "-" 144 | validate = False 145 | for fmt in default_args.get("date_fmts"): 146 | try: 147 | valid_date = datetime.strptime(mstr, fmt) 148 | validate = True 149 | break 150 | except Exception as e: pass 151 | # print("[Warning] Exception {} occurred in get_date while returning date {}...".format(e, mstr)) 152 | # print("[Warning] But continuing") 153 | # sys.exit() 154 | if not validate: 155 | print("[Warning] Exception occurred in is_date while validating date {}...".format(mstr)) 156 | return valid_date 157 | 158 | 159 | #correct structure of the data field 160 | def correct_data_field_structure(event): 161 | data = {} 162 
| try: 163 | if ('Data' in event['Event']['EventData']) and not (event['Event']['EventData']['Data'] == None): 164 | for field in range(0,len(event['Event']['EventData']['Data'])): 165 | field_name = event['Event']['EventData']['Data'][field]['@Name'] 166 | already_done = False 167 | try: 168 | if 'time' in field_name.lower(): 169 | # to parse strings containing ? in them 170 | temp = event['Event']['EventData']['Data'][field]['#text'].replace("?", "") 171 | # to parse strings containing nanoseconds in them 172 | if '.' in temp and temp[-1] == 'Z' and len(temp) - temp.index('.') - 2 == 9 : temp = temp[:temp.index('.') + 7] + 'Z' 173 | if is_date(temp): 174 | text = get_date(temp) 175 | already_done = True 176 | except: pass 177 | if not already_done: 178 | try: text = event['Event']['EventData']['Data'][field]['#text'] 179 | except: text = '-' 180 | data[field_name] = text 181 | except: 182 | return event 183 | event['Event']['EventData']['Data'] = data 184 | return event 185 | 186 | 187 | def validate_event(event): 188 | #print the log that is parsed from XML before editing anything 189 | if ('EventData' in event['Event']) and not (event['Event']['EventData'] == None): 190 | if ('Data' in event['Event']['EventData']) and not (event['Event']['EventData']['Data'] == None): 191 | if not ('@Name' in event['Event']['EventData']['Data']): 192 | try: 193 | event['Event']['EventData']['Data'][0]['@Name'] 194 | except: 195 | group_data = [{'@Name': 'param1', '#text': str(event['Event']['EventData']['Data'])}] 196 | event['Event']['EventData']['Data'] = group_data 197 | if ('System' in event['Event']) and not (event['Event']['System'] == None): 198 | if ('EventID' in event['Event']['System']) and not (event['Event']['System'] == None): 199 | try: 200 | event['Event']['System']['EventID']['@Qualifiers'] 201 | except: 202 | group_data = {'@Qualifiers': 'Unknown', '#text': event['Event']['System']['EventID']} 203 | event['Event']['System']['EventID'] = group_data 204 | return 
event 205 | 206 | 207 | def push_to_elk(ip,port,index,user,pwd,bulk,scheme): 208 | elk = None 209 | if(user == None) and (pwd == None): 210 | elk = Elasticsearch(ip,scheme=scheme,port=port,) 211 | else: 212 | elk = Elasticsearch(ip,http_auth=(user,pwd),scheme=scheme,port=port,) 213 | try: 214 | helpers.bulk(elk, bulk) 215 | return True 216 | except Exception as exception: 217 | print('[INFO] ELK ingestion error') 218 | print(exception) 219 | return False 220 | 221 | 222 | def send_now(ip,port,index,user,pwd,bulk,scheme): 223 | logs_sent = False 224 | #keep looping until the bulked logs have not been sent successfully 225 | while not logs_sent: 226 | logs_sent = push_to_elk(ip,port,index,user,pwd,bulk,scheme) 227 | if not logs_sent: 228 | continue 229 | else: 230 | return [] 231 | 232 | 233 | def process_file(action,path,ip,port,file,index,user,pwd,size,scheme): 234 | bulk = [] 235 | successful_events = 0 236 | fileName = file.split('.')[0] 237 | 238 | with open(path+check_os("slashes")+file,'r', encoding='iso-8859-15') as opened_file: 239 | eventlog_maker = "" 240 | for line in opened_file: 241 | # Joins all broken XML parts to form one complete eventlog! 
242 | if not ('' in eventlog_maker): 243 | try: 244 | line = line.replace("\n","") 245 | line = line.replace("\t","") 246 | line = line.replace("\r","") 247 | eventlog_maker+=line 248 | if not ('' in eventlog_maker): 249 | continue 250 | except Exception as exception: 251 | print(f'[ERROR] Exception {exception} was generated while making a complete log from file {file}') 252 | print(f'[INFO] During the conversion, the following line caused issue {line}') 253 | status_details['files']['failed']['count'] += 1 254 | status_details['files']['failed']['files'][file] = exception 255 | eventlog = eventlog_maker 256 | eventlog_maker = "" 257 | try: 258 | eventlog = xmltodict.parse(eventlog) 259 | except Exception as exception: 260 | print(f'[ERROR] Exception {exception} was generated while converting log to dict type from {file}') 261 | print(f'[INFO] During the conversion, the following log caused issue {eventlog}') 262 | status_details['files']['failed']['count'] += 1 263 | status_details['files']['failed']['files'][file] = exception 264 | with open(index+"-"+fileName+"-logs.xml", "a") as crashFileHandle: 265 | crashFileHandle.write(eventlog+"\n") 266 | continue 267 | 268 | eventlog = json.loads(json.dumps(eventlog)) 269 | eventlog = validate_event(eventlog) 270 | eventlog = correct_data_field_structure(eventlog) 271 | eventlog['file_name'] = file 272 | successful_events=successful_events+1 273 | if action == 'send' or action == 'auto': 274 | bulk.append({ 275 | "_index": index, 276 | "_type": index, 277 | "@timestamp": eventlog['Event']['System']['TimeCreated']['@SystemTime'], 278 | "body": eventlog 279 | }) 280 | if (len(bulk) == size): 281 | print(f'[INFO] Time Passed: {datetime.now()-status_details["time_start"]} -- Sending Logs from {file} to ELK: {successful_events}') 282 | bulk = send_now(ip,port,index,user,pwd,bulk,scheme) 283 | elif action == 'json': 284 | print(json.dumps(eventlog, indent=4)) 285 | status_details['files']['successful']['count'] += 1 286 | 
status_details['files']['successful']['files'][file] = successful_events 287 | print(f'[INFO] Elapsed Time: {datetime.now()-status_details["time_start"]} -- Sending Logs from {file} to ELK: {successful_events}') 288 | bulk = send_now(ip,port,index,user,pwd,bulk,scheme) 289 | print('[SUCCESS] Successfully processed the logs of file') 290 | 291 | 292 | def xml_to_json_to_es(action,path,ip,port,file,index,user,pwd,size,scheme): 293 | #define scope of files for converting xml to json 294 | if file == '*': 295 | for file in os.listdir(path): 296 | if file.endswith('.xml'): 297 | process_file(action,path,ip,port,file,index,user,pwd,size,scheme) 298 | else: 299 | if file.endswith('.xml'): 300 | process_file(action,path,ip,port,file,index,user,pwd,size,scheme) 301 | 302 | 303 | def process(action,path,ip,port,file,index,user,pwd,size,scheme): 304 | index = index.lower() 305 | if action == 'xml': 306 | evt_to_xml(path,file) 307 | if (action == 'send') or (action == 'json'): 308 | xml_to_json_to_es(action,path,ip,port,file,index,user,pwd,size,scheme) 309 | if (action == 'auto'): 310 | print("[CAUTION] AUTO only works with windows!") 311 | evt_to_xml(path,file) 312 | if not file == '*': 313 | if not file.endswith('.xml'): 314 | file = file + '.xml' 315 | xml_to_json_to_es(action,path,ip,port,file,index,user,pwd,size,scheme) 316 | 317 | 318 | #Perform a sanity check on log path and IP address provided by user 319 | def sanity_check(action,path,ip,file,scheme): 320 | if not action or (action != 'xml' and action != 'send' and action != 'json' and action != 'auto'): 321 | print('[ERROR] Please specify a valid action i.e. 
xml, send, json, auto') 322 | exit() 323 | if not path: 324 | print('[ERROR] Excavator needs to know the path to logs') 325 | exit() 326 | elif not os.path.isdir(''+path): 327 | print('[ERROR] Specified path does not exist') 328 | exit() 329 | if file != '*': 330 | if not os.path.isfile(path + check_os("slashes") + file): 331 | print('[ERROR] Specified file does not exist') 332 | exit() 333 | if not ip and (action=='auto' or action=='send'): 334 | print('[ERROR] IP not specified') 335 | exit() 336 | elif ip: 337 | sanity = re.match(r"[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}", ip) 338 | if not (bool(sanity) and all(map(lambda n: 0 <= int(n) <= 255, sanity.groups()))): 339 | print('[ERROR] Invalid IP address!') 340 | exit() 341 | if scheme and not (scheme == 'http' or scheme == 'https'): 342 | print('[ERROR] Invalid scheme!') 343 | exit() 344 | 345 | 346 | #main 347 | if __name__ == '__main__': 348 | # logger = setup_logger() 349 | status_details["time_start"] = datetime.now() 350 | print(f'[INFO] Time of start: {status_details["time_start"]}') 351 | parser = argparse.ArgumentParser('Excavator.py') 352 | parser.add_argument('-m', metavar='', type=str, help='auto, json, send, xml') 353 | parser.add_argument('-p', metavar='', type=str, help='path to Evtx files') 354 | parser.add_argument('-ip', metavar='', default=default_args.get('elasticsearch').get('ip'), help='elasticsearch IP. Default is {}'.format(default_args.get('elasticsearch').get('ip'))) 355 | parser.add_argument('-port', metavar='', type=int, default=default_args.get('elasticsearch').get('port'), help='elasticsearch port. Default is {}'.format(default_args.get('elasticsearch').get('port'))) 356 | parser.add_argument('-f', metavar='', type=str, default='*', help='evtx file to process. 
Only use for single file') 357 | parser.add_argument('-i', metavar='', type=str, default='excavator', help='name of ELK index') 358 | parser.add_argument('-user', metavar='', type=str, help='username of ELK for authorization') 359 | parser.add_argument('-pwd', metavar='', type=str, help='password of ELK for authorization') 360 | parser.add_argument('-s', metavar='', type=int, default=100, help='size of queue, default=100') 361 | parser.add_argument('-scheme', metavar='', type=str, default='http', help='http or https') 362 | if len(sys.argv) <= 1: 363 | parser.print_help() 364 | exit() 365 | args = parser.parse_args() 366 | sanity_check(args.m,args.p,args.ip,args.f,args.scheme) 367 | process(args.m,args.p,args.ip,args.port,args.f,args.i,args.user,args.pwd,args.s,args.scheme) 368 | summarize() -------------------------------------------------------------------------------- /Excavator/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Ebryx, LLC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Excavator/Readme.md: -------------------------------------------------------------------------------- 1 | # Excavator 2 | 3 | *A light-weight tool to parse Windows event-logs to XML and send them to ELK* 4 | 5 | __Requirments:__ 6 | 7 | - xmltodict 8 | - elasticsearch 9 | 10 | __Tested OS:__ 11 | 12 | - Windows 10 13 | - Ubuntu 18.04 14 | 15 | __Tested Python Version:__ 16 | 17 | - Python 3.7.2 18 | 19 | __What You Can Do With Excavator:__ 20 | 21 | - You can convert any or all evtx files in a path to XML __*-m xml*__ 22 | - You can send event-logs from any or all files in a given path to ELK __*-m send*__ 23 | - You can achieve both of the above tasks in a single run __*-m auto*__ 24 | - If you do not want to send the logs to ELK but only convert them to JSON instead, you can display the JSON output on your terminal __*-m json*__ 25 | 26 | __How Exacavtor Works:__ 27 | 28 | - Uses windows' own utility __wevtutil__ to parse the event-logs to XML 29 | - Requires *xmltodict* for converting the logs form XML to JSON 30 | - Requires *elasticsearch* to push the event-logs to your ELK 31 | - Windows platform is a must for converting logs to xml we use windows' own utility for that 32 | 33 | *NOTE: Excavator saves the XML files in the same directory after converting them from EVTX* 34 | 35 | ## Usage: 36 | 37 | ``` 38 | Excavator.py [-h] [-m ] [-p ] [-ip ] [-port ] 39 | [-f ] [-i ] [-user ] [-pwd ] 40 | [-s ] [-scheme ] 41 | 42 | optional arguments: 43 | -h, --help show this help message and exit 44 | -m xml, send, dont_send, auto 45 | -p path to Evtx files 46 | -ip elasticsearch IP 47 | -port elasticsearch port 
48 | -f evtx file to process. Only use for single file 49 | -i name of ELK index 50 | -user username of ELK for authorization 51 | -pwd password of ELK for authorization 52 | -s size of queue 53 | -scheme http or https 54 | ``` 55 | 56 | ## Examples: 57 | 58 | - Convert all evtx files in a directory to XML 59 | ``` 60 | python Excavator.py -m xml -p 61 | ``` 62 | - Convert a single file in a directory to XML 63 | ``` 64 | python Excavator.py -m xml -p -f 65 | ``` 66 | - Display all event-logs from all XML files in a directory as JSON 67 | ``` 68 | python Excavator.py -m json -p 69 | ``` 70 | - Send 1000 logs at a time into ELK from a single XML file generated from its corresponding evtx file 71 | ``` 72 | python Excavator.py -m send -p -f -ip -port -user -pwd -s 1000 73 | ``` 74 | - Send 100 logs at a time into ELK from a single EVTX file 75 | ``` 76 | python Excavator.py -m auto -p -f -ip -port -user -pwd 77 | ``` 78 | -------------------------------------------------------------------------------- /Excavator/requirements.txt: -------------------------------------------------------------------------------- 1 | xmltodict 2 | elasticsearch 3 | colorlog -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Quick & Dirty DFIR Scripts 2 | ## Excavator.py 3 | - Original Author: DFIR Team @ Ebryx LLC 4 | - Description: A light-weight tool to parse Windows event-logs to XML and send them to ELK 5 | - Usage: python Excavator.py -m send -p `` -ip `` -port `` -user `` -pwd `` -i `` -scheme http -s `` 6 | - Note: First use parameter `-m xml` to change all files in the folder to xml format 7 | ## exvt.py 8 | - Original Author: Makman @ Ebryx LLC 9 | - Description: It first checks the hash .. If exists, it'll grab those results .. 
otherwise upload and push it to a queue to be checked again for the results 10 | - Usage: python vt.py `/path/of/samples` 11 | - If we comment line 110 .. It'll just check for the hash without uploading 12 | - If we comment, 106 to 109 .. it'll upload everything .. and check for the results 13 | ## eXir-win.bat 14 | - Original Author: Ahmad @ Ebryx LLC 15 | - Contributor: heyibrahimkhan @ Ebryx LLC 16 | - Description: Let's just say its FASTIRfor Windows OS 17 | - Usage: run with admin exir.bat 18 | ## eXir.py 19 | - Original Author: Ishaq & Dan @ Ebryx LLC 20 | - Description: Inspired by FASTIR but better 21 | - Usage: run with sudo exir.py 22 | ## lies.py 23 | - Original Author: UK @ Ebryx LLC 24 | - Description: A script to ingest IOC scanner result files to ES in bulk 25 | - Usage: lies.py 26 | - Change variable parameters in the script enclosed in <> 27 | ## eXir-lin-helper 28 | - Original Author: heyibrahimkhan @ Ebryx LLC 29 | - Description: A script to help with output of eXir by neatly highlighting the results 30 | - Usage: /bin/bash find_interesting.sh 31 | - Read the script to see the params details 32 | ## eXir-win-helper 33 | - Original Author: heyibrahimkhan @ Ebryx LLC 34 | - Description: A script to help with output of eXir-win by converting it to CSV 35 | - Usage: script.py 36 | - Change variable parameters in the script enclosed in <> 37 | ## dbc-helper 38 | - Original Author: heyibrahimkhan @ Ebryx LLC 39 | - Description: Scripts to help with DeepBlueCLI bulk execution 40 | - Usage: - preprocess.ps1 - execute-custom.ps1 41 | - Change variable parameters in the script enclosed in <> 42 | ## vetter-py 43 | - Original Author: SyeedHasan @ Ebryx LLC 44 | - Description: Calculated hashes for files and run a scan against VT 45 | - Usage: python vetter.py -h 46 | ## ausys.bat 47 | - Original Author: SyeedHasan @ Ebryx LLC 48 | - Description: Export system audit configurations and policies for a review 49 | - Usage: ausys.bat 50 | - Pre-requisites: 
Administrator privileges 51 | -------------------------------------------------------------------------------- /ausys/ausys.bat: -------------------------------------------------------------------------------- 1 | @REM 2 | @REM Ausys - An Advanced Audit Policy Configuration Checker by Ebryx (Pvt. Ltd) 3 | @REM Date: 20-10-2020 4 | @REM Version: 0.3 5 | @REM Description: Check the current status of advanced audit policy configurations in the system 6 | @REM Pre-requisites: Requires admin privileges to execute 7 | @REM 8 | 9 | cls 10 | @echo off 11 | 12 | ::: 13 | ::: ___ 14 | ::: / _ |__ _____ __ _____ 15 | ::: / __ / // (_-nul 2>&1 36 | if %errorLevel% == 0 ( 37 | echo [+] SUCCESS: Administrative privileges are available. 38 | echo [+] Continuing the script's execution 39 | echo. 40 | ) else ( 41 | echo [-] Failure: Current permissions are inadequate to execute the script. Please re-run the console window as an administrator or execute the script as such... 42 | echo [-] Halting the script's execution... 43 | timeout 5 44 | Exit /B 1 45 | ) 46 | 47 | set host=%COMPUTERNAME% 48 | set currPath=%~dp0 49 | cd %currPath% 50 | 51 | @REM 52 | @REM Return audit policy configurations 53 | @REM 54 | echo Advanced Audit Policy Configurations 55 | echo [+] Acquiring the system's audit policy configurations using 'auditpol.exe' 56 | auditpol.exe /get /Category:* > %host%_sys_auditpol.txt 57 | echo [+] Acquired audit policy configurations and saved to disk. Continuing... 58 | echo. 59 | 60 | @REM 61 | @REM Return PowerShell based logging 62 | @REM 63 | echo PowerShell Logging Status 64 | echo [+] Retrieving PowerShell logging status from the system's Registry hives 65 | echo Module Logging Status: > %host%_powershell_logging.txt 66 | reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\ModuleLogging" >> %host%_powershell_logging.txt 67 | IF errorlevel 1 ( 68 | echo Disabled >> %host%_powershell_logging.txt 69 | ) 70 | echo. 
>> %host%_powershell_logging.txt 71 | echo Script-block Logging Status: >> %host%_powershell_logging.txt 72 | reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\ScriptBlockLogging" >> %host%_powershell_logging.txt 73 | IF errorlevel 1 ( 74 | echo Disabled >> %host%_powershell_logging.txt 75 | ) 76 | echo. >> %host%_powershell_logging.txt 77 | echo Transcription Status for PowerShell: >> %host%_powershell_logging.txt 78 | reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Policies\Microsoft\Windows\PowerShell\Transcription" >> %host%_powershell_logging.txt 79 | IF errorlevel 1 ( 80 | echo Disabled >> %host%_powershell_logging.txt 81 | ) 82 | echo [+] Acquired PowerShell logging status from the system's Registry hives 83 | echo. 84 | 85 | @REM 86 | @REM Return audit settings 87 | @REM 88 | echo Audit Trail 89 | echo [+] Retrieving audit trail of the system from the Registry hives 90 | echo Audit Settings on the System: > %host%_auditsettings.txt 91 | reg query "HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows\CurrentVersion\Policies\System\Audit" >> %host%_auditsettings.txt 92 | IF errorlevel 1 ( 93 | echo Disabled >> %host%_auditsettings.txt 94 | ) 95 | echo [+] Acquired audit trail of the sytem and stored to disk 96 | echo. 97 | 98 | @REM 99 | @REM Checking log sources 100 | @REM 101 | echo Log Channels [Size, Retention Policies, Access Times] 102 | echo [+] Retrieving key information about log sources using wevtutil 103 | echo Channel: Application > %host%_logsources.txt 104 | wevtutil gli Application >> %host%_logsources.txt 105 | echo. >> %host%_logsources.txt 106 | echo Channel: Security >> %host%_logsources.txt 107 | wevtutil gli Security >> %host%_logsources.txt 108 | echo. >> %host%_logsources.txt 109 | echo Channel: System >> %host%_logsources.txt 110 | wevtutil gli System >> %host%_logsources.txt 111 | echo. 
>> %host%_logsources.txt 112 | echo Channel: Powershell-Admin >> %host%_logsources.txt 113 | wevtutil gli Microsoft-Windows-PowerShell/Admin >> %host%_logsources.txt 114 | IF errorlevel 1 ( 115 | echo Disabled >> %host%_logsources.txt 116 | ) 117 | echo. >> %host%_logsources.txt 118 | echo Channel: Powershell-Operational >> %host%_logsources.txt 119 | wevtutil gli Microsoft-Windows-PowerShell/Operational >> %host%_logsources.txt 120 | IF errorlevel 1 ( 121 | echo Disabled >> %host%_logsources.txt 122 | ) 123 | echo. >> %host%_logsources.txt 124 | echo [+] Acuiqred key information about log sources and stored to disk 125 | 126 | @REM 127 | @REM Execution Completed 128 | @REM 129 | echo. 130 | echo [+] EXECUTION STATUS: Complete 131 | echo [+] Analyze results from auditsettings.txt, logsources.txt, powershell_logging.txt, and sys_auditpol.txt for a review of the logging configurations... 132 | timeout 10 -------------------------------------------------------------------------------- /dbc-helper/1-preprocess.ps1: -------------------------------------------------------------------------------- 1 | param([string]$path=".") 2 | 3 | $pwd=(Invoke-Expression "pwd") 4 | 5 | cd $path 6 | 7 | Get-ChildItem -File | Rename-Item -NewName { $_.Name -replace '%4','' } 8 | Get-ChildItem -File | Rename-Item -NewName { $_.Name -replace ' ','' } 9 | 10 | cd $pwd -------------------------------------------------------------------------------- /dbc-helper/2-execute-custom.ps1: -------------------------------------------------------------------------------- 1 | <# 2 | .SYNOPSIS 3 | A PowerShell module for executing DeepBlueCli for hunting on Windows event logs 4 | 5 | .DESCRIPTION 6 | Just pass the folder path to event logs and script with automatically execute on System.evtx, Security.evtx, Application.evtx, *powershell*.evtx and store findings in relevant files 7 | 8 | .Example 9 | execute-custom-deepbluecli.ps1 -folder \path\to\folder -output_folder_prefix mycustomlogoutput 10 | #> 11 
| 12 | param ([string]$evtx_folder=".", [string]$deepbluecli_folder=".", [string]$output_folder_prefix=(Get-Date -UFormat "%Y%m%d%H%M%S")) 13 | 14 | function Main { 15 | if ($evtx_folder -eq ""){ 16 | Write-Host("Please provide a folder name ...") 17 | Write-Host("Exiting script ...") 18 | exit 19 | } 20 | else { 21 | Write-Host("Executing on folder $evtx_folder ...") 22 | } 23 | if ($output_folder_prefix -eq ""){ 24 | Write-Host("Log File prefix can't be empty ...") 25 | Write-Host("Exiting script...") 26 | exit 27 | } 28 | else { 29 | Write-Host("Output folder prefix would be $output_folder_prefix ...") 30 | } 31 | 32 | $output_folder_name="out-deepbluecli-$output_folder_prefix" 33 | mkdir -p $output_folder_name 34 | 35 | # ForEach($filename in ls $evtx_folder\*.evtx | Get-ChildItem -Name){ 36 | # if (($filename -eq "Security.evtx") -or ($filename -like "*powershell*") -or ($filename -like "*winrm*") -or ($filename -like "*wmi*") -or ($filename -eq "System.evtx") -or ($filename -like "Application.evtx")){ 37 | # Write-Host("Executing Deepbluecli on $filename ...") 38 | # Invoke-Expression "$deepbluecli_folder\DeepBlue.ps1 $evtx_folder\$filename | tee -a $output_folder_name\$filename.log" 39 | # } 40 | # } 41 | 42 | ForEach($filename in ls $evtx_folder\*.evtx | Get-ChildItem -Name){ 43 | Write-Host("Executing Deepbluecli on $filename ...") 44 | Invoke-Expression "$deepbluecli_folder\DeepBlue.ps1 $evtx_folder\$filename | tee -a $output_folder_name\$filename.log" 45 | } 46 | 47 | } 48 | 49 | Main -------------------------------------------------------------------------------- /eXir-lin-helper/find_interesting.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Example 4 | # /bin/bash script.sh /path/to/patterns_file /path/to/target/file 5 | 6 | # Example for secure file 7 | # grep -r -o -E 
"(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)" 8 | 9 | patterns_file=$1; 10 | target_file=$2; 11 | temp_file="test_`date +%s-%N`.txt"; 12 | 13 | grep --color=always -i -n -r -E -f $patterns_file $target_file; 14 | # grep --color=always -i -n -r -E -f $patterns_file $target_file | tee -a $temp_file > /dev/null 2>&1; 15 | # zgrep --color=always -i -n -f $patterns_file $target_file | tee -a $temp_file > /dev/null 2>&1; 16 | 17 | # only gather unique names from the file 18 | # cat $temp_file | uniq; 19 | # cat $temp_file; 20 | # rm $temp_file; -------------------------------------------------------------------------------- /eXir-lin-helper/patterns.txt: -------------------------------------------------------------------------------- 1 | adduser 2 | admin 3 | alias 4 | audit 5 | aws 6 | azure 7 | b64 8 | base 9 | base64 10 | bash 11 | black 12 | chmod 13 | cron 14 | crontab 15 | curl 16 | docx 17 | echo 18 | export 19 | find 20 | firewall 21 | gcp 22 | gedit 23 | groupadd 24 | invoice 25 | ip 26 | iptables 27 | key 28 | kill 29 | kshrc 30 | locate 31 | mysql 32 | nano 33 | nc 34 | net 35 | netcat 36 | netstat 37 | nmap 38 | pass 39 | passwd 40 | password 41 | pdf 42 | php 43 | ping 44 | proc 45 | python 46 | reverse 47 | rm 48 | root 49 | route 50 | scp 51 | script 52 | sed 53 | service 54 | sftp 55 | sh 56 | shadow 57 | shell 58 | source 59 | ss 60 | ssh 61 | sudo 62 | systemctl 63 | tar 64 | telnet 65 | tmp 66 | user 67 | useradd 68 | vnc 69 | wget 70 | which 71 | who 72 | zip 73 | zshrc 74 | 777 75 | 1521 76 | 3306 77 | (25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?) 
78 | (([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])) 79 | \./ -------------------------------------------------------------------------------- /eXir-win-helper/requirements.txt: -------------------------------------------------------------------------------- 1 | colorlog 2 | pyspellchecker -------------------------------------------------------------------------------- /eXir-win-helper/script.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | from datetime import datetime 3 | from utils import setup_logger 4 | from spellchecker import SpellChecker 5 | # from argparse import RawTextHelpFormatter 6 | 7 | 8 | # global vars 9 | g_vars = { 10 | "time": { 11 | "start": datetime.utcnow(), 12 | "end": "" 13 | }, 14 | "output_file": "", 15 | "valid_operations": { 16 | "display_dns": "Converts display DNS file into a CSV for easier analysis", 17 | "firewall_rules": "Parses the output of firewall rule dumps", 18 | "ntst_anb": "Parses the output from ntsts_anb file of exir-win", 19 | "sc_query": "Parses sc query files. 
Module isn't complete yet" 20 | }, 21 | 'verbosity': 'INFO', 22 | 'logger': setup_logger(), 23 | 'replacement_keywords': { 24 | # string1: replace_with_string2 25 | ',': '|=|', 26 | "\n": "", 27 | "\t": "" 28 | }, 29 | "whitelists": { 30 | "firewall_rules": [ 31 | "BranchCache", 32 | "" 33 | ] 34 | } 35 | } 36 | 37 | 38 | def replace_keywords(mstr): 39 | try: 40 | for k, v in g_vars.get('replacement_keywords').items(): 41 | if k in mstr: mstr = mstr.replace(str(k), str(v)) 42 | except Exception as e: 43 | g_vars.get('logger').error('Exception {} occurred in replace_keywords for {}'.format(e, mstr)) 44 | return mstr 45 | 46 | 47 | def is_spelling_correct(word_list): 48 | ret = True 49 | if len(SpellChecker().unknown(word_list)) > 0: ret = False 50 | if not ret: g_vars.get('logger').warning('Spell Check failed for {}'.format(word_list)) 51 | return ret 52 | 53 | 54 | def parser_sc_query(input_file, output_file): 55 | log_msg("#,ServiceName,DisplayName,Type,State,StateDetails,Win32ExitCode,ServiceExitCode,CheckPoint,WaitHint,Interesting", output_file) 56 | try: 57 | len_records = 0 58 | with open(input_file) as in_file: 59 | in_file = in_file.readlines() 60 | len_in_file = len(in_file) 61 | g_vars.get('logger').info('Num lines: {}'.format(len_in_file)) 62 | idx = 1 # Since this is the first readable line in file 63 | while True: 64 | mstr = [] 65 | interesting = '' 66 | idx2 = 0 67 | for idx2 in range(0, 9): 68 | g_vars.get('logger').debug('file line num: {}'.format(idx + idx2)) 69 | if idx + idx2 >= len_in_file: break 70 | line = replace_keywords(in_file[idx + idx2].rstrip('\n').lstrip(' ')) 71 | if idx2 == 8 and line == '': idx2 -= 1 72 | g_vars.get('logger').info('line: {}'.format(line)) 73 | if 'STOPPED' in in_file[idx + idx2 - 1] and idx2 == 4: mstr.append('') 74 | elif ':' in line: mstr.append(line.split(':')[1].lstrip(' ')) 75 | else: mstr.append(line.lstrip(' ')) 76 | # if idx2 in [1] and not is_spelling_correct(line.split(':')[1].lstrip(' ').split(' ')): 77 | 
# interesting += 'DisplayName spellings not correct --- ' 78 | mstr.append(interesting.rstrip(' --- ')) 79 | len_records += 1 80 | log_msg("{},{}".format(len_records, ','.join(mstr)), output_file) 81 | # input('Press to continue...') 82 | idx += idx2 + 2 83 | if idx > len_in_file: 84 | print("No more records to look for...") 85 | break 86 | except Exception as e: 87 | g_vars.get('logger').error("Exception {} occurred in parser_sc_query...".format(e)) 88 | 89 | 90 | def parser_ntst_anb(input_file, output_file): 91 | log_msg("#,Executable,Executable2,Protocol,LocalAddress,LocalPort,ForeignAddress,ForeignPort,State", output_file) 92 | try: 93 | records = [] 94 | with open(input_file) as in_file: 95 | in_file = in_file.readlines() 96 | idx = 4 # Since this is the first readable line in display_dns output file 97 | while True: 98 | next_idx = 2 99 | mstr = [] 100 | jump = False 101 | # executable info 102 | executable_info = in_file[idx+1].rstrip('\n') 103 | if 'Can not obtain ownership information' in executable_info: 104 | mstr.append('') 105 | mstr.append('Can not obtain ownership information') 106 | elif '[' in executable_info: 107 | mstr.append('') 108 | mstr.append(replace_keywords(in_file[idx+1].rstrip('\n').replace('[', '').replace(']', '').lstrip(' ').rstrip(' '))) 109 | elif len(executable_info) > 56: 110 | mstr.append('') 111 | mstr.append('') 112 | next_idx = 1 113 | else: 114 | jump = True 115 | mstr.append(replace_keywords(in_file[idx+1].rstrip('\n').replace('[', '').replace(']', '').lstrip(' ').rstrip(' '))) 116 | mstr.append(replace_keywords(in_file[idx+2].rstrip('\n').replace('[', '').replace(']', '').lstrip(' ').rstrip(' '))) 117 | g_vars.get('logger').debug('Executable info extracted -- {}...'.format(mstr)) 118 | # network info 119 | g_vars.get('logger').debug(in_file[idx]) 120 | network_info = [i for i in in_file[idx].rstrip('\n').split(" ") if i != ''] 121 | g_vars.get('logger').debug('Starting extraction of network info...') 122 | 
g_vars.get('logger').debug(network_info) 123 | for idx2, item in enumerate(network_info): 124 | g_vars.get('logger').debug('Currently processing idx - {} and item - {}'.format(idx2, item)) 125 | if idx2 == 1 or idx2 == 2: 126 | if ']' in item: 127 | addr_port = item.split(']:') 128 | addr_port[0] += ']' 129 | else: addr_port = item.split(':') 130 | g_vars.get('logger').debug(addr_port) 131 | for item2 in addr_port: mstr.append(item2) 132 | else: mstr.append(item) 133 | ### 134 | idx += next_idx + (1 if jump else 0) 135 | records.append(','.join(mstr)) 136 | records[-1] = "{},{}".format(len(records), records[-1]) 137 | log_msg(records[-1], output_file) 138 | # input('Press any key to continue...') 139 | if idx >= len(in_file) - 1: 140 | g_vars.get('logger').info("No more records to look for...") 141 | break 142 | except Exception as e: 143 | g_vars.get('logger').error("Exception {} occurred in parser_ntst_anb...".format(e)) 144 | 145 | 146 | def highlight_interesting_logs(m_list, mstr, list_type='whitelist'): 147 | ret = 'Intersting' 148 | if list_type == 'whitelist': 149 | for item in m_list: 150 | if item in mstr: 151 | ret = '' 152 | break 153 | return ret 154 | 155 | 156 | def parser_firewall_rules(input_file, output_file): 157 | log_msg("#,RuleName,Enabled,Direction,Profiles,Grouping,LocalIP,RemoteIP,Protocol,LocalPort,RemotePort,EdgeTraversal,Action,Interesting", output_file) 158 | try: 159 | records = [] 160 | with open(input_file) as in_file: 161 | in_file = in_file.readlines() 162 | g_vars.get('logger').debug('File to lines...') 163 | idx = 1 # Since this is the first readable line in display_dns output file 164 | while True: # each firewall rule 165 | mstr = '' 166 | idx2 = 0 167 | while True: # each item inside firewall rule 168 | if idx + 1 > len(in_file): 169 | idx += 1 170 | g_vars.get('logger').info("No more records to look for inner loop...") 171 | break 172 | if "----------------------------------------------------------------------" in 
in_file[idx+idx2]: 173 | idx2 += 1 174 | g_vars.get('logger').debug('First if cleared...') 175 | continue 176 | elif in_file[idx+idx2] == "\n": 177 | idx += idx2 + 1 178 | g_vars.get('logger').debug('First elif cleared...') 179 | break 180 | splitted = in_file[idx+idx2].split(":") 181 | g_vars.get('logger').debug('Split complete...') 182 | if len(splitted) < 2: 183 | splitted = replace_keywords(splitted[0].lstrip(" ").rstrip("\n")) 184 | g_vars.get('logger').debug('Replacement complete...') 185 | mstr = mstr[:-1] 186 | else: 187 | splitted = replace_keywords(splitted[1].lstrip(" ").rstrip("\n")) 188 | mstr += "{},".format(splitted) 189 | idx += 1 190 | if mstr != '': 191 | # highlight inetersting logs 192 | mstr += highlight_interesting_logs(g_vars.get('whitelists').get('firewall_rules'), mstr.split(',')[0], list_type='whitelist') 193 | records.append(mstr) 194 | records[-1] = "{},{}{}".format(len(records), records[-1], ','*(9 - mstr.count(','))) 195 | log_msg(records[-1], output_file) 196 | if idx > len(in_file): 197 | g_vars.get('logger').info("No more records to look for outer loop...") 198 | break 199 | except Exception as e: 200 | g_vars.get('logger').error("Exception {} occurred in parser_firewall_rules...".format(e)) 201 | 202 | 203 | def find_next_dspdns_index(in_file, idx): 204 | try: 205 | len_in_file = len(in_file) 206 | while True: 207 | idx += 1 208 | if idx < len_in_file: 209 | g_vars.get('logger').debug('finding next idx - line: {}'.format(in_file[idx].replace('\n', ''))) 210 | if "Record Name" in in_file[idx]: 211 | g_vars.get('logger').debug('finding next idx in Record Name - idx: {}'.format(idx)) 212 | break 213 | elif "----------------------------------------" in in_file[idx]: 214 | g_vars.get('logger').debug('finding next idx in ----------- - idx: {}'.format(idx)) 215 | idx -= 1 216 | break 217 | else: pass 218 | else: 219 | g_vars.get('logger').debug('finding next idx in len_file - idx: {}'.format(idx)) 220 | break 221 | return idx 222 | except 
Exception as e: 223 | g_vars.get('logger').error("Exception {} occurred in find_next_dspdns_index...".format(e)) 224 | return idx 225 | 226 | 227 | def parser_display_dns(input_file, output_file): 228 | log_msg("#,Record,RecordName,RecordType,TTL,DataLength,Section", output_file) 229 | try: 230 | records = [] 231 | with open(input_file) as in_file: 232 | in_file = in_file.readlines() 233 | len_in_file = len(in_file) 234 | idx = 3 # Since this is the first readable line in display_dns output file 235 | record = '' 236 | while True: 237 | idx2 = 0 238 | g_vars.get('logger').debug('=============Started iteration for idx: {} - line: {}'.format(idx, in_file[idx].replace('\n', ''))) 239 | g_vars.get('logger').debug('File idx: {}'.format(idx)) 240 | mstr = '' 241 | if '----------------------------------------' in in_file[idx+1]: 242 | if 'No records' in in_file[idx+2] or 'Name does not exist.' in in_file[idx+2]: 243 | mstr += in_file[idx].rstrip('\n').lstrip(' ') + ',' + in_file[idx+2].rstrip('\n').lstrip(' ') 244 | if 'Record Name' in in_file[idx+2]: 245 | for i in range(2, 8): 246 | mstr += replace_keywords(in_file[idx-idx2+i].split(" : ")[1]) + "," 247 | idx += 7 248 | 249 | if idx < len_in_file: 250 | if idx + 4 < len_in_file and '----------------------------------------' in in_file[idx+4]: 251 | idx += 3 252 | else: idx += 1 253 | g_vars.get('logger').debug('Get all values - new idx: {}'.format(idx)) 254 | if mstr != '': 255 | records.append(mstr) 256 | records[-1] = "{},{}".format(len(records), records[-1]) 257 | log_msg(records[-1], output_file) 258 | if idx >= len_in_file - 2: 259 | g_vars.get('logger').info("No more records to look for...") 260 | break 261 | except Exception as e: 262 | g_vars.get('logger').error("Exception {} occurred in parser_display_dns...".format(e)) 263 | 264 | 265 | 266 | def log_msg(mstr, output_file): 267 | with open(output_file, 'a') as o: 268 | s_mstr = str(mstr) 269 | g_vars.get('logger').info("{} --- {}".format(datetime.utcnow(), 
s_mstr)) 270 | o.write('{}\n'.format(s_mstr)) 271 | 272 | 273 | def arg_parser(): 274 | parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter) 275 | parser.add_argument("-i", "--input_file", help="/path/to/input/file. Default is {}".format(None), default=None, type=str) 276 | parser.add_argument("-v", "--verbosity", help="Verbosity level of the script. Default is {}".format(g_vars.get('verbosity')), default=g_vars.get('verbosity'), type=str) 277 | parser.add_argument("-p", "--operation", help="Type of operation to be performed on the input_file.\n1. display_dns --- {}\n2. firewall_rules --- {}\n3. ntst_anb --- {}\n4. sc_query --- {}\n".format( 278 | g_vars.get('valid_operations').get('display_dns'), 279 | g_vars.get('valid_operations').get('firewall_rules'), 280 | g_vars.get('valid_operations').get('ntst_anb'), 281 | g_vars.get('valid_operations').get('sc_query') 282 | ), 283 | default=None, type=str 284 | ) 285 | args = parser.parse_args() 286 | g_vars["output_file"] = "report-{}-{}.csv".format(str(args.operation).replace(" ", ""), g_vars["time"]["start"].timestamp()) 287 | g_vars.get('logger').info("Output file name {}...".format(g_vars['output_file'])) 288 | return args 289 | 290 | 291 | def main(): 292 | args = arg_parser() 293 | g_vars.get('logger').setLevel(args.verbosity) 294 | 295 | print("Checking if {} is a valid operation...".format(args.operation)) 296 | if args.operation in g_vars["valid_operations"]: 297 | print("Valid operation {} detected...".format(args.operation)) 298 | if args.operation == "display_dns": 299 | parser_display_dns(args.input_file, g_vars["output_file"]) 300 | elif args.operation == "firewall_rules": 301 | parser_firewall_rules(args.input_file, g_vars["output_file"]) 302 | elif args.operation == "ntst_anb": 303 | parser_ntst_anb(args.input_file, g_vars["output_file"]) 304 | elif args.operation == "sc_query": 305 | parser_sc_query(args.input_file, g_vars["output_file"]) 306 | else: 307 | 
g_vars.get('logger').warning("Operation {} is unknown...".format(args.operation)) 308 | else: 309 | g_vars.get('logger').warning("Operation {} is unknown...".format(args.operation)) 310 | 311 | 312 | if __name__ == "__main__": 313 | log_msg("Start Time: {}".format(g_vars["time"]["start"]), "output.log") 314 | main() 315 | # print(g_vars.get('valid_operations').get('sc_qtdg')) 316 | g_vars["time"]["end"] = datetime.utcnow() 317 | log_msg("End Time: {}".format(g_vars["time"]["end"]), "output.log") 318 | log_msg("Time Difference: {}".format(g_vars["time"]["end"] - g_vars["time"]["start"]), "output.log") 319 | log_msg("Output file: {}".format(g_vars.get('output_file')), "output.log") -------------------------------------------------------------------------------- /eXir-win-helper/utils.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import colorlog 3 | 4 | 5 | def setup_logger(log_fmt="%(log_color)s%(asctime)s:%(levelname)s:%(message)s", log_file_name=".output.log", level='DEBUG', start_with_empty_log_file=True): 6 | 7 | # Create an empty log file 8 | if start_with_empty_log_file: 9 | with open(log_file_name, 'w') as o: pass 10 | 11 | formatter = colorlog.ColoredFormatter( 12 | log_fmt, 13 | datefmt='%D' 14 | ) 15 | 16 | logger = logging.getLogger() 17 | 18 | handler2 = logging.FileHandler(log_file_name) 19 | handler = logging.StreamHandler() 20 | handler.setFormatter(formatter) 21 | logger.addHandler(handler) 22 | logger.addHandler(handler2) 23 | logger.setLevel(level) 24 | 25 | return logger -------------------------------------------------------------------------------- /eXir-win/exir.bat: -------------------------------------------------------------------------------- 1 | @ECHO ON 2 | 3 | 4 | REM ######################################################### 5 | REM # Sample Execution command 1. 
Non-intrusive mode # 6 | REM # exir.bat # 7 | REM # ########################## # 8 | REM # Sample Execution command 2. Medium intrusive mode # 9 | REM # exir.bat medium # 10 | REM ######################################################### 11 | 12 | 13 | set host=%COMPUTERNAME% 14 | mkdir C:\artifacts-%host% 15 | mkdir C:\artifacts-%host%\%host%_evtx 16 | mkdir C:\artifacts-%host%\info 17 | reg Query "HKLM\Hardware\Description\System\CentralProcessor\0" | find /i "x86" > NUL && set OS=32BIT || set OS=64BIT 18 | wmic useraccount get disabled,domain,name,sid > C:\artifacts-%host%\info\%host%_user_accounts_and_sids.txt 19 | REM listed all user accounts and their SIDs respectively 20 | wmic sysaccount get domain,name,sid > C:\artifacts-%host%\info\%host%_system_accounts_and_sids.txt 21 | REM listed all system accounts and their SIDs respectively 22 | wmic group get domain,name,sid > C:\artifacts-%host%\info\%host%_domain_groups_and_sids.txt 23 | REM listed all groups domain memberships and their SIDs respectively 24 | wmic net localgroup Administrators > C:\artifacts-%host%\info\%host%_localadmins_and_sids.txt 25 | REM listed all localadministrator group members 26 | mkdir C:\artifacts-%host%\registry 27 | for /f %%A in ('wmic useraccount get sid') DO ( 28 | reg query HKEY_USERS\%%A\SOFTWARE\Sysinternals\ /s > C:\artifacts-%host%\registry\%%A_si_entries.txt 29 | ) 30 | REM listed all sysinternals utilities entries 31 | if %OS%==32BIT ( 32 | echo "OS is 32 bit" 33 | resources\logonsessions\.\logonsessions.exe -accepteula -c > C:\artifacts-%host%\%host%_32bit_logonssns-c.txt 34 | resources\logonsessions\.\logonsessions.exe -accepteula -p > C:\artifacts-%host%\%host%_32bit_logonssns-p.txt 35 | ) 36 | if %OS%==64BIT ( 37 | echo "OS is 64 bit" 38 | resources\logonsessions\.\logonsessions64.exe -accepteula -c > C:\artifacts-%host%\%host%_64bit_logonssns-c.txt 39 | resources\logonsessions\.\logonsessions64.exe -accepteula -p > C:\artifacts-%host%\%host%_64bit_logonssns-p.txt 40 
| ) 41 | REM logonssessions command executed successfully! 42 | if %OS%==32BIT ( 43 | echo "OS is 32 bit" 44 | resources\psloggedon\.\PsLoggedon.exe -accepteula > C:\artifacts-%host%\%host%_32bit_psloggedon.txt 45 | ) 46 | if %OS%==64BIT ( 47 | echo "OS is 64 bit" 48 | resources\psloggedon\.\PsLoggedon64.exe -accepteula > C:\artifacts-%host%\%host%_64bit_psloggedon.txt 49 | ) 50 | REM psloggedon command executed successfully! 51 | netstat -anb > C:\artifacts-%host%\%host%_ntst-anb.txt 52 | REM netstat command executed successfully! 53 | ipconfig /displaydns > C:\artifacts-%host%\%host%_dspdns.txt 54 | REM ipconfig command executed successfully! 55 | schtasks > C:\artifacts-%host%\%host%_schtsk.txt 56 | schtasks /query > C:\artifacts-%host%\%host%_schtsk-qry.txt 57 | REM schtasks command executed successfully! 58 | sc query > C:\artifacts-%host%\%host%_sq.txt 59 | sc query eventlog > C:\artifacts-%host%\%host%_sqet.txt 60 | sc queryex eventlog > C:\artifacts-%host%\%host%_sqel.txt 61 | sc query type= driver > C:\artifacts-%host%\%host%_sqtd.txt 62 | sc query type= service > C:\artifacts-%host%\%host%_sqts.txt 63 | sc query state= all > C:\artifacts-%host%\%host%_sqsa.txt 64 | sc query bufsize= 50 > C:\artifacts-%host%\%host%_sqb50.txt 65 | sc query ri= 14 > C:\artifacts-%host%\%host%_sqr14.txt 66 | sc query type= interact > C:\artifacts-%host%\%host%_scqti.txt 67 | sc query type= driver group= NDIS > C:\artifacts-%host%\%host%_qtdg.txt 68 | REM sc commands executed successfully! 69 | copy c:\windows\system32\winevt\logs\* c:\artifacts-%host%\%host%_evtx\ 70 | REM event logs copied successfully! 71 | netsh advfirewall firewall show rule name=all > c:\artifacts-%host%\%host%_firewall_rules.txt 72 | REM firewall rules copied successfully! 
73 | powershell -command "[System.IO.Directory]::GetFiles(\"\\.\\pipe\\\")" >> c:\artifacts-%host%\%host%_pipes.txt 74 | REM All pipe names copied successfully 75 | powershell -command "get-childitem \\.\pipe\\" >> c:\artifacts-%host%\%host%_pipes_details.txt 76 | REM All pipe names copied with more information successfully 77 | mkdir C:\artifacts-%host%\Powershell\PowerShell_history\ 78 | for /f "tokens=3" %%A in ('reg query "HKLM\Software\Microsoft\Windows NT\CurrentVersion\ProfileList" /s /v ProfileImagePath ^| find "REG_EXPAND_SZ"') do ( 79 | if exist %%A\AppData\Roaming\Microsoft\Windows\PowerShell\PSReadline\ ( 80 | for /f "tokens=3 delims=\" %%a in ('echo %%A') do ( 81 | mkdir C:\artifacts-%host%\Powershell\PowerShell_history\%%a 82 | copy %%A\AppData\Roaming\Microsoft\Windows\PowerShell\PSReadline\ConsoleHost_history* C:\artifacts-%host%\Powershell\PowerShell_history\%%a\ 83 | ) 84 | ) 85 | ) 86 | REM Powershell command history fetched successfully! 87 | mkdir C:\artifacts-%host%\Powershell\PowerShell_transcripts\ 88 | for /f "tokens=3" %%A in ('reg query "HKLM\Software\Microsoft\Windows NT\CurrentVersion\ProfileList" /s /v ProfileImagePath ^| find "REG_EXPAND_SZ"') do ( 89 | if exist %%A\Documents\ ( 90 | for /f "tokens=3 delims=\" %%a in ('echo %%A') do ( 91 | mkdir C:\artifacts-%host%\Powershell\PowerShell_transcripts\%%a 92 | copy %%A\Documents\PowerShell_transcript* C:\artifacts-%host%\Powershell\PowerShell_transcripts\%%a\ 93 | ) 94 | ) 95 | ) 96 | REM Powershell transcript logs fetched successfully! 
97 | systeminfo > C:\artifacts-%host%\%host%_systeminfo.txt 98 | REM collected system information successfully 99 | for /f "tokens=3" %%A in ('reg query "HKLM\Software\Microsoft\Windows NT\CurrentVersion\ProfileList" /s /v ProfileImagePath ^| find "REG_EXPAND_SZ"') do ( 100 | if exist %%A\AppData\Local\Microsoft\AzureAD\Powershell\ ( 101 | for /f "tokens=3 delims=\" %%a in ('echo %%A') do ( 102 | mkdir C:\artifacts-%host%\Powershell\PowerShell_log_AD\%%a 103 | copy %%A\AppData\Local\Microsoft\AzureAD\Powershell\AzureADPowershell* C:\artifacts-%host%\Powershell\PowerShell_log_AD\%%a\ 104 | ) 105 | ) 106 | ) 107 | REM collected Powershell log file for AD as well! 108 | if [%1]==[] ( 109 | echo "Will not be running in any mode other than normal..." 110 | goto DONE 111 | ) else ( 112 | if NOT %1==medium ( 113 | echo "Parameter %1 passed is not correct"... 114 | echo "Will not be running in medium mode..." 115 | goto DONE 116 | ) else ( 117 | echo "Will be executing in medium mode..." 118 | timeout 3 119 | dir /r /s C:\ | findstr /r "$DATA Directory" >> C:\artifacts-%host%\%host%_ads.txt 120 | REM successfully collected names of all ADS! 121 | if %OS%==32BIT ( 122 | echo "OS is 32 bit" 123 | resources\autoruns\.\autorunsc.exe -accepteula * -a * -h -s -c -o C:\artifacts-%host%\%host%_32bit_autoruns.csv 124 | ) 125 | if %OS%==64BIT ( 126 | echo "OS is 64 bit" 127 | resources\autoruns\.\autorunsc64.exe -accepteula * -a * -h -s -c -o C:\artifacts-%host%\%host%_64bit_autoruns.csv 128 | ) 129 | REM Autoruns executed successfully 130 | mkdir C:\artifacts-%host%\winaudit 131 | resources\winaudit\.\WinAudit.exe /r=gsoPxuTUeERNtzDaIbMpmidcSArCOHG /f=C:\artifacts-%host%\winaudit\%host%_winaudit-report.html /l=C:\artifacts-%host%\winaudit\%host%_winaudit.log /T=datetime 132 | REM WinAudit.exe executed successfully 133 | ) 134 | ) 135 | :DONE 136 | echo Done... 
-------------------------------------------------------------------------------- /eXir-win/resources/autoruns/Autoruns64.dll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/autoruns/Autoruns64.dll -------------------------------------------------------------------------------- /eXir-win/resources/autoruns/Eula.txt: -------------------------------------------------------------------------------- 1 | Sysinternals Software License Terms 2 | These license terms are an agreement between Sysinternals (a wholly owned subsidiary of Microsoft Corporation) and you. Please read them. They apply to the software you are downloading from technet.microsoft.com/sysinternals, which includes the media on which you received it, if any. The terms also apply to any Sysinternals 3 | * updates, 4 | * supplements, 5 | * Internet-based services, 6 | * and support services 7 | for this software, unless other terms accompany those items. If so, those terms apply. 8 | BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. 9 | If you comply with these license terms, you have the rights below. 10 | 11 | Installation and User Rights 12 | 13 | You may install and use any number of copies of the software on your devices. 14 | 15 | Scope of License 16 | 17 | The software is licensed, not sold. This agreement only gives you some rights to use the software. Sysinternals reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. 
You may not 18 | * work around any technical limitations in the software; 19 | * reverse engineer, decompile or disassemble the software, except and only to the extent that applicable law expressly permits, despite this limitation; 20 | * make more copies of the software than specified in this agreement or allowed by applicable law, despite this limitation; 21 | * publish the software for others to copy; 22 | * rent, lease or lend the software; 23 | * transfer the software or this agreement to any third party; or 24 | * use the software for commercial software hosting services. 25 | 26 | Sensitive Information 27 | 28 | Please be aware that, similar to other debug tools that capture “process state” information, files saved by Sysinternals tools may include personally identifiable or other sensitive information (such as usernames, passwords, paths to files accessed, and paths to registry accessed). By using this software, you acknowledge that you are aware of this and take sole responsibility for any personally identifiable or other sensitive information provided to Microsoft or any other party through your use of the software. 29 | 30 | Documentation 31 | 32 | Any person that has valid access to your computer or internal network may copy and use the documentation for your internal, reference purposes. 33 | 34 | Export Restrictions 35 | 36 | The software is subject to United States export laws and regulations. You must comply with all domestic and international export laws and regulations that apply to the software. These laws include restrictions on destinations, end users and end use. For additional information, see www.microsoft.com/exporting . 37 | 38 | Support Services 39 | 40 | Because this software is "as is," we may not provide support services for it. 
41 | 42 | Entire Agreement 43 | 44 | This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services. 45 | 46 | Applicable Law 47 | 48 | United States . If you acquired the software in the United States , Washington state law governs the interpretation of this agreement and applies to claims for breach of it, regardless of conflict of laws principles. The laws of the state where you live govern all other claims, including claims under state consumer protection laws, unfair competition laws, and in tort. 49 | Outside the United States . If you acquired the software in any other country, the laws of that country apply. 50 | 51 | Legal Effect 52 | 53 | This agreement describes certain legal rights. You may have other rights under the laws of your country. You may also have rights with respect to the party from whom you acquired the software. This agreement does not change your rights under the laws of your country if the laws of your country do not permit it to do so. 54 | 55 | Disclaimer of Warranty 56 | 57 | The software is licensed "as-is." You bear the risk of using it. Sysinternals gives no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this agreement cannot change. To the extent permitted under your local laws, sysinternals excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement. 58 | 59 | Limitation on and Exclusion of Remedies and Damages 60 | 61 | You can recover from sysinternals and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages. 
62 | This limitation applies to 63 | * anything related to the software, services, content (including code) on third party Internet sites, or third party programs; and 64 | * claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law. 65 | 66 | It also applies even if Sysinternals knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of incidental, consequential or other damages. 67 | Please note: As this software is distributed in Quebec , Canada , some of the clauses in this agreement are provided below in French. 68 | Remarque : Ce logiciel étant distribué au Québec, Canada, certaines des clauses dans ce contrat sont fournies ci-dessous en français. 69 | EXONÉRATION DE GARANTIE. Le logiciel visé par une licence est offert « tel quel ». Toute utilisation de ce logiciel est à votre seule risque et péril. Sysinternals n'accorde aucune autre garantie expresse. Vous pouvez bénéficier de droits additionnels en vertu du droit local sur la protection dues consommateurs, que ce contrat ne peut modifier. La ou elles sont permises par le droit locale, les garanties implicites de qualité marchande, d'adéquation à un usage particulier et d'absence de contrefaçon sont exclues. 70 | LIMITATION DES DOMMAGES-INTÉRÊTS ET EXCLUSION DE RESPONSABILITÉ POUR LES DOMMAGES. Vous pouvez obtenir de Sysinternals et de ses fournisseurs une indemnisation en cas de dommages directs uniquement à hauteur de 5,00 $ US. Vous ne pouvez prétendre à aucune indemnisation pour les autres dommages, y compris les dommages spéciaux, indirects ou accessoires et pertes de bénéfices. 
71 | Cette limitation concerne : 72 | tout ce qui est relié au logiciel, aux services ou au contenu (y compris le code) figurant sur des sites Internet tiers ou dans des programmes tiers ; et 73 | les réclamations au titre de violation de contrat ou de garantie, ou au titre de responsabilité stricte, de négligence ou d'une autre faute dans la limite autorisée par la loi en vigueur. 74 | Elle s'applique également, même si Sysinternals connaissait ou devrait connaître l'éventualité d'un tel dommage. Si votre pays n'autorise pas l'exclusion ou la limitation de responsabilité pour les dommages indirects, accessoires ou de quelque nature que ce soit, il se peut que la limitation ou l'exclusion ci-dessus ne s'appliquera pas à votre égard. 75 | EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d'autres droits prévus par les lois de votre pays. Le présent contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas. 
76 | -------------------------------------------------------------------------------- /eXir-win/resources/autoruns/autoruns.chm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/autoruns/autoruns.chm -------------------------------------------------------------------------------- /eXir-win/resources/autoruns/autorunsc.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/autoruns/autorunsc.exe -------------------------------------------------------------------------------- /eXir-win/resources/autoruns/autorunsc64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/autoruns/autorunsc64.exe -------------------------------------------------------------------------------- /eXir-win/resources/logonsessions/Eula.txt: -------------------------------------------------------------------------------- 1 | Sysinternals Software License Terms 2 | These license terms are an agreement between Sysinternals (a wholly owned subsidiary of Microsoft Corporation) and you. Please read them. They apply to the software you are downloading from technet.microsoft.com/sysinternals, which includes the media on which you received it, if any. The terms also apply to any Sysinternals 3 | * updates, 4 | * supplements, 5 | * Internet-based services, 6 | * and support services 7 | for this software, unless other terms accompany those items. If so, those terms apply. 8 | BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. 9 | If you comply with these license terms, you have the rights below. 
10 | 11 | Installation and User Rights 12 | 13 | You may install and use any number of copies of the software on your devices. 14 | 15 | Scope of License 16 | 17 | The software is licensed, not sold. This agreement only gives you some rights to use the software. Sysinternals reserves all other rights. Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not 18 | * work around any technical limitations in the software; 19 | * reverse engineer, decompile or disassemble the software, except and only to the extent that applicable law expressly permits, despite this limitation; 20 | * make more copies of the software than specified in this agreement or allowed by applicable law, despite this limitation; 21 | * publish the software for others to copy; 22 | * rent, lease or lend the software; 23 | * transfer the software or this agreement to any third party; or 24 | * use the software for commercial software hosting services. 25 | 26 | Sensitive Information 27 | 28 | Please be aware that, similar to other debug tools that capture “process state” information, files saved by Sysinternals tools may include personally identifiable or other sensitive information (such as usernames, passwords, paths to files accessed, and paths to registry accessed). By using this software, you acknowledge that you are aware of this and take sole responsibility for any personally identifiable or other sensitive information provided to Microsoft or any other party through your use of the software. 29 | 30 | Documentation 31 | 32 | Any person that has valid access to your computer or internal network may copy and use the documentation for your internal, reference purposes. 33 | 34 | Export Restrictions 35 | 36 | The software is subject to United States export laws and regulations. 
You must comply with all domestic and international export laws and regulations that apply to the software. These laws include restrictions on destinations, end users and end use. For additional information, see www.microsoft.com/exporting . 37 | 38 | Support Services 39 | 40 | Because this software is "as is," we may not provide support services for it. 41 | 42 | Entire Agreement 43 | 44 | This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services. 45 | 46 | Applicable Law 47 | 48 | United States . If you acquired the software in the United States , Washington state law governs the interpretation of this agreement and applies to claims for breach of it, regardless of conflict of laws principles. The laws of the state where you live govern all other claims, including claims under state consumer protection laws, unfair competition laws, and in tort. 49 | Outside the United States . If you acquired the software in any other country, the laws of that country apply. 50 | 51 | Legal Effect 52 | 53 | This agreement describes certain legal rights. You may have other rights under the laws of your country. You may also have rights with respect to the party from whom you acquired the software. This agreement does not change your rights under the laws of your country if the laws of your country do not permit it to do so. 54 | 55 | Disclaimer of Warranty 56 | 57 | The software is licensed "as-is." You bear the risk of using it. Sysinternals gives no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this agreement cannot change. To the extent permitted under your local laws, sysinternals excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement. 
58 | 59 | Limitation on and Exclusion of Remedies and Damages 60 | 61 | You can recover from sysinternals and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages. 62 | This limitation applies to 63 | * anything related to the software, services, content (including code) on third party Internet sites, or third party programs; and 64 | * claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law. 65 | 66 | It also applies even if Sysinternals knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of incidental, consequential or other damages. 67 | Please note: As this software is distributed in Quebec , Canada , some of the clauses in this agreement are provided below in French. 68 | Remarque : Ce logiciel étant distribué au Québec, Canada, certaines des clauses dans ce contrat sont fournies ci-dessous en français. 69 | EXONÉRATION DE GARANTIE. Le logiciel visé par une licence est offert « tel quel ». Toute utilisation de ce logiciel est à votre seule risque et péril. Sysinternals n'accorde aucune autre garantie expresse. Vous pouvez bénéficier de droits additionnels en vertu du droit local sur la protection dues consommateurs, que ce contrat ne peut modifier. La ou elles sont permises par le droit locale, les garanties implicites de qualité marchande, d'adéquation à un usage particulier et d'absence de contrefaçon sont exclues. 70 | LIMITATION DES DOMMAGES-INTÉRÊTS ET EXCLUSION DE RESPONSABILITÉ POUR LES DOMMAGES. Vous pouvez obtenir de Sysinternals et de ses fournisseurs une indemnisation en cas de dommages directs uniquement à hauteur de 5,00 $ US. 
Vous ne pouvez prétendre à aucune indemnisation pour les autres dommages, y compris les dommages spéciaux, indirects ou accessoires et pertes de bénéfices. 71 | Cette limitation concerne : 72 | tout ce qui est relié au logiciel, aux services ou au contenu (y compris le code) figurant sur des sites Internet tiers ou dans des programmes tiers ; et 73 | les réclamations au titre de violation de contrat ou de garantie, ou au titre de responsabilité stricte, de négligence ou d'une autre faute dans la limite autorisée par la loi en vigueur. 74 | Elle s'applique également, même si Sysinternals connaissait ou devrait connaître l'éventualité d'un tel dommage. Si votre pays n'autorise pas l'exclusion ou la limitation de responsabilité pour les dommages indirects, accessoires ou de quelque nature que ce soit, il se peut que la limitation ou l'exclusion ci-dessus ne s'appliquera pas à votre égard. 75 | EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d'autres droits prévus par les lois de votre pays. Le présent contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas. 
76 | -------------------------------------------------------------------------------- /eXir-win/resources/logonsessions/logonsessions.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/logonsessions/logonsessions.exe -------------------------------------------------------------------------------- /eXir-win/resources/logonsessions/logonsessions64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/logonsessions/logonsessions64.exe -------------------------------------------------------------------------------- /eXir-win/resources/psloggedon/Eula.txt: -------------------------------------------------------------------------------- 1 | Sysinternals Software License Terms 2 | These license terms are an agreement between Sysinternals (a wholly owned subsidiary of Microsoft Corporation) and you. Please read them. They apply to the software you are downloading from technet.microsoft.com/sysinternals, which includes the media on which you received it, if any. The terms also apply to any Sysinternals 3 | * updates, 4 | * supplements, 5 | * Internet-based services, 6 | * and support services 7 | for this software, unless other terms accompany those items. If so, those terms apply. 8 | BY USING THE SOFTWARE, YOU ACCEPT THESE TERMS. IF YOU DO NOT ACCEPT THEM, DO NOT USE THE SOFTWARE. 9 | If you comply with these license terms, you have the rights below. 10 | 11 | Installation and User Rights 12 | 13 | You may install and use any number of copies of the software on your devices. 14 | 15 | Scope of License 16 | 17 | The software is licensed, not sold. This agreement only gives you some rights to use the software. Sysinternals reserves all other rights. 
Unless applicable law gives you more rights despite this limitation, you may use the software only as expressly permitted in this agreement. In doing so, you must comply with any technical limitations in the software that only allow you to use it in certain ways. You may not 18 | * work around any technical limitations in the software; 19 | * reverse engineer, decompile or disassemble the software, except and only to the extent that applicable law expressly permits, despite this limitation; 20 | * make more copies of the software than specified in this agreement or allowed by applicable law, despite this limitation; 21 | * publish the software for others to copy; 22 | * rent, lease or lend the software; 23 | * transfer the software or this agreement to any third party; or 24 | * use the software for commercial software hosting services. 25 | 26 | Sensitive Information 27 | 28 | Please be aware that, similar to other debug tools that capture “process state” information, files saved by Sysinternals tools may include personally identifiable or other sensitive information (such as usernames, passwords, paths to files accessed, and paths to registry accessed). By using this software, you acknowledge that you are aware of this and take sole responsibility for any personally identifiable or other sensitive information provided to Microsoft or any other party through your use of the software. 29 | 30 | Documentation 31 | 32 | Any person that has valid access to your computer or internal network may copy and use the documentation for your internal, reference purposes. 33 | 34 | Export Restrictions 35 | 36 | The software is subject to United States export laws and regulations. You must comply with all domestic and international export laws and regulations that apply to the software. These laws include restrictions on destinations, end users and end use. For additional information, see www.microsoft.com/exporting . 
37 | 38 | Support Services 39 | 40 | Because this software is "as is," we may not provide support services for it. 41 | 42 | Entire Agreement 43 | 44 | This agreement, and the terms for supplements, updates, Internet-based services and support services that you use, are the entire agreement for the software and support services. 45 | 46 | Applicable Law 47 | 48 | United States . If you acquired the software in the United States , Washington state law governs the interpretation of this agreement and applies to claims for breach of it, regardless of conflict of laws principles. The laws of the state where you live govern all other claims, including claims under state consumer protection laws, unfair competition laws, and in tort. 49 | Outside the United States . If you acquired the software in any other country, the laws of that country apply. 50 | 51 | Legal Effect 52 | 53 | This agreement describes certain legal rights. You may have other rights under the laws of your country. You may also have rights with respect to the party from whom you acquired the software. This agreement does not change your rights under the laws of your country if the laws of your country do not permit it to do so. 54 | 55 | Disclaimer of Warranty 56 | 57 | The software is licensed "as-is." You bear the risk of using it. Sysinternals gives no express warranties, guarantees or conditions. You may have additional consumer rights under your local laws which this agreement cannot change. To the extent permitted under your local laws, sysinternals excludes the implied warranties of merchantability, fitness for a particular purpose and non-infringement. 58 | 59 | Limitation on and Exclusion of Remedies and Damages 60 | 61 | You can recover from sysinternals and its suppliers only direct damages up to U.S. $5.00. You cannot recover any other damages, including consequential, lost profits, special, indirect or incidental damages. 
62 | This limitation applies to 63 | * anything related to the software, services, content (including code) on third party Internet sites, or third party programs; and 64 | * claims for breach of contract, breach of warranty, guarantee or condition, strict liability, negligence, or other tort to the extent permitted by applicable law. 65 | 66 | It also applies even if Sysinternals knew or should have known about the possibility of the damages. The above limitation or exclusion may not apply to you because your country may not allow the exclusion or limitation of incidental, consequential or other damages. 67 | Please note: As this software is distributed in Quebec , Canada , some of the clauses in this agreement are provided below in French. 68 | Remarque : Ce logiciel étant distribué au Québec, Canada, certaines des clauses dans ce contrat sont fournies ci-dessous en français. 69 | EXONÉRATION DE GARANTIE. Le logiciel visé par une licence est offert « tel quel ». Toute utilisation de ce logiciel est à votre seule risque et péril. Sysinternals n'accorde aucune autre garantie expresse. Vous pouvez bénéficier de droits additionnels en vertu du droit local sur la protection dues consommateurs, que ce contrat ne peut modifier. La ou elles sont permises par le droit locale, les garanties implicites de qualité marchande, d'adéquation à un usage particulier et d'absence de contrefaçon sont exclues. 70 | LIMITATION DES DOMMAGES-INTÉRÊTS ET EXCLUSION DE RESPONSABILITÉ POUR LES DOMMAGES. Vous pouvez obtenir de Sysinternals et de ses fournisseurs une indemnisation en cas de dommages directs uniquement à hauteur de 5,00 $ US. Vous ne pouvez prétendre à aucune indemnisation pour les autres dommages, y compris les dommages spéciaux, indirects ou accessoires et pertes de bénéfices. 
71 | Cette limitation concerne : 72 | tout ce qui est relié au logiciel, aux services ou au contenu (y compris le code) figurant sur des sites Internet tiers ou dans des programmes tiers ; et 73 | les réclamations au titre de violation de contrat ou de garantie, ou au titre de responsabilité stricte, de négligence ou d'une autre faute dans la limite autorisée par la loi en vigueur. 74 | Elle s'applique également, même si Sysinternals connaissait ou devrait connaître l'éventualité d'un tel dommage. Si votre pays n'autorise pas l'exclusion ou la limitation de responsabilité pour les dommages indirects, accessoires ou de quelque nature que ce soit, il se peut que la limitation ou l'exclusion ci-dessus ne s'appliquera pas à votre égard. 75 | EFFET JURIDIQUE. Le présent contrat décrit certains droits juridiques. Vous pourriez avoir d'autres droits prévus par les lois de votre pays. Le présent contrat ne modifie pas les droits que vous confèrent les lois de votre pays si celles-ci ne le permettent pas. 
76 | -------------------------------------------------------------------------------- /eXir-win/resources/psloggedon/PsLoggedon.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/psloggedon/PsLoggedon.exe -------------------------------------------------------------------------------- /eXir-win/resources/psloggedon/PsLoggedon64.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/psloggedon/PsLoggedon64.exe -------------------------------------------------------------------------------- /eXir-win/resources/winaudit/WinAudit.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/EbryxLabs/__DFIR-scripts/305edcef268972f0cc52880dd68802bd6f40d05b/eXir-win/resources/winaudit/WinAudit.exe -------------------------------------------------------------------------------- /eXir/eXir_2.4.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | 5 | ############################################################## 6 | # Copyleft by Ebryx LLC - www.ebryx.com © 2019 7 | ############################################################## 8 | 9 | mkdir ~/artifacts 10 | 11 | 12 | dt=$(date '+%d-%m-%Y_%H-%M-%S_%Z') 13 | echo "$dt" 14 | 15 | # mac chrome profile path ~/Library/Application\ Support/Google/Chrome 16 | # linux chrome profile path ~/.config/google-chrome/ 17 | 18 | echo "[I] File Created: ~/artifacts/1.1_$dt.txt" 19 | touch ~/artifacts/1.1_$dt.txt 20 | 21 | 22 | 23 | echo "##############################################################" 24 | echo "# Copyleft by Ebryx LLC - www.ebryx.com © 2019" 25 | echo 
"##############################################################" 26 | 27 | 28 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 29 | echo "Script Started" >> ~/artifacts/1.1_$dt.txt 30 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 31 | date >> ~/artifacts/1.1_$dt.txt 32 | sudo echo $(date) >> ~/artifacts/1.1_$dt.txt 33 | 34 | 35 | # Determine OS platform 36 | UNAME=$(uname | tr "[:upper:]" "[:lower:]") 37 | # If Linux, try to determine specific distribution 38 | if [ "$UNAME" == "linux" ]; then 39 | # If available, use LSB to identify distribution 40 | if [ -f /etc/lsb-release -o -d /etc/lsb-release.d ]; then 41 | export DISTRO=$(lsb_release -i | cut -d: -f2 | sed s/'^\t'//) 42 | # Otherwise, use release info file 43 | else 44 | export DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1) 45 | fi 46 | fi 47 | # For everything else (or if above failed), just use generic identifier 48 | [ "$DISTRO" == "" ] && export DISTRO=$UNAME 49 | unset UNAME 50 | 51 | shopt -s nocasematch 52 | if [[ "$DISTRO" =~ "redhat" ]]; then 53 | DISTRO="redhat" 54 | elif [[ "$DISTRO" =~ "centos" ]]; then 55 | DISTRO="centos" 56 | elif [[ "$DISTRO" =~ "ubuntu" ]]; then 57 | DISTRO="ubuntu" 58 | elif [[ "$DISTRO" =~ "darwin" ]]; then 59 | DISTRO="osx" 60 | else 61 | DISTRO="unknown" 62 | fi 63 | 64 | 65 | 66 | 67 | if [ -f /var/log/auth.log ];then 68 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 69 | echo "Details about sudo commands executed by all user" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 70 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 71 | sudo grep sudo /var/log/auth.log >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 2>&1 72 | echo >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 73 | fi 74 | 75 | 76 | if [ -f /var/log/secure ];then 77 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 78 | echo "Details about sudo commands 
executed by all user" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 79 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 80 | sudo grep sudo /var/log/secure >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 2>&1 81 | echo >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 82 | fi 83 | 84 | 85 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 86 | echo "Details about sudo commands executed by all user" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 87 | echo "------------------" >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 88 | sudo journalctl _COMM=sudo >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 2>&1 89 | echo >> ~/artifacts/17_sudo_commands_by_user_$dt.txt 90 | 91 | 92 | 93 | 94 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 95 | echo "Files changed in close delta (top 25 entries) (Latest edited/created files)" >> ~/artifacts/16_files_changed_$dt.txt 96 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 97 | sudo find . 
-type f -printf '%T@ %p\n' | sort -n | tail -25 | cut -f2- -d" " >> ~/artifacts/16_files_changed_$dt.txt 98 | echo >> ~/artifacts/16_files_changed_$dt.txt 99 | 100 | 101 | 102 | 103 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 104 | echo "Files being written right now" >> ~/artifacts/16_files_changed_$dt.txt 105 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 106 | sudo lsof $( find /var/log /var/www/log -type f ) >> ~/artifacts/16_files_changed_$dt.txt 107 | echo >> ~/artifacts/16_files_changed_$dt.txt 108 | 109 | 110 | 111 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 112 | echo "Files changed during last 24 hours in a dir & sub-dirs" >> ~/artifacts/16_files_changed_$dt.txt 113 | echo "------------------" >> ~/artifacts/16_files_changed_$dt.txt 114 | sudo find / -newermt "1 day ago" -ls >> ~/artifacts/16_files_changed_$dt.txt 2>&1 115 | echo >> ~/artifacts/16_files_changed_$dt.txt 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 125 | echo "Host Name" >> ~/artifacts/1.1_$dt.txt 126 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 127 | sudo hostname >> ~/artifacts/1.1_$dt.txt 2>&1 128 | echo >> ~/artifacts/1.1_$dt.txt 129 | 130 | 131 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 132 | echo "Kernal Verion" >> ~/artifacts/1.1_$dt.txt 133 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 134 | sudo uname -a >> ~/artifacts/1.1_$dt.txt 2>&1 135 | echo >> ~/artifacts/1.1_$dt.txt 136 | 137 | 138 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 139 | echo "System uptime" >> ~/artifacts/1.1_$dt.txt 140 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 141 | sudo uptime >> ~/artifacts/1.1_$dt.txt 2>&1 142 | echo >> ~/artifacts/1.1_$dt.txt 143 | 144 | 145 | 146 | if [ "$DISTRO" == "osx" ]; then 147 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 148 | echo "System Profiler" >> ~/artifacts/1.1_$dt.txt 149 | echo 
"------------------" >> ~/artifacts/1.1_$dt.txt 150 | sudo system_profiler >> ~/artifacts/1.1_$dt.txt 2>&1 151 | echo >> ~/artifacts/1.1_$dt.txt 152 | else 153 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 154 | echo "OS Version" >> ~/artifacts/1.1_$dt.txt 155 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 156 | sudo cat /etc/*-release >> ~/artifacts/1.1_$dt.txt 2>&1 157 | echo >> ~/artifacts/1.1_$dt.txt 158 | fi 159 | 160 | 161 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 162 | echo "Current Logged In User Name - whoami" >> ~/artifacts/1.1_$dt.txt 163 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 164 | sudo whoami >> ~/artifacts/1.1_$dt.txt 2>&1 165 | echo >> ~/artifacts/1.1_$dt.txt 166 | 167 | 168 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 169 | echo "Current Logged In Users - who -u" >> ~/artifacts/1.1_$dt.txt 170 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 171 | sudo who -u >> ~/artifacts/1.1_$dt.txt 2>&1 172 | echo >> ~/artifacts/1.1_$dt.txt 173 | 174 | 175 | 176 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 177 | echo "Past Logged in Users" >> ~/artifacts/1.1_$dt.txt 178 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 179 | sudo lastlog >> ~/artifacts/1.1_$dt.txt 2>&1 180 | sudo last >> ~/artifacts/1.1_$dt.txt 2>&1 181 | echo >> ~/artifacts/1.1_$dt.txt 182 | 183 | 184 | 185 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 186 | echo "Last System Reboot Time" >> ~/artifacts/1.1_$dt.txt 187 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 188 | sudo who -b >> ~/artifacts/1.1_$dt.txt 2>&1 189 | echo >> ~/artifacts/1.1_$dt.txt 190 | 191 | 192 | 193 | 194 | if [ -f /proc/mounts ];then 195 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 196 | echo "Mounted Hard Drives Partition" >> ~/artifacts/1.1_$dt.txt 197 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 198 | sudo cat /proc/mounts >> ~/artifacts/1.1_$dt.txt 2>&1 199 | echo >> ~/artifacts/1.1_$dt.txt 200 | elif [ 
-f /proc/self/mounts ]; then 201 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 202 | echo "Mounted Hard Drives Partition" >> ~/artifacts/1.1_$dt.txt 203 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 204 | sudo cat /proc/self/mounts >> ~/artifacts/1.1_$dt.txt 2>&1 205 | echo >> ~/artifacts/1.1_$dt.txt 206 | fi 207 | 208 | 209 | 210 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 211 | echo "Show file system disk space usage" >> ~/artifacts/1.1_$dt.txt 212 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 213 | sudo df -aTh >> ~/artifacts/1.1_$dt.txt 2>&1 214 | echo >> ~/artifacts/1.1_$dt.txt 215 | 216 | 217 | 218 | 219 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 220 | echo "Print env" >> ~/artifacts/1.1_$dt.txt 221 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 222 | 223 | if [ -x "$(command -v printenv)" ]; then 224 | sudo printenv >> ~/artifacts/1.1_$dt.txt 2>&1 225 | else 226 | sudo env >> ~/artifacts/1.1_$dt.txt 2>&1 227 | fi 228 | 229 | echo >> ~/artifacts/1.1_$dt.txt 230 | 231 | 232 | 233 | 234 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 235 | echo "Currently running screen sessions - screen -ls - empty output means no screen" >> ~/artifacts/1.1_$dt.txt 236 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 237 | sudo screen -ls >> ~/artifacts/1.1_$dt.txt 2>&1 238 | echo >> ~/artifacts/1.1_$dt.txt 239 | 240 | 241 | 242 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 243 | echo "Currently running screen sessions - ps auxw|grep -i screen|grep -v grep" >> ~/artifacts/1.1_$dt.txt 244 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 245 | sudo ps auxw|grep -i screen|grep -v grep >> ~/artifacts/1.1_$dt.txt 2>&1 246 | echo >> ~/artifacts/1.1_$dt.txt 247 | 248 | 249 | 250 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 251 | echo "Bash Profile" >> ~/artifacts/1.1_$dt.txt 252 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 253 | if [ -f ~/.bash_profile ];then 254 | sudo cat 
~/.bash_profile >> ~/artifacts/1.1_$dt.txt 255 | else 256 | sudo echo "~/.bash_profile not exists" >> ~/artifacts/1.1_$dt.txt 257 | fi 258 | echo >> ~/artifacts/1.1_$dt.txt 259 | 260 | 261 | 262 | echo "------------------" >> ~/artifacts/1_passwd_$dt.txt 263 | echo "Users List (Passwd File)" >> ~/artifacts/1_passwd_$dt.txt 264 | echo "------------------" >> ~/artifacts/1_passwd_$dt.txt 265 | sudo cat /etc/passwd >> ~/artifacts/1_passwd_$dt.txt 266 | echo >> ~/artifacts/1_passwd_$dt.txt 267 | 268 | 269 | 270 | 271 | 272 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 273 | echo "Users with high privileges" >> ~/artifacts/1.1_$dt.txt 274 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 275 | echo ":::::grep '^sudo:.*$' /etc/group | cut -d: -f4:::::" >> ~/artifacts/1.1_$dt.txt 276 | tempVar=$(sudo grep '^sudo:.*$' /etc/group | cut -d: -f4) 277 | echo $tempVar >> ~/artifacts/1.1_$dt.txt 2>&1 278 | for i in $(echo $tempVar | sed "s/,/ /g") 279 | do 280 | id $i >> ~/artifacts/1.1_$dt.txt 281 | done 282 | echo >> ~/artifacts/1.1_$dt.txt 283 | echo ":::::grep '^root:.*$' /etc/group | cut -d: -f4:::::" >> ~/artifacts/1.1_$dt.txt 284 | tempVar=$(sudo grep '^root:.*$' /etc/group | cut -d: -f4) 285 | echo $tempVar >> ~/artifacts/1.1_$dt.txt 2>&1 286 | for i in $(echo $tempVar | sed "s/,/ /g") 287 | do 288 | id $i >> ~/artifacts/1.1_$dt.txt 289 | done 290 | echo >> ~/artifacts/1.1_$dt.txt 291 | echo ":::::grep '^admin:.*$' /etc/group | cut -d: -f4:::::" >> ~/artifacts/1.1_$dt.txt 292 | tempVar=$(sudo grep '^admin:.*$' /etc/group | cut -d: -f4) 293 | echo $tempVar >> ~/artifacts/1.1_$dt.txt 2>&1 294 | for i in $(echo $tempVar | sed "s/,/ /g") 295 | do 296 | id $i >> ~/artifacts/1.1_$dt.txt 297 | done 298 | echo >> ~/artifacts/1.1_$dt.txt 299 | echo ":::::grep '^wheel:.*$' /etc/group | cut -d: -f4:::::" >> ~/artifacts/1.1_$dt.txt 300 | tempVar=$(sudo grep '^wheel:.*$' /etc/group | cut -d: -f4) 301 | echo $tempVar >> ~/artifacts/1.1_$dt.txt 2>&1 302 | for i in $(echo 
$tempVar | sed "s/,/ /g") 303 | do 304 | id $i >> ~/artifacts/1.1_$dt.txt 305 | done 306 | echo >> ~/artifacts/1.1_$dt.txt 307 | 308 | 309 | 310 | 311 | 312 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 313 | echo "Sudoers file" >> ~/artifacts/1.1_$dt.txt 314 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 315 | sudo cat /etc/sudoers >> ~/artifacts/1.1_$dt.txt 316 | echo >> ~/artifacts/1.1_$dt.txt 317 | 318 | 319 | 320 | 321 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 322 | echo "Group file" >> ~/artifacts/1.1_$dt.txt 323 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 324 | sudo cat /etc/group >> ~/artifacts/1.1_$dt.txt 325 | echo >> ~/artifacts/1.1_$dt.txt 326 | 327 | 328 | 329 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 330 | echo "Top memory consuming processes" >> ~/artifacts/18_processes_details_$dt.txt 331 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 332 | sudo top -b -n 1 -o +%MEM | head -n 22 >> ~/artifacts/18_processes_details_$dt.txt 333 | echo >> ~/artifacts/18_processes_details_$dt.txt 334 | 335 | 336 | 337 | 338 | 339 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 340 | echo "Non-GUI running processess" >> ~/artifacts/18_processes_details_$dt.txt 341 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 342 | sudo ps -C "$(xlsclients | cut -d' ' -f3 | paste - -s -d ',')" --deselect >> ~/artifacts/18_processes_details_$dt.txt 343 | echo >> ~/artifacts/18_processes_details_$dt.txt 344 | 345 | 346 | 347 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 348 | echo "GUI running processess" >> ~/artifacts/18_processes_details_$dt.txt 349 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 350 | sudo xlsclients | cut -d' ' -f3 | paste - -s -d ',' >> ~/artifacts/18_processes_details_$dt.txt 351 | echo >> ~/artifacts/18_processes_details_$dt.txt 352 | 353 | 354 | 355 | 356 | 357 | echo 
"------------------" >> ~/artifacts/18_processes_details_$dt.txt 358 | echo "Processess with no TTY attached" >> ~/artifacts/18_processes_details_$dt.txt 359 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 360 | sudo ps -C "$(xlsclients | cut -d' ' -f3 | paste - -s -d ',')" --deselect -o tty,args | grep ^? >> ~/artifacts/18_processes_details_$dt.txt 361 | echo >> ~/artifacts/18_processes_details_$dt.txt 362 | 363 | 364 | 365 | 366 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 367 | echo "All non-GUI processes running without a controlling terminal" >> ~/artifacts/18_processes_details_$dt.txt 368 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 369 | sudo ps -C "$(xlsclients | cut -d' ' -f3 | paste - -s -d ',')" --ppid 2 --pid 2 --deselect -o tty,uid,pid,ppid,args | grep ^? >> ~/artifacts/18_processes_details_$dt.txt 370 | echo >> ~/artifacts/18_processes_details_$dt.txt 371 | 372 | 373 | 374 | 375 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 376 | echo "Top processes by memory and cpu usage" >> ~/artifacts/18_processes_details_$dt.txt 377 | echo "------------------" >> ~/artifacts/18_processes_details_$dt.txt 378 | sudo ps -eo pid,ppid,cmd,%mem,%cpu --sort=-%mem | head >> ~/artifacts/18_processes_details_$dt.txt 379 | echo >> ~/artifacts/18_processes_details_$dt.txt 380 | 381 | 382 | 383 | 384 | 385 | 386 | 387 | 388 | 389 | 390 | 391 | if [ "$DISTRO" != "osx" ]; then 392 | echo "------------------" >> ~/artifacts/2_shadow_$dt.txt 393 | echo "Shadow File" >> ~/artifacts/2_shadow_$dt.txt 394 | echo "------------------" >> ~/artifacts/2_shadow_$dt.txt 395 | sudo cat /etc/shadow >> ~/artifacts/2_shadow_$dt.txt 396 | echo >> ~/artifacts/2_shadow_$dt.txt 397 | fi 398 | 399 | 400 | echo "------------------" >> ~/artifacts/3_cmd_history_$dt.txt 401 | echo "Commands History" >> ~/artifacts/3_cmd_history_$dt.txt 402 | echo "------------------" >> 
~/artifacts/3_cmd_history_$dt.txt 403 | sudo cat ~/.bash_history >> ~/artifacts/3_cmd_history_$dt.txt 404 | echo >> ~/artifacts/3_cmd_history_$dt.txt 405 | 406 | 407 | 408 | if [ "$DISTRO" == "osx" ]; then 409 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 410 | echo "Startup Services" >> ~/artifacts/1.1_$dt.txt 411 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 412 | sudo python find_all_startup_items.py >> ~/artifacts/1.1_$dt.txt 413 | echo >> ~/artifacts/1.1_$dt.txt 414 | else 415 | # man bash 416 | # -x file 417 | # True if file exists and is executable. 418 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 419 | echo "Startup Services" >> ~/artifacts/1.1_$dt.txt 420 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 421 | 422 | 423 | if [ -x "$(command -v systemctl)" ]; then 424 | sudo systemctl list-unit-files --type=service >> ~/artifacts/1.1_$dt.txt 425 | elif [ -x "$(command -v service)" ]; then 426 | sudo service --status-all >> ~/artifacts/1.1_$dt.txt 427 | elif [ -x "$(command -v initctl)" ]; then 428 | sudo initctl list >> ~/artifacts/1.1_$dt.txt 429 | else 430 | echo "ERROR: Unable To find startup service" >> ~/artifacts/1.1_$dt.txt 431 | fi 432 | echo >> ~/artifacts/1.1_$dt.txt 433 | fi 434 | 435 | 436 | 437 | echo "------------------" >> ~/artifacts/4_md5_bin_$dt.txt 438 | echo "md5 of all binaries" >> ~/artifacts/4_md5_bin_$dt.txt 439 | echo "------------------" >> ~/artifacts/4_md5_bin_$dt.txt 440 | OIFS=$IFS 441 | path=$(echo $PATH) 442 | IFS=':' # : is set as delimiter 443 | read -ra element <<< "$path" # path is read into an array as tokens separated by IFS 444 | IFS=' ' # reset to default value after usage 445 | 446 | for i in "${element[@]}"; do # access each element of array 447 | echo "$i" >> ~/artifacts/4_md5_bin_$dt.txt 448 | done 449 | 450 | echo >> ~/artifacts/4_md5_bin_$dt.txt 451 | echo >> ~/artifacts/4_md5_bin_$dt.txt 452 | 453 | IFS=':' # : is set as delimiter 454 | read -ra element <<< "$path" # path is 
read into an array as tokens separated by IFS 455 | IFS=' ' # reset to default value after usage 456 | 457 | for i in "${element[@]}"; do # access each element of array 458 | echo "$i" >> ~/artifacts/4_md5_bin_$dt.txt 459 | if [ "$DISTRO" == "osx" ]; then 460 | sudo md5 $i/* >> ~/artifacts/4_md5_bin_$dt.txt 2>&1 461 | else 462 | sudo md5sum $i/* >> ~/artifacts/4_md5_bin_$dt.txt 2>&1 463 | fi 464 | echo >> ~/artifacts/4_md5_bin_$dt.txt 465 | done 466 | IFS=$OIFS 467 | echo >> ~/artifacts/4_md5_bin_$dt.txt 468 | 469 | 470 | 471 | 472 | 473 | 474 | 475 | echo "------------------" >> ~/artifacts/6_processes_$dt.txt 476 | echo "Running Process - ps auxf" >> ~/artifacts/6_processes_$dt.txt 477 | echo "------------------" >> ~/artifacts/6_processes_$dt.txt 478 | sudo ps auxf >> ~/artifacts/6_processes_$dt.txt 479 | echo >> ~/artifacts/6_processes_$dt.txt 480 | 481 | 482 | 483 | 484 | 485 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 486 | echo "Network connections" >> ~/artifacts/1.1_$dt.txt 487 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 488 | 489 | if [ "$DISTRO" == "osx" ]; then 490 | echo "Command netstat -ap tcp:" >> ~/artifacts/1.1_$dt.txt 491 | sudo netstat -ap tcp >> ~/artifacts/1.1_$dt.txt 492 | elif [ -x "$(command -v netstat)" ]; then 493 | echo "Command netstat -plant:" >> ~/artifacts/1.1_$dt.txt 494 | sudo netstat -plant >> ~/artifacts/1.1_$dt.txt 495 | echo "Command netstat -a:" >> ~/artifacts/1.1_$dt.txt 496 | sudo netstat -a >> ~/artifacts/1.1_$dt.txt 497 | elif [ -x "$(command -v ss)" ]; then 498 | echo "Command ss -autp:" >> ~/artifacts/1.1_$dt.txt 499 | sudo ss -autp >> ~/artifacts/1.1_$dt.txt 500 | fi 501 | 502 | 503 | 504 | 505 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 506 | echo "Network Adopter Settings" >> ~/artifacts/1.1_$dt.txt 507 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 508 | echo "::: ifconfig command :::" >> ~/artifacts/1.1_$dt.txt 509 | if [ -x "$(command -v ifconfig)" ]; then 510 | sudo 
ifconfig >> ~/artifacts/1.1_$dt.txt 511 | else 512 | sudo ip addr >> ~/artifacts/1.1_$dt.txt 513 | fi 514 | echo "::: iwconfig command :::" >> ~/artifacts/1.1_$dt.txt 515 | if [ -x "$(command -v iwconfig)" ]; then 516 | sudo iwconfig >> ~/artifacts/1.1_$dt.txt 2>&1 517 | fi 518 | 519 | echo "::: lspci command :::" >> ~/artifacts/1.1_$dt.txt 520 | if [ -x "$(command -v lspci)" ]; then 521 | sudo lspci >> ~/artifacts/1.1_$dt.txt 2>&1 522 | fi 523 | echo >> ~/artifacts/1.1_$dt.txt 524 | 525 | 526 | 527 | 528 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 529 | echo "Routing Table" >> ~/artifacts/1.1_$dt.txt 530 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 531 | if [ -x "$(command -v ip)" ]; then 532 | sudo ip route show >> ~/artifacts/1.1_$dt.txt 2>&1 533 | else 534 | sudo netstat -nr >> ~/artifacts/1.1_$dt.txt 2>&1 535 | fi 536 | echo >> ~/artifacts/1.1_$dt.txt 537 | 538 | 539 | 540 | 541 | 542 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 543 | echo "ARP Table" >> ~/artifacts/1.1_$dt.txt 544 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 545 | if [ -x "$(command -v ip)" ]; then 546 | sudo ip neighbor >> ~/artifacts/1.1_$dt.txt 2>&1 547 | else 548 | sudo arp -a >> ~/artifacts/1.1_$dt.txt 2>&1 549 | fi 550 | echo >> ~/artifacts/1.1_$dt.txt 551 | 552 | 553 | 554 | 555 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 556 | echo "List of all active UDP and TCP services" >> ~/artifacts/1.1_$dt.txt 557 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 558 | sudo lsof -i >> ~/artifacts/1.1_$dt.txt 2>&1 559 | echo >> ~/artifacts/1.1_$dt.txt 560 | 561 | 562 | 563 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 564 | echo "Hosts File" >> ~/artifacts/1.1_$dt.txt 565 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 566 | cat /etc/hosts >> ~/artifacts/1.1_$dt.txt 2>&1 567 | echo >> ~/artifacts/1.1_$dt.txt 568 | 569 | 570 | 571 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 572 | echo "Resolv.conf File" >> 
~/artifacts/1.1_$dt.txt 573 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 574 | sudo cat /etc/resolv.conf >> ~/artifacts/1.1_$dt.txt 2>&1 575 | echo >> ~/artifacts/1.1_$dt.txt 576 | 577 | 578 | 579 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 580 | echo "ip_forward File" >> ~/artifacts/1.1_$dt.txt 581 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 582 | if [ "$DISTRO" == "osx" ]; then 583 | sudo sysctl -w net.inet.ip.forwarding >> ~/artifacts/1.1_$dt.txt 2>&1 584 | else 585 | sudo cat /proc/sys/net/ipv4/ip_forward >> ~/artifacts/1.1_$dt.txt 2>&1 586 | fi 587 | 588 | echo >> ~/artifacts/1.1_$dt.txt 589 | 590 | 591 | 592 | if [ -f /etc/sysctl.conf ]; then 593 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 594 | echo "sysctl.conf File" >> ~/artifacts/1.1_$dt.txt 595 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 596 | cat /etc/sysctl.conf >> ~/artifacts/1.1_$dt.txt 2>&1 597 | echo >> ~/artifacts/1.1_$dt.txt 598 | fi 599 | 600 | 601 | if [ -f /etc/sysconfig/network ];then 602 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 603 | echo "sysconfig/network File (Redhat)" >> ~/artifacts/1.1_$dt.txt 604 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 605 | cat /etc/sysconfig/network >> ~/artifacts/1.1_$dt.txt 2>&1 606 | echo >> ~/artifacts/1.1_$dt.txt 607 | fi 608 | 609 | 610 | 611 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 612 | echo "SSH authorized_keys File" >> ~/artifacts/1.1_$dt.txt 613 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 614 | sudo cat ~/.ssh/authorized_keys >> ~/artifacts/1.1_$dt.txt 2>&1 615 | echo >> ~/artifacts/1.1_$dt.txt 616 | 617 | 618 | 619 | 620 | 621 | 622 | 623 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 624 | echo "30 largest files on the disk" >> ~/artifacts/1.1_$dt.txt 625 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 626 | sudo find / -xdev -type f -size +100M -exec du -sh {} ';' | sort -rh | head -n50 >> ~/artifacts/1.1_$dt.txt 627 | echo >> 
~/artifacts/1.1_$dt.txt 628 | 629 | 630 | 631 | 632 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 633 | echo "Files over 100MB on entire filesystem" >> ~/artifacts/1.1_$dt.txt 634 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 635 | sudo find / -xdev -type f -size +100M -exec ls -lha {} \; | sort -nk 5 >> ~/artifacts/1.1_$dt.txt 636 | echo >> ~/artifacts/1.1_$dt.txt 637 | 638 | 639 | 640 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 641 | echo "Largest directories from /" >> ~/artifacts/1.1_$dt.txt 642 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 643 | sudo du -ahx / | sort -rh | head -20 >> ~/artifacts/1.1_$dt.txt 2>&1 644 | echo >> ~/artifacts/1.1_$dt.txt 645 | 646 | 647 | 648 | 649 | 650 | 651 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 652 | echo "Finding name of devices" >> ~/artifacts/1.1_$dt.txt 653 | echo "------------------" >> ~/artifacts/1.1_$dt.txt 654 | disk_name=$(df / | tail -1 | cut -d' ' -f1) 655 | echo $disk_name 656 | echo $disk_name >> ~/artifacts/1.1_$dt.txt 657 | echo "lsdel" > ~/artifacts/cmd.txt 658 | sudo debugfs $disk_name -f ~/artifacts/cmd.txt | cat >> ~/artifacts/1.1_$dt.txt 659 | echo >> ~/artifacts/1.1_$dt.txt 660 | 661 | 662 | 663 | 664 | 665 | 666 | echo "------------------" >>~/artifacts/7_listing-path_$dt.txt 667 | echo "PATH Environment Variable Listing" >> ~/artifacts/7_listing-path_$dt.txt 668 | echo "------------------" >> ~/artifacts/7_listing-path_$dt.txt 669 | OIFS=$IFS 670 | path=$(echo $PATH) 671 | IFS=':' # : is set as delimiter 672 | read -ra element <<< "$path" # path is read into an array as tokens separated by IFS 673 | IFS=' ' # reset to default value after usage 674 | 675 | for i in "${element[@]}"; do # access each element of array 676 | echo "$i" >> ~/artifacts/7_listing-path_$dt.txt 677 | done 678 | 679 | echo >> ~/artifacts/7_listing-path_$dt.txt 680 | echo >> ~/artifacts/7_listing-path_$dt.txt 681 | 682 | IFS=':' # : is set as delimiter 683 | read -ra element <<< 
"$path" # path is read into an array as tokens separated by IFS 684 | IFS=' ' # reset to default value after usage 685 | 686 | for i in "${element[@]}"; do # access each element of array 687 | echo "$i" >> ~/artifacts/7_listing-path_$dt.txt 688 | sudo ls -lah $i >> ~/artifacts/7_listing-path_$dt.txt 2>&1 689 | echo >> ~/artifacts/7_listing-path_$dt.txt 690 | done 691 | IFS=$OIFS 692 | echo >> ~/artifacts/7_listing-path_$dt.txt 693 | 694 | 695 | 696 | 697 | echo "------------------" >> ~/artifacts/19_ec2_metadata_$dt.txt 698 | echo "EC2 Metadata" >> ~/artifacts/19_ec2_metadata_$dt.txt 699 | echo "------------------" >> ~/artifacts/19_ec2_metadata_$dt.txt 700 | 701 | if [ -x "$(command -v ec2metadata)" ]; then 702 | sudo ec2metadata >> ~/artifacts/19_ec2_metadata_$dt.txt 2>&1 703 | else 704 | echo "ec2metadata command not found" >> ~/artifacts/19_ec2_metadata_$dt.txt 2>&1 705 | fi 706 | 707 | echo >> ~/artifacts/19_ec2_metadata_$dt.txt 708 | 709 | 710 | 711 | 712 | echo "------------------" >> ~/artifacts/20_packages_list_$dt.txt 713 | echo "Packages List" >> ~/artifacts/20_packages_list_$dt.txt 714 | echo "------------------" >> ~/artifacts/20_packages_list_$dt.txt 715 | sudo dpkg -l >> ~/artifacts/20_packages_list_$dt.txt 2>&1 716 | echo >> ~/artifacts/20_packages_list_$dt.txt 717 | 718 | 719 | 720 | 721 | mkdir ~/artifacts/8_interesting-files_$dt 722 | echo "------------------" >> ~/artifacts/8_interesting-files_$dt.txt 723 | echo "Interesting Files (.conf .config .yaml user password .err .deb .rpm boot.log .exe, .ps, .py and .sh)" >> ~/artifacts/8_interesting-files_$dt.txt 724 | echo "------------------" >> ~/artifacts/8_interesting-files_$dt.txt 725 | sudo find / -type f \( -iname "*.conf" -o -iname "*.config" -o -iname "*.yaml" -o -iname "*user*"\ 726 | -o -iname "*password*" -o -iname "*passwd*" -o -iname "*.err" -o -iname "*.deb" -o -iname "*.rpm" -o -iname "boot.log" -o -iname "*.exe" -o -iname "*.ps" -o -iname "*.py" -o -iname "*.sh" \) \ 727 | | while read 
-r file; do 728 | echo "$file" >> ~/artifacts/8_interesting-files_$dt.txt 729 | sudo cp "$file" ~/artifacts/8_interesting-files_$dt # double quotes inside filename is necessary to handle whitespaces 730 | done 731 | echo "Zipping Interesting Files" 732 | 733 | mkdir ~/artifacts/8_interesting-files_$dt/conf_$dt 734 | mkdir ~/artifacts/8_interesting-files_$dt/config_$dt 735 | mkdir ~/artifacts/8_interesting-files_$dt/yaml_$dt 736 | mkdir ~/artifacts/8_interesting-files_$dt/user_$dt 737 | mkdir ~/artifacts/8_interesting-files_$dt/password_$dt 738 | mkdir ~/artifacts/8_interesting-files_$dt/error_$dt 739 | mkdir ~/artifacts/8_interesting-files_$dt/deb_$dt 740 | mkdir ~/artifacts/8_interesting-files_$dt/rpm_$dt 741 | sudo mv ~/artifacts/8_interesting-files_$dt/*.conf ~/artifacts/8_interesting-files_$dt/conf_$dt 742 | sudo mv ~/artifacts/8_interesting-files_$dt/*.config ~/artifacts/8_interesting-files_$dt/config_$dt 743 | sudo mv ~/artifacts/8_interesting-files_$dt/*.yaml ~/artifacts/8_interesting-files_$dt/yaml_$dt 744 | sudo mv ~/artifacts/8_interesting-files_$dt/*.err ~/artifacts/8_interesting-files_$dt/error_$dt 745 | sudo mv ~/artifacts/8_interesting-files_$dt/*.deb ~/artifacts/8_interesting-files_$dt/deb_$dt 746 | sudo mv ~/artifacts/8_interesting-files_$dt/*.rpm ~/artifacts/8_interesting-files_$dt/rpm_$dt 747 | sudo mv ~/artifacts/8_interesting-files_$dt/*[uU][sS][eE][rR]* ~/artifacts/8_interesting-files_$dt/user_$dt 748 | sudo mv ~/artifacts/8_interesting-files_$dt/*[pP][aA][sS][sS][wW][oO][rR][dD]* ~/artifacts/8_interesting-files_$dt/password_$dt 749 | sudo mv ~/artifacts/8_interesting-files_$dt/*[pP][aA][sS][sS][wW][dD]* ~/artifacts/8_interesting-files_$dt/password_$dt 750 | 751 | sudo tar cvf ~/artifacts/8_interesting-files_$dt.tar --absolute-names ~/artifacts/8_interesting-files_$dt 752 | sudo rm -rf ~/artifacts/8_interesting-files_$dt 753 | echo >> ~/artifacts/8_interesting-files_$dt.txt 754 | 755 | 756 | 757 | 758 | 759 | 760 | 761 | # mkdir 
~/artifacts/9_interesting-directories_$dt 762 | echo "------------------" >> ~/artifacts/9_interesting-directories_$dt.txt 763 | echo "Interesting Directories" >> ~/artifacts/9_interesting-directories_$dt.txt 764 | echo "------------------" >> ~/artifacts/9_interesting-directories_$dt.txt 765 | sudo find / -type d \( -name "www" -o -name "htdocs" \) \ 766 | | while read -r file; do 767 | echo "$file" >> ~/artifacts/9_interesting-directories_$dt.txt 768 | # sudo cp -R "$file" ~/artifacts/9_interesting-directories_$dt 769 | done 770 | # echo "Zipping Interesting Directories" 771 | # sudo tar cvf ~/artifacts/9_interesting-directories_$dt.tar ~/artifacts/9_interesting-directories_$dt 772 | # sudo rm -rf ~/artifacts/9_interesting-directories_$dt 773 | echo >> ~/artifacts/9_interesting-directories_$dt.txt 774 | 775 | 776 | 777 | 778 | 779 | 780 | 781 | 782 | 783 | echo "------------------" >> ~/artifacts/10_tree_$dt.txt 784 | echo "Tree /home/" >> ~/artifacts/10_tree_$dt.txt 785 | echo "------------------" >> ~/artifacts/10_tree_$dt.txt 786 | #if [ -n "$(command -v yum)" ]; then 787 | # sudo yum install -y tree 788 | #else 789 | # sudo apt install -y tree 790 | #fi 791 | 792 | sudo ls -laR /home/ >> ~/artifacts/10_tree_$dt.txt 793 | 794 | echo >> ~/artifacts/10_tree_$dt.txt 795 | 796 | 797 | 798 | 799 | 800 | 801 | mkdir ~/artifacts/11_mail_$dt 802 | echo "------------------" 803 | echo "Mail Directories" 804 | echo "------------------" 805 | if [ -d "/var/mail/" ]; then 806 | sudo cp -R /var/mail/ ~/artifacts/11_mail_$dt 807 | fi 808 | if [ -d "/var/spool/mail/" ]; then 809 | sudo cp -R /var/spool/mail/ ~/artifacts/11_mail_$dt 810 | fi 811 | if [ -d "/var/vmail/" ]; then 812 | sudo cp -R /var/vmail/ ~/artifacts/11_mail_$dt 813 | fi 814 | 815 | cat /etc/passwd | cut -d: -f6 | grep /home/ | while read -r directory; do 816 | if [ -d "$directory/Maildir" ]; then 817 | sudo cp -R $directory/Maildir ~/artifacts/11_mail_$dt 818 | fi 819 | if [ -d "$directory/mail/sent-mail" ]; 
then 820 | sudo cp -R $directory/mail/sent-mail ~/artifacts/11_mail_$dt 821 | fi 822 | done 823 | echo 824 | 825 | 826 | 827 | 828 | 829 | echo "------------------" >> ~/artifacts/12_mysql_history_$dt.txt 830 | echo "MySQL History" >> ~/artifacts/12_mysql_history_$dt.txt 831 | echo "------------------" >> ~/artifacts/12_mysql_history_$dt.txt 832 | cat /etc/passwd | cut -d: -f6 | grep /home/ | while read -r directory; do 833 | if [ -f "$directory/.mysql_history" ]; then 834 | sudo cat $directory/.mysql_history >> ~/artifacts/12_mysql_history_$dt.txt 835 | fi 836 | done 837 | echo >> ~/artifacts/12_mysql_history_$dt.txt 838 | 839 | 840 | 841 | 842 | 843 | mkdir ~/artifacts/13_cron_$dt 844 | echo "------------------" >> ~/artifacts/13_cron_$dt.txt 845 | echo "Cron Jobs of Every User" >> ~/artifacts/13_cron_$dt.txt 846 | echo "------------------" >> ~/artifacts/13_cron_$dt.txt 847 | for user in $(cut -f1 -d: /etc/passwd); do 848 | echo $user >> ~/artifacts/13_cron_$dt.txt 849 | tmp_var=$(sudo crontab -u $user -l 2>&1) 850 | echo $tmp_var >> ~/artifacts/13_cron_$dt.txt 851 | done 852 | sudo cp -R /etc/cron* ~/artifacts/13_cron_$dt/ 853 | echo >> ~/artifacts/13_cron_$dt.txt 854 | 855 | 856 | 857 | mkdir ~/artifacts/14_tmp_$dt 858 | echo "------------------" >> ~/artifacts/14_tmp_$dt.txt 859 | echo "zipping /tmp/" >> ~/artifacts/14_tmp_$dt.txt 860 | echo "------------------" >> ~/artifacts/14_tmp_$dt.txt 861 | tar cvf ~/artifacts/14_tmp_$dt.tar --absolute-names /tmp/ 862 | echo >> ~/artifacts/14_tmp_$dt.txt 863 | 864 | 865 | 866 | mkdir ~/artifacts/15_home_hidden_file_dir_$dt 867 | echo "------------------" >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 868 | echo "zipping /home/ hidden dir & files" >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 869 | echo "------------------" >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 870 | 871 | usersHavingHomeDir=$(cat /etc/passwd | grep /bin/bash | cut -d: -f6) 872 | 873 | 874 | for userHavingHomeDir in $usersHavingHomeDir 875 | 
do 876 | tempVar=$(cd $userHavingHomeDir ; tmp=$(ls -d .?*); echo $tmp ) 877 | listOfFileToZip="" 878 | 879 | userName=$(echo ${userHavingHomeDir##*/} | tr -d '\040\011\012\015') 880 | 881 | for f in $tempVar 882 | do 883 | if [ $f == ".." ]; then 884 | continue 885 | fi 886 | listOfFileToZip=$listOfFileToZip" $userHavingHomeDir/$f" 887 | done 888 | 889 | echo $listOfFileToZip >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 890 | echo -e "\n" >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 891 | tarName=(~/artifacts/15_home_hidden_file_dir_"$userName"_$dt.tar) 892 | tar cvf $tarName --absolute-names $listOfFileToZip 893 | done 894 | echo >> ~/artifacts/15_home_hidden_file_dir_$dt.txt 895 | 896 | 897 | if [ "$DISTRO" == "osx" ]; then 898 | echo "------------------" 899 | echo "Please Wait - This step will take some time" 900 | echo "------------------" 901 | sudo python osxcollector.py 902 | fi 903 | 904 | echo "------------------" 905 | echo "Zipping Artifacts Folder" 906 | echo "------------------" 907 | rm ~/artifacts/cmd.txt 908 | sudo tar cvf ~/artifacts/output_$dt.tar --absolute-names ~/artifacts/ 909 | 910 | 911 | echo "------------------" 912 | echo "Removing Temporary Artifacts" 913 | echo "------------------" 914 | sudo rm -rf ~/artifacts/*.txt ~/artifacts/11_mail* ~/artifacts/13_cron* ~/artifacts/8_interesting-files*.tar ~/artifacts/14_tmp* ~/artifacts/15_home_hidden* 915 | 916 | 917 | echo "------------------" 918 | echo "Zipping Log File /var/log/" 919 | echo "------------------" 920 | sudo tar cvf ~/artifacts/logs.tar --absolute-names /var/log/ 921 | 922 | 923 | 924 | 925 | echo "------------------" 926 | echo "TCP Dump for 20 minutes" 927 | echo "------------------" 928 | if [ "$DISTRO" == "osx" ]; then 929 | activeInterface=$(sudo route get example.com | grep interface | cut -d: -f2) 930 | fi 931 | activeInterface=$(sudo route | grep '^default' | grep -o '[^ ]*$') 932 | sudo tcpdump -i $activeInterface -w tcpdump.pcap & 933 | pid=$! 
934 | sleep 1200 # sleep for 20 minutes 935 | sudo kill $pid 936 | 937 | 938 | 939 | sudo mv tcpdump.pcap ~/artifacts/ 940 | 941 | 942 | echo "script execute successfully!" 943 | 944 | -------------------------------------------------------------------------------- /eXir/usage-instructions.txt: -------------------------------------------------------------------------------- 1 | //text goes here 2 | -------------------------------------------------------------------------------- /exvt/exvt-0.8.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | # Original Author: Mukarram Khalid, Ebryx LLC 3 | # Description: It first checks the hash .. If exists, it'll grab those results .. otherwise upload and push it to a queue to be checked again for the results. 4 | # Usage: python vt.py /path/of/samples 5 | # If we comment line 110 .. It'll just check for the hash without uploading 6 | # If we comment, 106 to 109 .. it'll upload everything .. and check for the results .. 
7 | 8 | import requests 9 | import os, json, sys, secrets, time, hashlib 10 | 11 | keys = ['ac3c52036bc0de77187e2dd55e26ecc4f10a4e9a07cdaf5ffd3567c9721e1272', '405a1bdf56ce5fc78fe98fda1a43806abbbf0107141033da9883cbd7b8f7fb9f', 'd7dbb7b00c76d77ffc1a03573312e82966b7f5de6dcf61204f8ca94d74930daa'] 12 | 13 | class VirusTotal: 14 | ''' VirusTotal file scanner ''' 15 | #seconds 16 | timeout = 10 17 | samplesPath = None 18 | headers = {} 19 | 20 | def __init__(self, samplesPath): 21 | self.samplesPath = samplesPath.rstrip('/') + '/' 22 | self.headers = { 23 | 'Accept-Encoding': 'gzip, deflate', 24 | 'User-Agent' : 'Mozilla 5.0' 25 | } 26 | 27 | def queryHash(self, sampleHash): 28 | params = {'apikey': secrets.choice(keys), 'resource': sampleHash} 29 | try: 30 | response = requests.get('https://www.virustotal.com/vtapi/v2/file/report', params = params, headers = self.headers) 31 | except: 32 | time.sleep(self.timeout * 2) 33 | return self.queryHash(sampleHash) 34 | time.sleep(self.timeout) 35 | return response 36 | 37 | def checkHash(self, sample, queuedHash = False): 38 | if queuedHash: 39 | sampleHash = queuedHash 40 | else: 41 | sampleHash = hashlib.md5(open(self.samplesPath + sample, 'rb').read()).hexdigest() 42 | sampleHashresult = None 43 | while sampleHashresult == None: 44 | sampleHashresult = self.queryHash(sampleHash) 45 | try: 46 | sampleHashresult = sampleHashresult.json() 47 | except: 48 | sampleHashresult = None 49 | time.sleep(self.timeout * 2) 50 | if 'response_code' in sampleHashresult and sampleHashresult['response_code'] == 0: 51 | return False 52 | if 'total' in sampleHashresult and 'positives' in sampleHashresult: 53 | return {'hash' : sampleHash, 'total' : sampleHashresult['total'], 'positives' : sampleHashresult['positives']} 54 | return False 55 | 56 | def hashFound(self, sample, details): 57 | print('[+] ' + self.samplesPath + sample + ' - hash : ' + details['hash'] + ' - Detections : ' + str(details['positives']) + '/' + str(details['total'])) 58 | 
with open(self.samplesPath.replace('/', '_') + '_results.txt', 'a+') as results: 59 | results.write('[+] ' + self.samplesPath + sample + ' - hash : ' + details['hash'] + ' - Detections : ' + str(details['positives']) + '/' + str(details['total']) + "\n") 60 | 61 | def upload(self, sample): 62 | params = {'apikey': secrets.choice(keys)} 63 | files = {'file': (sample, open(self.samplesPath + sample, 'rb'))} 64 | try: 65 | response = requests.post('https://www.virustotal.com/vtapi/v2/file/scan', files = files, params = params, headers = self.headers) 66 | except: 67 | time.sleep(self.timeout * 2) 68 | return self.upload(sample) 69 | time.sleep(self.timeout) 70 | return response 71 | 72 | def queueToUpload(self, sample): 73 | results = None 74 | while results == None: 75 | results = self.upload(sample) 76 | try: 77 | results = results.json() 78 | except: 79 | results = None 80 | time.sleep(self.timeout * 2) 81 | if 'scan_id' in results: 82 | print('[+] ' + self.samplesPath + sample + ' - scan_id : ' + results['scan_id'] + ' - Queued') 83 | with open(self.samplesPath.replace('/', '_') + '_queued.txt', 'a+') as queued: 84 | queued.write(json.dumps({'path' : self.samplesPath, 'sample' : sample, 'scan_id' : results['scan_id']}) + "\n") 85 | 86 | def checkQueued(self): 87 | print('[+] Checking queued scans') 88 | if not os.path.isfile(self.samplesPath.replace('/', '_') + '_queued.txt'): 89 | return 90 | queuedItems = [] 91 | with open(self.samplesPath.replace('/', '_') + '_queued.txt') as queued: 92 | queuedItems = queued.read().splitlines() 93 | open(self.samplesPath.replace('/', '_') + '_queued.txt', 'w').close() 94 | if len(queuedItems) < 1: 95 | return 96 | for item in queuedItems: 97 | item = json.loads(item) 98 | hashFound = self.checkHash(None, item['scan_id']) 99 | if hashFound: 100 | self.hashFound(item['sample'], hashFound) 101 | continue 102 | with open(self.samplesPath.replace('/', '_') + '_queued.txt', 'a+') as queued: 103 | queued.write(json.dumps({'path' : 
self.samplesPath, 'sample' : item['sample'], 'scan_id' : item['scan_id']}) + "\n") 104 | time.sleep(self.timeout * 2) 105 | return self.checkQueued() 106 | 107 | def scan(self): 108 | samples = next(os.walk(self.samplesPath))[2] 109 | for sample in samples: 110 | hashFound = self.checkHash(sample) 111 | if hashFound: 112 | self.hashFound(sample, hashFound) 113 | continue 114 | self.queueToUpload(sample) 115 | time.sleep(self.timeout * 2) 116 | self.checkQueued() 117 | print('[+] Done') 118 | 119 | def main(): 120 | if len(sys.argv) < 2 or not os.path.isdir(sys.argv[1]): 121 | print('[-] Usage : %s /path/of/samples/here' % sys.argv[0]) 122 | print('[-] Example: %s /home/makman/Desktop' % sys.argv[0]) 123 | exit() 124 | virustotal = VirusTotal(sys.argv[1]) 125 | virustotal.scan() 126 | 127 | if __name__ == '__main__': 128 | try: 129 | main() 130 | except KeyboardInterrupt: 131 | exit("\n[-] CTRL-C detected.\n") 132 | # End 133 | -------------------------------------------------------------------------------- /exvt/usage-instructions.txt: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /lies/lies-0.2.py: -------------------------------------------------------------------------------- 1 | 2 | import re 3 | from elasticsearch import Elasticsearch 4 | from elasticsearch import helpers 5 | import glob, os 6 | 7 | es = Elasticsearch(host="", port=9200) 8 | from dateutil import parser 9 | import pprint 10 | 11 | reg = "([a-z,A-Z].*:\d\d) ([a-z,A-Z].*) (sshd.*\:) ([A-Z,a-z].*.from)(.\d*.\d*.\d*.\d*)" 12 | reg_keys = ["@timestamp","system","service","message","IP"] 13 | 14 | keys_process = ["MODULE:","MESSAGE:","PID:","NAME:","OWNER:","CMD:","PATH:"] 15 | keys_filescan = ["MODULE:","MESSAGE:","FILE:","TYPE:","SIZE:","FIRST_BYTES:","MD5:","SHA1:","SHA256:","CREATED:", 16 | "MODIFIED:","ACCESSED:","REASON_1:","REASON_2","REASON_3","SCORE:"] 17 | 
os.chdir("/media//li-results/") 18 | for file in glob.glob("*"): 19 | print "Bulking:",file,"to ES" 20 | f=open(file) 21 | bundle = [] 22 | for line in f.readlines(): 23 | try: 24 | if 'FileScan' == re.search('MODULE:(.*)MESSAGE',line).groups()[0].strip(' '): 25 | log = {} 26 | log['@timestamp']= parser.parse(line.split(' ')[0]) 27 | for count,key in enumerate(keys_filescan): 28 | try: 29 | found = re.search(key+'(.*)'+keys_filescan[count+1],line).groups()[0] 30 | log[key]=found 31 | except: 32 | log['SCORE']=line.split(':')[-1].strip() 33 | pass 34 | 35 | else: 36 | log = {} 37 | log['@timestamp']=parser.parse(line.split(' ')[0]) 38 | for count,key in enumerate(keys_process): 39 | try: 40 | found = re.search(key+'(.*)'+keys_process[count+1],line).groups()[0] 41 | log[key]=found 42 | except: 43 | pass 44 | except: 45 | pass 46 | bundle.append({"_index": "", "_type": "loki", "Workstation": file, "body": log}) 47 | if len(bundle) > 1000: 48 | print "INFO: Ingesting logs to Elasticsearch" 49 | helpers.bulk(es, bundle) 50 | bundle = [] 51 | 52 | helpers.bulk(es, bundle) 53 | -------------------------------------------------------------------------------- /vetter-py/README.md: -------------------------------------------------------------------------------- 1 | # Vetter.py 2 | 3 | Calculate MD5, SHA-1, or SHA-256 hashes for the files and search them against VirusTotal's databases (PublicAPIv3) 4 | 5 | ## Getting Started 6 | 7 | Simply clone the repository and the script's all you need along with a few public APIs 8 | 9 | ### Prerequisites and Requirements 10 | 11 | Before you can run the script, you need to: 12 | 13 | 1. Register for VirusTotal's Public API. 14 | 15 | Here's an excellent article covering just that (by VT itself): [VirusTotal APIs](https://support.virustotal.com/hc/en-us/articles/115002100149-API) 16 | 17 | 2. Requires Python (3.XX) 18 | 19 | 3. Install all dependencies using the requirements.txt file. 
Here's how:

Windows:
```
pip install -r requirements.txt
```
Linux:
```
pip3 install -r requirements.txt
```

4. Once you've signed up for the API, insert the API_KEY into the config.ini file which is provided along with the cloned script. As an example:

```
[VirusTotal]
apiKey = YYYYXXXXZZZZ
```

5. Start-up the script using:

Windows:
```
python vetter.py -h
```
Linux:
```
python3 vetter.py -h
```

### Few Use Cases

Here's a list of commands you can use to get started with Vetter:
```
d:\EbryxLabs\vetter-py>python vetter.py -h

usage: vetter.py [-h] --dir Directory to scan [--config Configuration file] [--algo Algorithms to use] [--filepath File to scan on VT] --mode Mode of operations [hash/search/scan/auto]

optional arguments:
  -h, --help            show this help message and exit
  --dir Directory to scan
                        Starting point of files to hash or hashes to search on VT (./)
  --config Configuration file
                        Configuration file for VT (config.ini)
  --algo Algorithms to use
                        Hashing algorithms [MD5, SHA1, SHA256*]
  --filepath File to scan on VT
                        Scan the file on VT by using it's complete path {MAX SIZE: 32MB}
  --mode Mode of operations [hash/search/scan/auto]
                        Calculate hashes, search hashes, or scan a file on VT. 'auto' calculates hashes and searches them on VT

```

At the moment, Vetter provides three options.

1. You can calculate the hashes for files in the input directory
```
python vetter.py --dir ./ --mode hash
```

2. You can search the calculated hashes or any of your own hash files against VirusTotal's APIs (it uses PublicAPIV3)
```
python vetter.py --dir ./ --mode search
```

3. 
You can do both these steps at once using the "auto" mode 84 | ``` 85 | python vetter.py --dir ./ --mode auto 86 | ``` 87 | 88 | 4. Specify the configuration file if you're not using the standard file provided with the script 89 | ``` 90 | python vetter.py --dir ./ --mode auto --config config-prod.ini 91 | ``` 92 | 93 | 5. Specify the hashing function you'd like to use by specifying it in a CSV format: 94 | ``` 95 | python vetter.py --dir ./ --mode search --algo md5,sha1 96 | ``` 97 | 6. If you wish to scan a file on VT, you can do it by selecting the 'scan' mode: {Please ensure a file of less than 32 MB!} 98 | ``` 99 | python vetter.py --dir ./ --mode scan --filepath ./Scripts/abc.ps1 100 | ``` 101 | 102 | ## Tested 103 | 104 | Tested on: 105 | 1. Windows 10 Pro 106 | 2. Ubuntu 18.04 107 | 108 | ## Contributing 109 | 110 | Please feel free to open issues related to your queries, problems, or anything you'd like us to add. It's open for contribution as well! 111 | -------------------------------------------------------------------------------- /vetter-py/config.ini: -------------------------------------------------------------------------------- 1 | [VirusTotal] 2 | apiKey = INSERT_YOUR_API_KEY_HERE 3 | 4 | -------------------------------------------------------------------------------- /vetter-py/requirements.txt: -------------------------------------------------------------------------------- 1 | certifi==2024.7.4 2 | chardet==3.0.4 3 | idna==3.7 4 | requests==2.32.2 5 | urllib3==1.26.19 6 | virustotal-api==1.1.11 7 | -------------------------------------------------------------------------------- /vetter-py/vetter.py: -------------------------------------------------------------------------------- 1 | ''' 2 | Name: Vetter 3 | Description: Calculate hashes from a given directory and check against VT's databases 4 | Or, you can scan a file on VT and check it's output 5 | 6 | Author: Ebryx 7 | Version: 0.2 8 | Date: 18-02-2020 9 | 10 | ''' 11 | 12 | # Library 
imports 13 | import os 14 | import requests 15 | import time 16 | import json 17 | import hashlib 18 | import platform 19 | import configparser 20 | import argparse 21 | 22 | from datetime import datetime 23 | from virus_total_apis import PublicApi 24 | 25 | currTime = datetime.now() 26 | currTime = currTime.strftime("%d-%m-%Y_%H%M%S") 27 | 28 | # Helper Functions 29 | 30 | def saveVtResults(vtResults, mode): 31 | ''' Stores VT's results in a file in the same directory ''' 32 | 33 | for result in vtResults: 34 | jsonResult = json.dumps(result, indent=4) 35 | fileObj = open(f'vetter-{mode}-{currTime}.json', 'a+') 36 | print(jsonResult, file=fileObj) 37 | 38 | def getFiles(directory): 39 | '''Return the files in the current directory and all its child directories''' 40 | 41 | targetFiles = [] 42 | fileCount = 0 43 | 44 | for root, dirs, files in os.walk(directory): 45 | 46 | for file in files: 47 | fileName = os.path.abspath(os.path.join(root, file)) 48 | #print("[+] Successfully found file at: " + str(fileName)) 49 | fileCount += 1 50 | try: 51 | targetFiles.append(fileName) 52 | except: 53 | print(f"[-] An error occured while processing file: {fileName}") 54 | 55 | print(f"[+] Located all files. 
Final Count: {fileCount}") 56 | return targetFiles 57 | 58 | def saveHashes(hashes, mode): 59 | ''' Save all hashes in files ''' 60 | 61 | with open(f"vetter-{platform.node()}-{mode}.txt", "a") as fileObj: 62 | for aHash in hashes: 63 | record = str(aHash[1]) + " ; " + str(aHash[0]) + " \n" 64 | fileObj.write(record) 65 | 66 | # VirusTotal Search 67 | 68 | def processVtMode(directory, config): 69 | '''Starts up VT mode for searching hashes''' 70 | 71 | vt = setupVt(config) 72 | getSearchReports(vt, directory) 73 | 74 | def setupVt(config): 75 | '''Initialize the VirusTotal Public API Object''' 76 | 77 | API_KEY = returnApiKey(config) 78 | vt = PublicApi(API_KEY) 79 | return vt 80 | 81 | def returnApiKey(configFile): 82 | '''Returns the VT API Key from the configuration file ''' 83 | 84 | config = configparser.ConfigParser() 85 | 86 | try: 87 | config.read(configFile) 88 | except: 89 | print("[-] Error in reading config.ini. Setup the configuration properly and execute Vetter.") 90 | vtApiKey = config['VirusTotal']['apiKey'] 91 | 92 | if vtApiKey: 93 | print("[+] Loaded VT API Key") 94 | 95 | return vtApiKey 96 | 97 | def getSearchReports(vtObj, directory): 98 | 99 | extensions = ['txt'] 100 | hashFiles = getHashFiles(directory, extensions) 101 | if not hashFiles: 102 | # TODO Add this argument support 103 | print("[-] No files found to match hashes from. Please use the '--files' argument to specify your files or rename them with 'vetter'") 104 | exit() 105 | 106 | searchCount = 1 107 | vtOutputs = [] 108 | hashLength = (32, 40, 64) 109 | 110 | for file in hashFiles: 111 | with open(file, 'r') as fileObj: 112 | for line in fileObj: 113 | if not ";" in line: 114 | continue 115 | 116 | hash = line.split(";")[0].rstrip(" ") 117 | fileName = line.split(";")[1].lstrip(" ").rstrip(" \n") 118 | if len(hash) not in hashLength: 119 | print(f"[-] Unable to process hash: {hash}") 120 | continue 121 | 122 | # TODO: Add generator support! 
(or async calls for faster execution) 123 | # TODO: Add support for batch reporting 124 | response = vtObj.get_file_report(hash) 125 | compResponse = { 126 | 'response': response, 127 | 'file_name': fileName 128 | } 129 | vtOutputs.append(compResponse) 130 | if searchCount%4 == 0: 131 | analyzeVtOutput(vtOutputs) 132 | vtOutputs = [] 133 | print("[+] Cool down time to stay within assigned quota!") 134 | time.sleep(60) 135 | 136 | searchCount += 1 137 | 138 | def getHashFiles(directory, extensions): 139 | 140 | hashFiles = [] 141 | fileMatchKeywords = ['vetter', 'md5', 'sha1', 'sha-1', 'sha-256', 'sha256'] 142 | 143 | for root, dirs, files in os.walk(directory): 144 | for file in files: 145 | try: 146 | fileName, fileExt = file.split(".") 147 | matches = [option for option in fileMatchKeywords if option in fileName] 148 | 149 | if len(matches) >= 1 and fileExt in extensions: 150 | hashFiles.append(file) 151 | 152 | except: 153 | pass 154 | 155 | return hashFiles 156 | 157 | def analyzeVtOutput(outputs): 158 | 159 | vtResults = [] 160 | noVtResultsAvailable = [] 161 | vtLink = "https://https://www.virustotal.com/gui/file/" 162 | 163 | for output in outputs: 164 | 165 | singleResult = output['response'] 166 | 167 | try: 168 | respCode = singleResult['response_code'] 169 | 170 | # There's an error due to the limit being crossed or some other issue 171 | if respCode == 204 or ("error" in singleResult.keys()): 172 | print(f"[-] ERROR: {singleResult['error']}") 173 | return 174 | 175 | # The hash isn't available on VT and needs manual scanning 176 | elif respCode == 200 and (singleResult['results']['response_code'] == 0): 177 | print(f"[-] The hash isn't available for searching on VT. 
Check the 'manual-scan' file for more information.") 178 | noVtResultsAvailable.append(output['file_name']) 179 | 180 | # The hash is available on VT and might be a positive 181 | elif respCode == 200 and ("scans" in singleResult['results'].keys()): 182 | results = singleResult['results'] 183 | sha1Hash = results['sha1'] 184 | message = f'https://www.virustotal.com/gui/file/{sha1Hash}/detection' 185 | result = { 186 | 'File': output['file_name'], 187 | 'SHA-256 Hash': results['sha256'], 188 | 'SHA1 Hash': sha1Hash, 189 | 'MD5 Hash': results['md5'], 190 | 'Positives': results['positives'], 191 | 'Total': results['total'], 192 | 'Message': message 193 | } 194 | print(f"[+] Found a match. Positives: {result['Positives']} out of {result['Total']}") 195 | 196 | vtResults.append(result) 197 | 198 | else: 199 | print("[-] Illegal output received by VT.") 200 | 201 | except Exception as e: 202 | 203 | if hasattr(e, 'message'): 204 | print(e.message) 205 | else: 206 | print(e) 207 | 208 | if vtResults is not []: 209 | saveVtResults(vtResults, mode='results') 210 | 211 | if noVtResultsAvailable is not []: 212 | saveVtResults(noVtResultsAvailable, mode='manual-search') 213 | 214 | 215 | # Hashing 216 | 217 | def processHashMode(args): 218 | 219 | currDir = args['dir'] 220 | # Parse the algorithm choice 221 | hashingAlgos = args['algo'].split(',') 222 | 223 | # Get all files in the given directory 224 | targetFiles = getFiles(currDir) 225 | 226 | # Calculate hashes and save them 227 | calculateHashes(hashingAlgos, targetFiles) 228 | 229 | def calculateBlockHash(bytesiter, hasher): 230 | ''' Processes each block in bytes and updates the hash ''' 231 | 232 | for block in bytesiter: 233 | hasher.update(block) 234 | return hasher.hexdigest() 235 | 236 | def processFile(fileName, blockSize=65536): 237 | '''Returns data in chunks for processing by the hashing algorithm''' 238 | 239 | try: 240 | with open(fileName, 'rb') as fileObj: 241 | block = fileObj.read(blockSize) 242 | while 
len(block) > 0: 243 | yield block 244 | block = fileObj.read(blockSize) 245 | except: 246 | print(f"[-] Failure in processing file: {fileName}") 247 | 248 | def calculateHashes(hashingAlgos, files): 249 | '''Calculate file hashes against each found file ''' 250 | 251 | md5hash = [] 252 | sha1hash = [] 253 | sha256hash = [] 254 | 255 | for algo in hashingAlgos: 256 | algoName = algo.lower() 257 | if algoName == "md5": 258 | for aFile in files: 259 | calcHash = calculateBlockHash(processFile(aFile), hashlib.md5()) 260 | # Format: File Name, Hash 261 | md5hash.append((aFile, calcHash)) 262 | print("[+] MD5 hashes calculated.") 263 | saveHashes(md5hash, "md5") 264 | 265 | elif algoName == "sha1" or algoName == "sha-1": 266 | for aFile in files: 267 | calcHash = calculateBlockHash(processFile(aFile), hashlib.sha1()) 268 | sha1hash.append((aFile, calcHash)) 269 | print("[+] SHA-1 hashes calculated.") 270 | saveHashes(sha1hash, "sha1") 271 | 272 | elif algoName == "sha256" or algoName == "sha-256": 273 | for aFile in files: 274 | calcHash = calculateBlockHash(processFile(aFile), hashlib.sha256()) 275 | # Just need the file name? Use this: .split('\\')[-1] with aFile and voila! 
276 | sha256hash.append((aFile, calcHash)) 277 | print("[+] SHA-256 hashes calculated.") 278 | sha256 = 1 279 | saveHashes(sha256hash, "sha256") 280 | 281 | # Scanner 282 | 283 | def processScanMode(config, filePath): 284 | '''Setup scan mode by configuring API and then present report''' 285 | 286 | vt = setupVt(config) 287 | getScanReport(vt, filePath) 288 | 289 | def getScanReport(vtObj, filePath): 290 | '''Scans the given file on VT''' 291 | 292 | # TODO: Find existing reports on VT to save bandwidth 293 | 294 | results = vtObj.scan_file(filePath) 295 | if results['response_code'] == 200: 296 | scanReport = { 297 | 'File Path': filePath, 298 | 'scan_ID': results['results']['scan_id'], 299 | 'SHA1': results['results']['sha1'], 300 | 'SHA256': results['results']['sha256'], 301 | 'MD5': results['results']['md5'], 302 | 'Permalink': results['results']['permalink'], 303 | 'Message': results['results']['verbose_msg'], 304 | } 305 | 306 | # TODO: Write a mode to get the scanned report only (since new scans are queued only) 307 | fileReport = vtObj.get_file_report(scanReport['scan_ID']) 308 | 309 | fileOutput = json.dumps(fileReport, indent=4) 310 | jsonOutput = json.dumps(scanReport, indent=4) 311 | fileObj = open(f'vetter-scan-{currTime}.json', 'a+') 312 | print(jsonOutput, file=fileObj) 313 | print(fileOutput, file=fileObj) 314 | 315 | print("[+] Successfully scanned the file and saved output in the current directory.") 316 | 317 | elif results['response_code'] == 204: 318 | print("[-] You've crossed your quota limits. Please wait for a minute to continue scanning") 319 | exit() 320 | 321 | else: 322 | print("[-] Either the file is already scanned on VT or there's a different issue. 
Crash output: ") 323 | print(results) 324 | 325 | # General Calls 326 | 327 | def processModes(args): 328 | ''' Determine the appropriate execution flow based on selected mode ''' 329 | 330 | mode = args['mode'] 331 | 332 | if mode == "hash": 333 | processHashMode(args) 334 | 335 | elif mode == "search": 336 | processVtMode(args['dir'], args['config']) 337 | 338 | elif mode == "scan": 339 | processScanMode(args['config'], args['filepath']) 340 | 341 | elif mode == "auto": 342 | processHashMode(args) 343 | processVtMode(args['dir'], args['config']) 344 | 345 | def sanityCheck(args): 346 | ''' Check for the sanity of all arguments passed ''' 347 | possibleModes = ('hash', 'search', 'scan', 'auto') 348 | 349 | # Check if configuration file exists 350 | if not (os.path.exists(args['config'])): 351 | print(f"[-] Error reading the configuration file: {args['config']}") 352 | exit() 353 | 354 | # Configure the right directory 355 | elif not os.path.isdir(''+args['dir']): 356 | print("[ERROR] Specified path does not exist. Issue: --dir ") 357 | exit() 358 | 359 | elif args['mode']=="scan": 360 | if not os.path.isfile(args['filepath']): 361 | print("[ERROR] Use the correct file path to scan. 
Issue: --filepath") 362 | exit() 363 | 364 | elif args['mode'] not in possibleModes: 365 | print('[ERROR] Wrong mode selected!') 366 | exit() 367 | 368 | def parseArgs(): 369 | ''' Parse arguments from command line ''' 370 | 371 | ap = argparse.ArgumentParser() 372 | ap.add_argument("--dir", metavar="Directory to scan", required=True, help="Starting point of files to hash or hashes to search on VT (./)") 373 | ap.add_argument("--config", metavar="Configuration file", default="config.ini", help="Configuration file for VT (config.ini)") 374 | ap.add_argument("--algo", metavar="Algorithms to use", default="SHA256", help="Hashing algorithms [MD5, SHA1, SHA256*]") 375 | ap.add_argument("--filepath", metavar="File to scan on VT", help="Scan the file on VT by using it's complete path {MAX SIZE: 32MB}") 376 | ap.add_argument("--mode", metavar="Mode of operations [hash/search/scan/auto]", required=True, help="Calculate hashes, search hashes, or scan a file on VT. 'auto' calculates hashes and searches them on VT") 377 | args = vars(ap.parse_args()) 378 | return args 379 | 380 | def main(): 381 | ''' Starting point of our program ''' 382 | 383 | args = parseArgs() 384 | sanityCheck(args) 385 | 386 | processModes(args) 387 | 388 | if __name__ == '__main__': 389 | main() 390 | --------------------------------------------------------------------------------