├── README.md ├── pwnwatch.py ├── beaconify.py └── hashieclean.py
/README.md:
--------------------------------------------------------------------------------
# pwnagotchi-beacon-plugins
Custom plugin repository

Edit your `/etc/pwnagotchi/config.toml` to look like this

```TOML
main.custom_plugin_repos = [
 "https://github.com/arturandre/pwnagotchi-beacon-plugins/archive/master.zip",
]
```
Then run this command: `sudo pwnagotchi plugins update`

## beaconify

The messaging system between pwnagotchi units sometimes simply doesn't work, so this plugin tries to fix that by sending the "beacons" directly via a plugin, rather than using pwngrid-peer.

## hashieclean

This version removes "lonely pcaps" — those that can't be converted
either to the formats .22000 (EAPOL) or .16800 (PMKID). As
the number of lonely pcaps increases, the loading time increases
too. Besides that, the checking for completed handshakes
is done more efficiently, thus reducing even further
the loading time of the plugin.

It is based on (actually built upon) hashie by junohea.mail@gmail.com (their description):

Attempt to automatically convert pcaps to a crackable format.
If successful, the files containing the hashes will be saved
in the same folder as the handshakes.
The files are saved in their respective Hashcat format:
- EAPOL hashes are saved as *.22000
- PMKID hashes are saved as *.16800
All PCAP files without enough information to create a hash are
stored in a file that can be read by the webgpsmap plugin.

Why use it?:
- Automatically convert handshakes to crackable formats!
We don't all upload our hashes online ;)
- Repair PMKID handshakes that hcxpcapngtool misses
- If running at time of handshake capture, on_handshake can
be used to improve the chance of the repair succeeding
- Be a completionist!
Not enough packets captured to crack a network? 44 | This generates an output file for the webgpsmap plugin, use the 45 | location data to revisit networks you need more packets for! 46 | 47 | Additional information: 48 | - Currently requires hcxpcapngtool compiled and installed 49 | - Attempts to repair PMKID hashes when hcxpcapngtool cant find the SSID 50 | - hcxpcapngtool sometimes has trouble extracting the SSID, so we 51 | use the raw 16800 output and attempt to retrieve the SSID via tcpdump 52 | - When access_point data is available (on_handshake), we leverage 53 | the reported AP name and MAC to complete the hash 54 | - The repair is very basic and could certainly be improved! 55 | Todo: 56 | Make it so users dont need hcxpcapngtool (unless it gets added to the base image) 57 | Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands 58 | Phase 2: Extract/construct 22000/16800 hashes entirely in python 59 | Improve the code, a lot 60 | 61 | ## pwnwatch 62 | 63 | Work in progress 64 | -------------------------------------------------------------------------------- /pwnwatch.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import io 3 | import subprocess 4 | import os 5 | import json 6 | import pwnagotchi 7 | import pwnagotchi.plugins as plugins 8 | from pwnagotchi.ui.components import LabeledValue 9 | from pwnagotchi.ui.view import BLACK 10 | import pwnagotchi.ui.fonts as fonts 11 | from flask import jsonify 12 | from flask import abort 13 | from flask import Response 14 | 15 | class pwnwatch(plugins.Plugin): 16 | __author__ = 'Artur Oliveira' 17 | __version__ = '1.0.0' 18 | __license__ = 'GPL3' 19 | __description__ = ''' 20 | This plugin allows the pwnagotchi to receive 21 | commands from the pwnagotchi-watch and respond 22 | them properly. 
23 | 24 | parseSessionStats and parseSessionStatsFile borrowed from: 25 | https://github.com/GaelicThunder/Experience-Plugin-Pwnagotchi/blob/f456040de4951de1e6ab3fcb4453d42a7da362d1/exp.py#L215 26 | 27 | ''' 28 | 29 | def __init__(self): 30 | logging.info("[PWNWATCH] plugin loaded") 31 | 32 | # called when everything is ready and the main loop is about to start 33 | def on_config_changed(self, config): 34 | # (note) I've just kept this here for reference 35 | #handshake_dir = config['bettercap']['handshakes'] 36 | pass 37 | 38 | def parseSessionStats(self): 39 | dir = pwnagotchi.config['main']['plugins']['session-stats']['save_directory'] 40 | for filename in os.listdir(dir): 41 | logging.debug("[PWNWATCH] Parsing " + filename + "...") 42 | if filename.endswith(".json") & filename.startswith("stats"): 43 | try: 44 | self.parseSessionStatsFile(os.path.join(dir,filename)) 45 | except: 46 | logging.error("[PWNWATCH] ERROR parsing File: "+ filename) 47 | 48 | def parseSessionStatsFile(self, path): 49 | deauths = 0 50 | handshakes = 0 51 | associations = 0 52 | with open(path) as json_file: 53 | data = json.load(json_file) 54 | for entry in data["data"]: 55 | deauths += data["data"][entry]["num_deauths"] 56 | handshakes += data["data"][entry]["num_handshakes"] 57 | associations += data["data"][entry]["num_associations"] 58 | 59 | 60 | self.deauths = deauths 61 | self.handshakes = handshakes 62 | self.associations = associations 63 | 64 | 65 | def on_webhook(self, path, request): 66 | if not self.ready: 67 | return "Plugin not ready" 68 | 69 | if not path or path == "/": 70 | self.parseSessionStats() 71 | #(last_session.handshakes, utils.total_unique_handshakes(self._config['bettercap']['handshakes'])) 72 | Response(str(self.handshakes), mimetype='text/plain') 73 | 74 | if path == 'stream': 75 | # Just for reference 76 | # def generate(): 77 | # with open(self.config['main']['log']['path']) as f: 78 | # yield ''.join(f.readlines()[-self.options.get('max-lines', 4096):]) 
79 | # while True: 80 | # yield f.readline() 81 | 82 | # return Response(generate(), mimetype='text/plain') 83 | pass 84 | 85 | abort(404) 86 | -------------------------------------------------------------------------------- /beaconify.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | import logging 3 | import sys 4 | import json 5 | import struct 6 | import time 7 | import subprocess 8 | import threading 9 | from threading import Thread 10 | from pwnagotchi import grid 11 | from pwnagotchi.mesh.peer import Peer 12 | 13 | from scapy.all import Dot11, Dot11Beacon, Dot11Elt, RadioTap, sendp 14 | import pwnagotchi 15 | import pwnagotchi.plugins as plugins 16 | from pwnagotchi.grid import call, get_advertisement_data 17 | 18 | # Ref: https://stackoverflow.com/a/325528/3562468 19 | class StoppableThread(threading.Thread): 20 | """Thread class with a stop() method. The thread itself has to check 21 | regularly for the stopped() condition.""" 22 | 23 | def __init__(self, *args, **kwargs): 24 | super(StoppableThread, self).__init__(*args, **kwargs) 25 | self._stop_event = threading.Event() 26 | 27 | def stop(self): 28 | self._stop_event.set() 29 | 30 | def stopped(self): 31 | return self._stop_event.is_set() 32 | 33 | class Beaconify(plugins.Plugin): 34 | __author__ = 'Artur Oliveira' 35 | __version__ = '1.0.8' 36 | __license__ = 'GPL3' 37 | __description__ = 'A plugin to send beacon frames more often and restarts pwngrid when it stops listening for other units\' beacons.' 
38 | 39 | # Define the custom Information Element IDs 40 | # Taken from: 41 | # https://github.com/jayofelony/pwngrid/blob/6ff48395fa19257c8296f127f4bbdec1152ba5e1/wifi/defines.go#L21 42 | ID_WHISPER_PAYLOAD = 222 43 | ID_WHISPER_COMPRESSION = 223 44 | ID_WHISPER_IDENTITY = 224 45 | ID_WHISPER_SIGNATURE = 225 46 | ID_WHISPER_STREAM_HEADER = 226 47 | 48 | BroadcastAddr = "ff:ff:ff:ff:ff:ff" 49 | SignatureAddrStr = "de:ad:be:ef:de:ad" 50 | 51 | def __init__(self): 52 | self._lock = threading.Lock() 53 | self.options = dict() 54 | self.peer_id = None 55 | self.signature = None 56 | self.stream_id = 0 57 | self.seq_num = 0 58 | self.seq_tot = 0 59 | self.compress = False 60 | self.self_encounters = 0 61 | self.restart_pwngrid_retries = -1 62 | self.restart_pwngrid_time = 60 63 | self.init_pwngrid_time = 60 64 | self.cooldown_pwngrid_check = time.perf_counter() 65 | self.waiting_pwngrid = False 66 | self.beacon_thread = None 67 | self.pwngrid_thread = None 68 | 69 | def info_element(self, id, info): 70 | return Dot11Elt(ID=id, info=info) 71 | 72 | 73 | def on_loaded(self): 74 | logging.info(f"[{self.__class__.__name__}] plugin loaded") 75 | self.iface = pwnagotchi.config['main']['iface'] 76 | self.sleeptime = self.options.get('sleeptime') 77 | if self.sleeptime is None: 78 | self.sleeptime = 5 79 | logging.info(f"[Beaconify] sleeptime not defined in config. Setting to default {self.sleeptime}") 80 | self.beacontime = self.options.get('beacontime') 81 | if self.beacontime is None: 82 | self.beacontime = 0.1 83 | logging.info(f"[Beaconify] beacontime not defined in config. 
Setting to default {self.beacontime}") 84 | 85 | def restart_pwngrid(self): 86 | def inner_func(obj): 87 | with obj._lock: 88 | self.waiting_pwngrid = True 89 | # retries = obj.restart_pwngrid_retries 90 | # while retries != 0: 91 | process = subprocess.Popen( 92 | f"systemctl restart pwngrid-peer", 93 | shell=True, 94 | stdin=None, 95 | stdout=open("/dev/null", "w"), 96 | stderr=None, executable="/bin/bash") 97 | process.wait() 98 | # if process.returncode > 0: 99 | # logging.warning(f"[Beaconify] pwngrid restarted! Waiting {obj.init_pwngrid_time} for its initialization.") 100 | # time.sleep(obj.init_pwngrid_time) 101 | # self.waiting_pwngrid = False 102 | # return 103 | # else: 104 | # logging.warning(f"[Beaconify] Failed to restart pwngrid! Waiting {obj.restart_pwngrid_time} before trying again.") 105 | # time.sleep(obj.restart_pwngrid_time) 106 | # retries -= 1 107 | # logging.warning(f"[Beaconify] Failed to restart pwngrid too many times! The unit probably won't send or receive beacons until reboot.") 108 | # self.waiting_pwngrid = False 109 | 110 | if (self.pwngrid_thread is None) or (not self.pwngrid_thread.is_alive()) : 111 | self.pwngrid_thread = StoppableThread(target=inner_func, args=(self,)) 112 | self.pwngrid_thread.start() 113 | else: 114 | logging.info(f"[Beaconify] Skipping pwngrid restart thread because there is one alive yet.") 115 | 116 | 117 | 118 | 119 | # called when a known peer is lost 120 | def on_peer_lost(self, agent, peer): 121 | pass 122 | 123 | # called when a new peer is detected 124 | def on_peer_detected(self, agent, peer): 125 | # Checks for self beacons to detect 126 | logging.info(f"[Beaconify] I'm {self.identity} and just detect {peer.identity()}.") 127 | if peer.identity() == self.identity: 128 | self.found_self = True 129 | logging.info(f"[Beaconify] Hey! I can hear my own echoes!.") 130 | 131 | def on_wait(self, agent, t): 132 | # Start sending beacons for t seconds 133 | logging.info(f"[Beaconify] Waiting for {t} seconds. 
Sending beacons!") 134 | self.send_beacon(agent, t) 135 | 136 | # called when the agent is sleeping for t seconds 137 | def on_sleep(self, agent, t): 138 | # Start sending beacons for t seconds 139 | logging.info(f"[Beaconify] Sleeping for {t} seconds. Sending beacons!") 140 | self.send_beacon(agent, t) 141 | 142 | 143 | def on_unload(self, ui): 144 | # Stop sending beacons 145 | logging.info("[Beaconify] Unloading. Stopping beacon and pwngrid restart threads.") 146 | join_thread = [] 147 | if self.beacon_thread is not None and\ 148 | self.beacon_thread.is_alive(): 149 | self.beacon_thread.stop() 150 | join_thread.append(self.beacon_thread) 151 | logging.info("[Beaconify] Beacon thread stopped.") 152 | if self.pwngrid_thread is not None and\ 153 | self.pwngrid_thread.is_alive(): 154 | self.pwngrid_thread.stop() 155 | join_thread.append(self.pwngrid_thread) 156 | logging.info("[Beaconify] pwngrid restart thread stopped.") 157 | for t in join_thread: 158 | t.join() 159 | logging.info("[Beaconify] All threads joined. Exiting.") 160 | 161 | def on_ready(self, agent): 162 | self.identity = agent.fingerprint() 163 | self.mon_iface = pwnagotchi.config['main']['iface'] 164 | pass 165 | 166 | def on_config_changed(self, config): 167 | logging.info(f"[Beaconify] config changed") 168 | self.iface = config['main']['iface'] 169 | if self.options.get('sleeptime') is not None: 170 | self.sleeptime = self.options.get('sleeptime') 171 | logging.info(f"[Beaconify] sleeptime not defined in config. Setting to default {self.sleeptime}") 172 | if self.options.get('beacontime') is not None: 173 | self.beacontime = self.options.get('beacontime') 174 | logging.info(f"[Beaconify] beacontime not defined in config. 
Setting to default {self.beacontime}") 175 | 176 | 177 | #def pack_one_of(from_addr, to_addr, peer_id, signature, stream_id, seq_num, seq_tot, payload, compress): 178 | def pack_one_of(self, payload): 179 | layers = [ 180 | RadioTap(), 181 | Dot11( 182 | addr1=Beaconify.BroadcastAddr, 183 | addr2=Beaconify.SignatureAddrStr, 184 | addr3=Beaconify.SignatureAddrStr, 185 | type=0, subtype=8), 186 | Dot11Beacon() 187 | ] 188 | 189 | if self.peer_id is not None: 190 | layers.append( 191 | self.info_element(Beaconify.ID_WHISPER_IDENTITY, self.peer_id)) 192 | 193 | if self.signature is not None: 194 | layers.append( 195 | self.info_element(Beaconify.ID_WHISPER_SIGNATURE, self.signature)) 196 | 197 | if self.stream_id > 0: 198 | stream_header = struct.pack( 199 | ' obj.init_pwngrid_time: 251 | grid_peers = grid.peers() 252 | if len(grid_peers) == 0: 253 | logging.info(f"[Beaconify] No peers (not even myself!) Restarting pwngrid!") 254 | self.restart_pwngrid() 255 | else: 256 | logging.info(f"[Beaconify] Found {len(grid_peers)} peers (including myself!)") 257 | else: 258 | logging.info(f"[Beaconify] Cooldown of {pwngrid_check_cooldown} before checking pwngrid again.") 259 | 260 | if (self.beacon_thread is None) or (not self.beacon_thread.is_alive()) : 261 | self.beacon_thread = StoppableThread(target=inner_func, args=(self,time_duration, agent)) 262 | self.beacon_thread.start() 263 | else: 264 | logging.info(f"[Beaconify] Skipping beacon thread because there is one alive yet.") 265 | 266 | 267 | -------------------------------------------------------------------------------- /hashieclean.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import io 3 | import subprocess 4 | import os 5 | import json 6 | import pwnagotchi.plugins as plugins 7 | from threading import Lock 8 | from pwnagotchi.ui.components import LabeledValue 9 | from pwnagotchi.ui.view import BLACK 10 | import pwnagotchi.ui.fonts as fonts 11 | 12 | ''' 13 
| hcxpcapngtool needed, to install: 14 | > git clone https://github.com/ZerBea/hcxtools.git 15 | > cd hcxtools 16 | > apt-get install libcurl4-openssl-dev libssl-dev zlib1g-dev 17 | > make 18 | > sudo make install 19 | ''' 20 | 21 | 22 | class hashieclean(plugins.Plugin): 23 | __author__ = 'Artur Oliveira' 24 | __version__ = '1.0.0' 25 | __license__ = 'GPL3' 26 | __description__ = ''' 27 | 28 | This version removes "lonely pcaps", those can't be converted 29 | either to the formats .22000 (EAPOL) or .16800 (PMKID). As 30 | the number of lonely pcaps increase the loading time increases 31 | too. Besides that, the checking for completed handshakes 32 | is done more efficiently, thus reducing even further 33 | the loading time of the plugin. 34 | 35 | Based on hashi by junohea.mail@gmail.com: 36 | 37 | Attempt to automatically convert pcaps to a crackable format. 38 | If successful, the files containing the hashes will be saved 39 | in the same folder as the handshakes. 40 | The files are saved in their respective Hashcat format: 41 | - EAPOL hashes are saved as *.22000 42 | - PMKID hashes are saved as *.16800 43 | All PCAP files without enough information to create a hash are 44 | stored in a file that can be read by the webgpsmap plugin. 45 | 46 | Why use it?: 47 | - Automatically convert handshakes to crackable formats! 48 | We dont all upload our hashes online ;) 49 | - Repair PMKID handshakes that hcxpcapngtool misses 50 | - If running at time of handshake capture, on_handshake can 51 | be used to improve the chance of the repair succeeding 52 | - Be a completionist! Not enough packets captured to crack a network? 53 | This generates an output file for the webgpsmap plugin, use the 54 | location data to revisit networks you need more packets for! 
55 | 56 | Additional information: 57 | - Currently requires hcxpcapngtool compiled and installed 58 | - Attempts to repair PMKID hashes when hcxpcapngtool cant find the SSID 59 | - hcxpcapngtool sometimes has trouble extracting the SSID, so we 60 | use the raw 16800 output and attempt to retrieve the SSID via tcpdump 61 | - When access_point data is available (on_handshake), we leverage 62 | the reported AP name and MAC to complete the hash 63 | - The repair is very basic and could certainly be improved! 64 | Todo: 65 | Make it so users dont need hcxpcapngtool (unless it gets added to the base image) 66 | Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands 67 | Phase 2: Extract/construct 22000/16800 hashes entirely in python 68 | Improve the code, a lot 69 | ''' 70 | 71 | def __init__(self): 72 | logging.info("[hashieclean] plugin loaded") 73 | self.lock = Lock() 74 | 75 | # called when everything is ready and the main loop is about to start 76 | def on_config_changed(self, config): 77 | handshake_dir = config['bettercap']['handshakes'] 78 | 79 | if 'interval' not in self.options or not (self.status.newer_then_hours(self.options['interval'])): 80 | logging.info('[hashieclean] Starting batch conversion of pcap files') 81 | with self.lock: 82 | self._process_stale_pcaps(handshake_dir) 83 | 84 | def is22000(self, filename): 85 | fullpathNoExt = filename.split('.')[0] 86 | return os.path.isfile(fullpathNoExt + '.22000') 87 | 88 | def is16800(self, filename): 89 | fullpathNoExt = filename.split('.')[0] 90 | return os.path.isfile(fullpathNoExt + '.16800') 91 | 92 | def on_handshake(self, agent, filename, access_point, client_station): 93 | with self.lock: 94 | handshake_status = [] 95 | fullpathNoExt = filename.split('.')[0] 96 | name = filename.split('/')[-1:][0].split('.')[0] 97 | 98 | if self.is22000(filename) or \ 99 | self.is16800(filename): 100 | if self.is22000(filename): 101 | handshake_status.append('Already have {}.22000 
(EAPOL)'.format(name)) 102 | if self.is16800(filename): 103 | handshake_status.append('Already have {}.16800 (PMKID)'.format(name)) 104 | else: 105 | if self._writeEAPOL(filename): 106 | handshake_status.append('Created {}.22000 (EAPOL) from pcap'.format(name)) 107 | if self._writePMKID(filename, access_point): 108 | handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name)) 109 | 110 | if handshake_status: 111 | logging.info('[hashieclean] Good news:\n\t' + '\n\t'.join(handshake_status)) 112 | 113 | def _writeEAPOL(self, fullpath): 114 | fullpathNoExt = fullpath.split('.')[0] 115 | filename = fullpath.split('/')[-1:][0].split('.')[0] 116 | result = subprocess.getoutput('hcxpcapngtool -o {}.22000 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath)) 117 | if os.path.isfile(fullpathNoExt + '.22000'): 118 | logging.debug('[hashieclean] [+] EAPOL Success: {}.22000 created'.format(filename)) 119 | return True 120 | else: 121 | return False 122 | 123 | def _writePMKID(self, fullpath, apJSON): 124 | fullpathNoExt = fullpath.split('.')[0] 125 | filename = fullpath.split('/')[-1:][0].split('.')[0] 126 | result = subprocess.getoutput('hcxpcapngtool -k {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath)) 127 | if os.path.isfile(fullpathNoExt + '.16800'): 128 | logging.debug('[hashieclean] [+] PMKID Success: {}.16800 created'.format(filename)) 129 | return True 130 | else: #make a raw dump 131 | result = subprocess.getoutput('hcxpcapngtool -K {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath)) 132 | if os.path.isfile(fullpathNoExt + '.16800'): 133 | if self._repairPMKID(fullpath, apJSON) == False: 134 | logging.debug('[hashieclean] [-] PMKID Fail: {}.16800 could not be repaired'.format(filename)) 135 | return False 136 | else: 137 | logging.debug('[hashieclean] [+] PMKID Success: {}.16800 repaired'.format(filename)) 138 | return True 139 | else: 140 | logging.debug('[hashieclean] [-] Could not attempt repair of {} as no raw PMKID file was 
created'.format(filename)) 141 | return False 142 | 143 | def _repairPMKID(self, fullpath, apJSON): 144 | hashString = "" 145 | clientString = [] 146 | fullpathNoExt = fullpath.split('.')[0] 147 | filename = fullpath.split('/')[-1:][0].split('.')[0] 148 | logging.debug('[hashieclean] Repairing {}'.format(filename)) 149 | with open(fullpathNoExt + '.16800','r') as tempFileA: 150 | hashString = tempFileA.read() 151 | if apJSON != "": 152 | clientString.append('{}:{}'.format(apJSON['mac'].replace(':',''), apJSON['hostname'].encode('hex'))) 153 | else: 154 | #attempt to extract the AP's name via hcxpcapngtool 155 | result = subprocess.getoutput('hcxpcapngtool -X /tmp/{} {} >/dev/null 2>&1'.format(filename,fullpath)) 156 | if os.path.isfile('/tmp/' + filename): 157 | with open('/tmp/' + filename,'r') as tempFileB: 158 | temp = tempFileB.read().splitlines() 159 | for line in temp: 160 | clientString.append(line.split(':')[0] + ':' + line.split(':')[1].strip('\n').encode().hex()) 161 | os.remove('/tmp/{}'.format(filename)) 162 | #attempt to extract the AP's name via tcpdump 163 | tcpCatOut = subprocess.check_output("tcpdump -ennr " + fullpath + " \"(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)\" 2>/dev/null | sed -E 's/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g'",shell=True).decode('utf-8') 164 | if ":" in tcpCatOut: 165 | for i in tcpCatOut.split('\n'): 166 | if ":" in i: 167 | clientString.append(i.split('\t')[0].replace(':','') + ':' + i.split('\t')[1].strip('\n').encode().hex()) 168 | if clientString: 169 | for line in clientString: 170 | if line.split(':')[0] == hashString.split(':')[1]: #if the AP MAC pulled from the JSON or tcpdump output matches the AP MAC in the raw 16800 output 171 | hashString = hashString.strip('\n') + ':' + (line.split(':')[1]) 172 | if (len(hashString.split(':')) == 4) and not (hashString.endswith(':')): 173 | with open(fullpath.split('.')[0] + 
'.16800','w') as tempFileC: 174 | logging.debug('[hashieclean] Repaired: {} ({})'.format(filename,hashString)) 175 | tempFileC.write(hashString + '\n') 176 | return True 177 | else: 178 | logging.debug('[hashieclean] Discarded: {} {}'.format(line, hashString)) 179 | else: 180 | os.remove(fullpath.split('.')[0] + '.16800') 181 | return False 182 | 183 | def _process_stale_pcaps(self, handshake_dir): 184 | handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')] 185 | failed_jobs = [] 186 | successful_jobs = [] 187 | lonely_pcaps = [] 188 | failed_files = set() 189 | for num, handshake in enumerate(handshakes_list): 190 | fullpathNoExt = handshake.split('.')[0] 191 | pcapFileName = handshake.split('/')[-1:][0] 192 | lonely = True 193 | if self.is22000(handshake) or\ 194 | self.is16800(handshake): #Ignore completed handshakes 195 | lonely = False 196 | continue 197 | else: 198 | if self._writeEAPOL(handshake): 199 | successful_jobs.append('22000: ' + pcapFileName) 200 | lonely = False 201 | else: 202 | failed_jobs.append('22000: ' + pcapFileName) 203 | if self._writePMKID(handshake, ""): 204 | successful_jobs.append('16800: ' + pcapFileName) 205 | lonely = False 206 | else: 207 | failed_jobs.append('16800: ' + pcapFileName) 208 | if lonely: #no 16800 AND no 22000 209 | lonely_pcaps.append(handshake) 210 | logging.debug('[hashieclean] Batch job: added {} to lonely list'.format(pcapFileName)) 211 | if ((num + 1) % 50 == 0) or (num + 1 == len(handshakes_list)): #report progress every 50, or when done 212 | logging.info('[hashieclean] Batch job: {}/{} done ({} fails)'.format(num + 1,len(handshakes_list),len(lonely_pcaps))) 213 | if len(successful_jobs) > 0: 214 | logging.info('[hashieclean] Batch job: {} new handshake files created'.format(len(successful_jobs))) 215 | if len(lonely_pcaps) > 0: 216 | logging.info('[hashieclean] Batch job: {} networks without enough packets to create a 
hash'.format(len(lonely_pcaps))) 217 | logging.info(f'[hashieclean] {len(lonely_pcaps)} lonely (failed) handshakes will be deleted.') 218 | self._getLocations(lonely_pcaps) 219 | for filename in lonely_pcaps: 220 | pcapFileName = filename.split('/')[-1:][0] 221 | logging.info('[hashieclean] The pcap file is not a valid handshake. Deleting file:' + pcapFileName.split('/')[0]) 222 | os.remove(filename) 223 | # Confirm the pcap file was deleted. 224 | if not os.path.exists(filename): 225 | logging.debug('[hashieclean] The pcap file was deleted for being incomplete. FILE: ' + pcapFileName) 226 | # If the pcap file was not deleted, then send an error to the log. 227 | if os.path.exists(filename): 228 | logging.error('[hashieclean] Could not delete the pcap file. Please delete it manually. FILE: ' + pcapFileName) 229 | 230 | def _getLocations(self, lonely_pcaps): 231 | #export a file for webgpsmap to load 232 | with open('/root/.incompletePcaps','w') as isIncomplete: 233 | count = 0 234 | for pcapFile in lonely_pcaps: 235 | filename = pcapFile.split('/')[-1:][0] #keep extension 236 | fullpathNoExt = pcapFile.split('.')[0] 237 | isIncomplete.write(filename + '\n') 238 | if os.path.isfile(fullpathNoExt + '.gps.json') or os.path.isfile(fullpathNoExt + '.geo.json') or os.path.isfile(fullpathNoExt + '.paw-gps.json'): 239 | count +=1 240 | if count != 0: 241 | logging.info('[hashieclean] Used {} GPS/GEO/PAW-GPS files to find lonely networks, go check webgpsmap! ;)'.format(str(count))) 242 | else: 243 | logging.info('[hashieclean] Could not find any GPS/GEO/PAW-GPS files for the lonely networks'.format(str(count))) 244 | 245 | def _getLocationsCSV(self, lonely_pcaps): 246 | #in case we need this later, export locations manually to CSV file, needs try/catch/paw-gps format/etc. 
247 | locations = [] 248 | for pcapFile in lonely_pcaps: 249 | filename = pcapFile.split('/')[-1:][0].split('.')[0] 250 | fullpathNoExt = pcapFile.split('.')[0] 251 | if os.path.isfile(fullpathNoExt + '.gps.json'): 252 | with open(fullpathNoExt + '.gps.json','r') as tempFileA: 253 | data = json.load(tempFileA) 254 | locations.append(filename + ',' + str(data['Latitude']) + ',' + str(data['Longitude']) + ',50') 255 | elif os.path.isfile(fullpathNoExt + '.geo.json'): 256 | with open(fullpathNoExt + '.geo.json','r') as tempFileB: 257 | data = json.load(tempFileB) 258 | locations.append(filename + ',' + str(data['location']['lat']) + ',' + str(data['location']['lng']) + ',' + str(data['accuracy'])) 259 | elif os.path.isfile(fullpathNoExt + '.paw-gps.json'): 260 | with open(fullpathNoExt + '.paw-gps.json','r') as tempFileC: 261 | data = json.load(tempFileC) 262 | locations.append(filename + ',' + str(data['lat']) + ',' + str(data['long']) + ',50') 263 | if locations: 264 | with open('/root/locations.csv','w') as tempFileD: 265 | for loc in locations: 266 | tempFileD.write(loc + '\n') 267 | logging.info('[hashieclean] Used {} GPS/GEO files to find lonely networks, load /root/locations.csv into a mapping app and go say hi!'.format(len(locations))) 268 | --------------------------------------------------------------------------------