├── README.md
├── handshakes-dl-hashie.py
└── hashie-hcxpcapngtool.py
/README.md:
--------------------------------------------------------------------------------
1 | # pwnagotchi-plugins
2 | Some plugins updated 🙂
3 |
4 | ## How to use
5 |
6 | Add to `/etc/pwnagotchi/config.toml`:
7 | ```toml
8 | main.custom_plugin_repos = [
9 | "https://github.com/evilsocket/pwnagotchi-plugins-contrib/archive/master.zip",
10 | "https://github.com/PwnPeter/pwnagotchi-plugins/archive/master.zip",
11 | ]
12 | ```
13 |
14 | Next, run `sudo pwnagotchi plugins update` to fetch the plugin repos, then `sudo pwnagotchi plugins list` to see what is available.
15 |
16 | You can now install and enable plugins :)
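
For example, to install this repo's plugins (a minimal sketch; the plugin names are assumed to match the file names):
```bash
sudo pwnagotchi plugins install hashie-hcxpcapngtool
sudo pwnagotchi plugins install handshakes-dl-hashie
```
and enable them in `/etc/pwnagotchi/config.toml`:
```toml
main.plugins.hashie-hcxpcapngtool.enabled = true
main.plugins.handshakes-dl-hashie.enabled = true
```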
17 |
18 | ## Plugins
19 |
20 | ### hashie-hcxpcapngtool.py
21 |
22 | An updated `hashie.py` with support for the 2021 version of hcxtools (`hcxpcapngtool`) and the newer hashcat hash formats (22000 for EAPOL, 16800 for PMKID).
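
If you crack locally, the generated files can be fed straight to hashcat (file names below are illustrative):
```bash
hashcat -m 22000 MyAP_de123456789a.22000 wordlist.txt   # EAPOL (WPA-PBKDF2-PMKID+EAPOL)
hashcat -m 16800 MyAP_de123456789a.16800 wordlist.txt   # PMKID (legacy format)
```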
23 |
24 | ### handshakes-dl-hashie.py
25 | Download handshake captures from the web UI, including the hashes converted to hashcat format by `hashie-hcxpcapngtool.py`.
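
Once enabled, the download page is served through the pwnagotchi web UI, e.g. `http://10.0.0.2:8080/plugins/handshakes-dl-hashie/` (assuming the default USB IP and web UI port; log in with your usual web UI credentials).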
26 |
--------------------------------------------------------------------------------
/handshakes-dl-hashie.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import json
3 | import os
4 | import glob
5 |
6 | import pwnagotchi
7 | import pwnagotchi.plugins as plugins
8 |
9 | from flask import abort
10 | from flask import send_from_directory
11 | from flask import render_template_string
12 |
13 | TEMPLATE = """
14 | {% extends "base.html" %}
15 | {% set active_page = "handshakes" %}
16 | {% block title %}
17 | {{ title }}
18 | {% endblock %}
19 | {% block styles %}
20 | {{ super() }}
21 |
30 | {% endblock %}
31 | {% block script %}
32 | var shakeList = document.getElementById('list');
33 | var filter = document.getElementById('filter');
34 | var filterVal = filter.value.toUpperCase();
35 | filter.onkeyup = function() {
36 | document.body.style.cursor = 'progress';
37 | var li, i, txtValue;
38 | filterVal = filter.value.toUpperCase();
39 | li = shakeList.getElementsByTagName("li");
40 | for (i = 0; i < li.length; i++) {
41 | txtValue = li[i].textContent || li[i].innerText;
42 | if (txtValue.toUpperCase().indexOf(filterVal) > -1) {
43 | li[i].style.display = "list-item";
44 | } else {
45 | li[i].style.display = "none";
46 | }
47 | }
48 | document.body.style.cursor = 'default';
49 | }
50 | {% endblock %}
51 | {% block content %}
52 |     <input type="text" id="filter" placeholder="Search for ..." title="Type in a filter">
53 |     <ul id="list" style="list-style-type:disc;">
54 |     {% for handshake in handshakes %}
55 |         {% for ext in handshake.ext %}
56 |             <li class="file">
57 |                 <a href="/plugins/handshakes-dl-hashie/{{handshake.name}}{{ext}}">{{handshake.name}}{{ext}}</a>
58 |             </li>
59 |         {% endfor %}
60 |     {% endfor %}
61 |     </ul>
62 | {% endblock %}
63 | """
64 |
65 | class handshakes:
66 | def __init__(self, name, path, ext):
67 | self.name = name
68 | self.path = path
69 | self.ext = ext
70 |
71 | class HandshakesDL(plugins.Plugin):
72 | __author__ = 'me@sayakb.com'
73 | __version__ = '1.0.0'
74 | __license__ = 'GPL3'
75 | __description__ = 'Download handshake captures from web-ui.'
76 |
77 | def __init__(self):
78 | self.ready = False
79 |
80 | def on_loaded(self):
81 | logging.info("[HandshakesDL] plugin loaded")
82 |
83 | def on_config_changed(self, config):
84 | self.config = config
85 | self.ready = True
86 |
87 | def on_webhook(self, path, request):
88 | if not self.ready:
89 | return "Plugin not ready"
90 |
91 | if path == "/" or not path:
92 | pcapfiles = glob.glob(os.path.join(self.config['bettercap']['handshakes'], "*.pcap"))
93 |
94 | data = []
95 | for pcapfile in pcapfiles:
96 | name = os.path.basename(pcapfile)[:-5]  # file name without the '.pcap' extension
97 | fullpathNoExt = pcapfile[:-5]  # full path without the '.pcap' extension
98 | possibleExt = ['.2500', '.16800', '.22000']  # hash files hashie may have written next to the pcap
99 | foundExt = ['.pcap']
100 | for ext in possibleExt:
101 | if os.path.isfile(fullpathNoExt + ext):
102 | foundExt.append(ext)
103 | data.append(handshakes(name, fullpathNoExt, foundExt))
104 | return render_template_string(TEMPLATE,
105 | title="Handshakes | " + pwnagotchi.name(),
106 | handshakes=data)
107 | else:
108 | handshakes_dir = self.config['bettercap']['handshakes']
109 | try:
110 | logging.info(f"[HandshakesDL] serving {handshakes_dir}/{path}")
111 | return send_from_directory(directory=handshakes_dir, filename=path, as_attachment=True)
112 | except FileNotFoundError:
113 | abort(404)
114 |
--------------------------------------------------------------------------------
/hashie-hcxpcapngtool.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import io
3 | import subprocess
4 | import os
5 | import json
6 | import pwnagotchi.plugins as plugins
7 | from threading import Lock
8 | from pwnagotchi.ui.components import LabeledValue
9 | from pwnagotchi.ui.view import BLACK
10 | import pwnagotchi.ui.fonts as fonts
11 |
12 | '''
13 | hcxpcapngtool needed, to install:
14 | > git clone https://github.com/ZerBea/hcxtools.git
15 | > cd hcxtools
16 | > sudo apt-get install libcurl4-openssl-dev libssl-dev zlib1g-dev
17 | > make
18 | > sudo make install
19 | '''
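# Quick sanity check before enabling the plugin (run manually; illustrative):
#   which hcxpcapngtool && hcxpcapngtool --version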
20 |
21 |
22 | class hashie(plugins.Plugin):
23 | __author__ = 'junohea.mail@gmail.com'
24 | __version__ = '1.0.3'
25 | __license__ = 'GPL3'
26 | __description__ = '''
27 | Attempt to automatically convert pcaps to a crackable format.
28 | If successful, the files containing the hashes will be saved
29 | in the same folder as the handshakes.
30 | The files are saved in their respective Hashcat format:
31 | - EAPOL hashes are saved as *.22000
32 | - PMKID hashes are saved as *.16800
33 | The names of all pcap files without enough information to create a hash
34 | are written to a file that the webgpsmap plugin can read.
35 |
36 | Why use it?:
37 | - Automatically convert handshakes to crackable formats!
38 | We don't all upload our hashes online ;)
39 | - Repair PMKID handshakes that hcxpcapngtool misses
40 | - If running at time of handshake capture, on_handshake can
41 | be used to improve the chance of the repair succeeding
42 | - Be a completionist! Not enough packets captured to crack a network?
43 | This generates an output file for the webgpsmap plugin, use the
44 | location data to revisit networks you need more packets for!
45 |
46 | Additional information:
47 | - Currently requires hcxpcapngtool compiled and installed
48 | - Attempts to repair PMKID hashes when hcxpcapngtool can't find the SSID
49 | - hcxpcapngtool sometimes has trouble extracting the SSID, so we
50 | use the raw 16800 output and attempt to retrieve the SSID via tcpdump
51 | - When access_point data is available (on_handshake), we leverage
52 | the reported AP name and MAC to complete the hash
53 | - The repair is very basic and could certainly be improved!
54 | Todo:
55 | Make it so users don't need hcxpcapngtool (unless it gets added to the base image)
56 | Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands
57 | Phase 2: Extract/construct 22000/16800 hashes entirely in python
58 | Improve the code, a lot
59 | '''
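# Illustrative example of what ends up next to a capture after conversion
# (directory and file names below are assumptions based on the defaults):
#   /root/handshakes/MyAP_de123456789a.pcap    original capture
#   /root/handshakes/MyAP_de123456789a.22000   EAPOL hash  -> hashcat -m 22000
#   /root/handshakes/MyAP_de123456789a.16800   PMKID hash  -> hashcat -m 16800
#   /root/.incompletePcaps                     pcaps without enough data, read by webgpsmap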
60 |
61 | def __init__(self):
62 | logging.info("[hashie] plugin loaded")
63 | self.lock = Lock()
64 |
65 | # called when everything is ready and the main loop is about to start
66 | def on_config_changed(self, config):
67 | handshake_dir = config['bettercap']['handshakes']
68 |
69 | if 'interval' not in self.options or not (hasattr(self, 'status') and self.status.newer_then_hours(self.options['interval'])):  # no StatusFile is created by this plugin, so guard the optional 'interval' throttle
70 | logging.info('[hashie] Starting batch conversion of pcap files')
71 | with self.lock:
72 | self._process_stale_pcaps(handshake_dir)
73 |
74 | def on_handshake(self, agent, filename, access_point, client_station):
75 | with self.lock:
76 | handshake_status = []
77 | fullpathNoExt = os.path.splitext(filename)[0]  # strip only the final '.pcap' (SSIDs may contain dots)
78 | name = os.path.basename(fullpathNoExt)
79 |
80 | if os.path.isfile(fullpathNoExt + '.22000'):
81 | handshake_status.append('Already have {}.22000 (EAPOL)'.format(name))
82 | elif self._writeEAPOL(filename):
83 | handshake_status.append('Created {}.22000 (EAPOL) from pcap'.format(name))
84 |
85 | if os.path.isfile(fullpathNoExt + '.16800'):
86 | handshake_status.append('Already have {}.16800 (PMKID)'.format(name))
87 | elif self._writePMKID(filename, access_point):
88 | handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name))
89 |
90 | if handshake_status:
91 | logging.info('[hashie] Good news:\n\t' + '\n\t'.join(handshake_status))
92 |
93 | def _writeEAPOL(self, fullpath):
94 | fullpathNoExt = os.path.splitext(fullpath)[0]
95 | filename = os.path.basename(fullpathNoExt)
96 | result = subprocess.getoutput('hcxpcapngtool -o {}.22000 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
97 | if os.path.isfile(fullpathNoExt + '.22000'):
98 | logging.debug('[hashie] [+] EAPOL Success: {}.22000 created'.format(filename))
99 | return True
100 | else:
101 | return False
102 |
103 | def _writePMKID(self, fullpath, apJSON):
104 | fullpathNoExt = os.path.splitext(fullpath)[0]
105 | filename = os.path.basename(fullpathNoExt)
106 | result = subprocess.getoutput('hcxpcapngtool -k {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
107 | if os.path.isfile(fullpathNoExt + '.16800'):
108 | logging.debug('[hashie] [+] PMKID Success: {}.16800 created'.format(filename))
109 | return True
110 | else: #make a raw dump
111 | result = subprocess.getoutput('hcxpcapngtool -K {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
112 | if os.path.isfile(fullpathNoExt + '.16800'):
113 | if not self._repairPMKID(fullpath, apJSON):  # an implicit None (nothing matched) also counts as a failed repair
114 | logging.debug('[hashie] [-] PMKID Fail: {}.16800 could not be repaired'.format(filename))
115 | return False
116 | else:
117 | logging.debug('[hashie] [+] PMKID Success: {}.16800 repaired'.format(filename))
118 | return True
119 | else:
120 | logging.debug('[hashie] [-] Could not attempt repair of {} as no raw PMKID file was created'.format(filename))
121 | return False
122 |
123 | def _repairPMKID(self, fullpath, apJSON):
124 | hashString = ""
125 | clientString = []
126 | fullpathNoExt = os.path.splitext(fullpath)[0]
127 | filename = os.path.basename(fullpathNoExt)
128 | logging.debug('[hashie] Repairing {}'.format(filename))
129 | with open(fullpathNoExt + '.16800','r') as tempFileA:
130 | hashString = tempFileA.read()
131 | if apJSON:
132 | clientString.append('{}:{}'.format(apJSON['mac'].replace(':',''), apJSON['hostname'].encode().hex()))
133 | else:
134 | #attempt to extract the AP's name via hcxpcapngtool
135 | result = subprocess.getoutput('hcxpcapngtool -X /tmp/{} {} >/dev/null 2>&1'.format(filename,fullpath))
136 | if os.path.isfile('/tmp/' + filename):
137 | with open('/tmp/' + filename,'r') as tempFileB:
138 | temp = tempFileB.read().splitlines()
139 | for line in temp:
140 | clientString.append(line.split(':')[0] + ':' + line.split(':')[1].strip('\n').encode().hex())
141 | os.remove('/tmp/{}'.format(filename))
142 | #attempt to extract the AP's name via tcpdump
143 | tcpCatOut = subprocess.check_output("tcpdump -ennr " + fullpath + " \"(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)\" 2>/dev/null | sed -E 's/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g'",shell=True).decode('utf-8')
144 | if ":" in tcpCatOut:
145 | for i in tcpCatOut.split('\n'):
146 | if ":" in i:
147 | clientString.append(i.split('\t')[0].replace(':','') + ':' + i.split('\t')[1].strip('\n').encode().hex())
148 | if clientString:
149 | for line in clientString:
150 | if line.split(':')[0] == hashString.split(':')[1]: #if the AP MAC pulled from the JSON or tcpdump output matches the AP MAC in the raw 16800 output
151 | hashString = hashString.strip('\n') + ':' + (line.split(':')[1])
152 | if (len(hashString.split(':')) == 4) and not (hashString.endswith(':')):
153 | with open(fullpathNoExt + '.16800','w') as tempFileC:
154 | logging.debug('[hashie] Repaired: {} ({})'.format(filename,hashString))
155 | tempFileC.write(hashString + '\n')
156 | return True
157 | else:
158 | logging.debug('[hashie] Discarded: {} {}'.format(line, hashString))
159 | else:
160 | os.remove(fullpathNoExt + '.16800')
161 | return False
162 |
163 | def _process_stale_pcaps(self, handshake_dir):
164 | handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')]
165 | failed_jobs = []
166 | successful_jobs = []
167 | lonely_pcaps = []
168 | for num, handshake in enumerate(handshakes_list):
169 | fullpathNoExt = os.path.splitext(handshake)[0]
170 | pcapFileName = os.path.basename(handshake)
171 | if not os.path.isfile(fullpathNoExt + '.22000'): #if no 22000, try
172 | if self._writeEAPOL(handshake):
173 | successful_jobs.append('22000: ' + pcapFileName)
174 | else:
175 | failed_jobs.append('22000: ' + pcapFileName)
176 | if not os.path.isfile(fullpathNoExt + '.16800'): #if no 16800, try
177 | if self._writePMKID(handshake, ""):
178 | successful_jobs.append('16800: ' + pcapFileName)
179 | else:
180 | failed_jobs.append('16800: ' + pcapFileName)
181 | if not os.path.isfile(fullpathNoExt + '.22000') and not os.path.isfile(fullpathNoExt + '.16800'): #if no 22000 AND no 16800
182 | lonely_pcaps.append(handshake)
183 | logging.debug('[hashie] Batch job: added {} to lonely list'.format(pcapFileName))
184 | if ((num + 1) % 50 == 0) or (num + 1 == len(handshakes_list)): #report progress every 50, or when done
185 | logging.info('[hashie] Batch job: {}/{} done ({} fails)'.format(num + 1,len(handshakes_list),len(lonely_pcaps)))
186 | if successful_jobs:
187 | logging.info('[hashie] Batch job: {} new handshake files created'.format(len(successful_jobs)))
188 | if lonely_pcaps:
189 | logging.info('[hashie] Batch job: {} networks without enough packets to create a hash'.format(len(lonely_pcaps)))
190 | self._getLocations(lonely_pcaps)
191 |
192 | def _getLocations(self, lonely_pcaps):
193 | #export a file for webgpsmap to load
194 | with open('/root/.incompletePcaps','w') as isIncomplete:
195 | count = 0
196 | for pcapFile in lonely_pcaps:
197 | filename = os.path.basename(pcapFile)  # keep the extension
198 | fullpathNoExt = os.path.splitext(pcapFile)[0]
199 | isIncomplete.write(filename + '\n')
200 | if os.path.isfile(fullpathNoExt + '.gps.json') or os.path.isfile(fullpathNoExt + '.geo.json') or os.path.isfile(fullpathNoExt + '.paw-gps.json'):
201 | count +=1
202 | if count != 0:
203 | logging.info('[hashie] Used {} GPS/GEO/PAW-GPS files to find lonely networks, go check webgpsmap! ;)'.format(str(count)))
204 | else:
205 | logging.info('[hashie] Could not find any GPS/GEO/PAW-GPS files for the lonely networks')
206 |
207 | def _getLocationsCSV(self, lonely_pcaps):
208 | #in case we need this later, export locations manually to CSV file, needs try/catch/paw-gps format/etc.
209 | locations = []
210 | for pcapFile in lonely_pcaps:
211 | filename = os.path.splitext(os.path.basename(pcapFile))[0]
212 | fullpathNoExt = os.path.splitext(pcapFile)[0]
213 | if os.path.isfile(fullpathNoExt + '.gps.json'):
214 | with open(fullpathNoExt + '.gps.json','r') as tempFileA:
215 | data = json.load(tempFileA)
216 | locations.append(filename + ',' + str(data['Latitude']) + ',' + str(data['Longitude']) + ',50')
217 | elif os.path.isfile(fullpathNoExt + '.geo.json'):
218 | with open(fullpathNoExt + '.geo.json','r') as tempFileB:
219 | data = json.load(tempFileB)
220 | locations.append(filename + ',' + str(data['location']['lat']) + ',' + str(data['location']['lng']) + ',' + str(data['accuracy']))
221 | elif os.path.isfile(fullpathNoExt + '.paw-gps.json'):
222 | with open(fullpathNoExt + '.paw-gps.json','r') as tempFileC:
223 | data = json.load(tempFileC)
224 | locations.append(filename + ',' + str(data['lat']) + ',' + str(data['long']) + ',50')
225 | if locations:
226 | with open('/root/locations.csv','w') as tempFileD:
227 | for loc in locations:
228 | tempFileD.write(loc + '\n')
229 | logging.info('[hashie] Used {} GPS/GEO files to find lonely networks, load /root/locations.csv into a mapping app and go say hi!'.format(len(locations)))
230 |
--------------------------------------------------------------------------------