Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fixed issues that arose from hcxtools update #36

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
227 changes: 123 additions & 104 deletions hashie.py
Original file line number Diff line number Diff line change
@@ -1,54 +1,73 @@
import logging
import binascii
import io
import subprocess
import os
import json
import pwnagotchi.plugins as plugins
import logging
import os
import subprocess
from threading import Lock

from scapy.all import *

import pwnagotchi.plugins as plugins
import pwnagotchi.ui.fonts as fonts
from pwnagotchi.ui.components import LabeledValue
from pwnagotchi.ui.view import BLACK
import pwnagotchi.ui.fonts as fonts


class PMKIDPackage(object):
    """Holds the fields needed to assemble one 16800-format PMKID hash line."""
    # Class-level defaults; each instance is expected to overwrite these
    # with values extracted from a captured packet.
    PMKID = MAC_AP = MAC_STN = ESSID = ""

'''
hcxpcapngtool is required, to install:
> git clone https://github.com/ZerBea/hcxtools.git
> cd hcxtools
> apt-get install libcurl4-openssl-dev libssl-dev zlib1g-dev
> make
> sudo make install
'''

class hashie(plugins.Plugin):
__author__ = 'junohea.mail@gmail.com'
__version__ = '1.0.1'
__version__ = '1.0.3'
__license__ = 'GPL3'
__description__ = '''
Attempt to automatically convert pcaps to a crackable format.
If successful, the files containing the hashes will be saved
Attempt to automatically convert PCAPs to a crackable format.
If successful, the files containing the hashes will be saved
in the same folder as the handshakes.
The files are saved in their respective Hashcat format:
- EAPOL hashes are saved as *.2500
- PMKID hashes are saved as *.16800
All PCAP files without enough information to create a hash are
stored in a file that can be read by the webgpsmap plugin.
The files output by the plugin are intended for use with Hashcat:
- *.16800 files are manually exported with scapy/tcpdump
- *.22000 files are exported by hcxpcapngtool and contain 2 hash types
- "WPA*01*xxxx" hashes (Newer format for 16800s)
- "WPA*02*xxxx" hashes (Newer format for the old 2500s)
- Note: Hashcat accepts both of these in 22000 mode.
If a PCAP file doesn't have enough information to create a hash
it gets stored in a file that can be read by the webgpsmap plugin.

Why use it?:
- Automatically convert handshakes to crackable formats!
We dont all upload our hashes online ;)
- Repair PMKID handshakes that hcxpcaptool misses
- If running at time of handshake capture, on_handshake can
be used to improve the chance of the repair succeeding
- Be a completionist! Not enough packets captured to crack a network?
This generates an output file for the webgpsmap plugin, use the
- Automatically convert handshakes to crackable formats for use offline
- Manually extract PMKIDs that hcxpcapngtool misses
- Be a completionist! If you don't have enough packets captured this
generates an output file for the webgpsmap plugin, use the
location data to revisit networks you need more packets for!

Additional information:
- Currently requires hcxpcaptool compiled and installed
- Attempts to repair PMKID hashes when hcxpcaptool cant find the SSID
- hcxpcaptool sometimes has trouble extracting the SSID, so we
- Currently requires hcxpcapngtool compiled and installed
- Attempts to repair PMKID hashes when hcxpcapngtool can't find the SSID
- hcxpcapngtool sometimes has trouble extracting the SSID, so we
use the raw 16800 output and attempt to retrieve the SSID via tcpdump
- When access_point data is available (on_handshake), we leverage
the reported AP name and MAC to complete the hash
- The repair is very basic and could certainly be improved!
Todo:
Make it so users dont need hcxpcaptool (unless it gets added to the base image)
Phase 1: Extract/construct 2500/16800 hashes through tcpdump commands
Phase 2: Extract/construct 2500/16800 hashes entirely in python
Make it so users dont need hcxpcapngtool (unless it gets added to the base image)
Phase 1: Extract/construct 22000/16800 hashes through tcpdump commands
Phase 2: Extract/construct 22000/16800 hashes entirely in python
Improve the code, a lot
'''

def __init__(self):
logging.info("[hashie] plugin loaded")
self.lock = Lock()
Expand All @@ -58,7 +77,6 @@ def on_config_changed(self, config):
handshake_dir = config['bettercap']['handshakes']

if 'interval' not in self.options or not (self.status.newer_then_hours(self.options['interval'])):
logging.info('[hashie] Starting batch conversion of pcap files')
with self.lock:
self._process_stale_pcaps(handshake_dir)

Expand All @@ -68,110 +86,111 @@ def on_handshake(self, agent, filename, access_point, client_station):
fullpathNoExt = filename.split('.')[0]
name = filename.split('/')[-1:][0].split('.')[0]

if os.path.isfile(fullpathNoExt + '.2500'):
handshake_status.append('Already have {}.2500 (EAPOL)'.format(name))
elif self._writeEAPOL(filename):
handshake_status.append('Created {}.2500 (EAPOL) from pcap'.format(name))
if os.path.isfile(fullpathNoExt + '.22000'):
handshake_status.append('Already have {}.22000'.format(name))
elif self._write22000(filename):
handshake_status.append('Created {}.22000 from pcap'.format(name))

if os.path.isfile(fullpathNoExt + '.16800'):
handshake_status.append('Already have {}.16800 (PMKID)'.format(name))
elif self._writePMKID(filename, access_point):
handshake_status.append('Created {}.16800 (PMKID) from pcap'.format(name))
handshake_status.append('Already have {}.16800'.format(name))
elif self._write16800(filename, access_point):
handshake_status.append('Created {}.16800 from pcap'.format(name))

if handshake_status:
logging.info('[hashie] Good news:\n\t' + '\n\t'.join(handshake_status))

def _writeEAPOL(self, fullpath):
def _write22000(self, fullpath):
fullpathNoExt = fullpath.split('.')[0]
filename = fullpath.split('/')[-1:][0].split('.')[0]
result = subprocess.getoutput('hcxpcaptool -o {}.2500 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
if os.path.isfile(fullpathNoExt + '.2500'):
logging.debug('[hashie] [+] EAPOL Success: {}.2500 created'.format(filename))
result = subprocess.getoutput('hcxpcapngtool -o {}.22000 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
if os.path.isfile(fullpathNoExt + '.22000'):
logging.debug('[hashie] [+] EAPOL Success: {}.22000 created'.format(filename))
return True
else:
return False

def _getESSID(self, packet, fullpath):
    """Try to recover the (hex-encoded) ESSID for a captured frame.

    First asks scapy for the SSID carried in a management frame; if that
    yields nothing, falls back to grepping beacon/probe/assoc frames out
    of the whole pcap with tcpdump.

    :param packet: scapy packet to inspect
    :param fullpath: path to the pcap the packet came from (tcpdump fallback)
    :return: hex-encoded ESSID string, or None if nothing could be found
    """
    tmpESSID = ''
    # BUG FIX: the original condition used '&', and '&' binds tighter than
    # '==' in Python, so it parsed as (haslayer(Dot11) & packet.type) == 0.
    # Logical 'and' is what was intended here.
    if packet.haslayer(Dot11) and packet.type == 0:
        try:
            tmpESSID = packet.info.hex()
        except AttributeError:
            # packet.info missing or not bytes-like; fall through to tcpdump
            pass
    if tmpESSID != '':
        return tmpESSID
    # Fallback: have tcpdump print "BSSID<TAB>SSID" pairs for the frame
    # types that normally carry the network name.
    tcpCatOut = subprocess.check_output("tcpdump -ennr " + fullpath + " \"(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)\" 2>/dev/null | sed -E 's/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g'", shell=True).decode('utf-8')
    if ":" in tcpCatOut:
        for i in tcpCatOut.split('\n'):
            if ":" in i:
                tmpESSID = i.split('\t')[0].replace(':', '') + ':' + i.split('\t')[1].strip('\n').encode().hex()
        if tmpESSID:
            # Keep only the hex SSID portion (the part after the colon).
            return tmpESSID.split(':')[1]
    # Nothing usable found in either source; be explicit about it.
    return None

def _writePMKID(self, fullpath, apJSON):
fullpathNoExt = fullpath.split('.')[0]
filename = fullpath.split('/')[-1:][0].split('.')[0]
result = subprocess.getoutput('hcxpcaptool -k {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
if os.path.isfile(fullpathNoExt + '.16800'):
logging.debug('[hashie] [+] PMKID Success: {}.16800 created'.format(filename))
return True
else: #make a raw dump
result = subprocess.getoutput('hcxpcaptool -K {}.16800 {} >/dev/null 2>&1'.format(fullpathNoExt,fullpath))
if os.path.isfile(fullpathNoExt + '.16800'):
if self._repairPMKID(fullpath, apJSON) == False:
logging.debug('[hashie] [-] PMKID Fail: {}.16800 could not be repaired'.format(filename))
return False
else:
logging.debug('[hashie] [+] PMKID Success: {}.16800 repaired'.format(filename))
return True
else:
logging.debug('[hashie] [-] Could not attempt repair of {} as no raw PMKID file was created'.format(filename))
return False

def _repairPMKID(self, fullpath, apJSON):
hashString = ""
clientString = []
def _populatePMKObj(self, packet, fullpath):
    """Build a PMKIDPackage from a single captured frame.

    The PMKID lives at a fixed offset inside the EAPOL payload, so the
    raw load is hexlified and the relevant 32 hex characters sliced out.
    """
    result = PMKIDPackage()
    result.ESSID = self._getESSID(packet, fullpath)
    raw_hex = binascii.hexlify(packet.getlayer(Raw).load)
    result.PMKID = raw_hex[202:234].decode('utf-8')
    result.MAC_AP = packet.addr2.replace(':', '')
    result.MAC_STN = packet.addr1.replace(':', '')
    return result

def _getUniquePMKIDs(self, allPMKIDs):
tmpPMKIDs = []
for entry in allPMKIDs:
tmpPMKIDs.append("*".join([str(entry.PMKID), str(entry.MAC_AP), str(entry.MAC_STN), str(entry.ESSID)]))
uniqPMKIDs = sorted(set(tmpPMKIDs))
return uniqPMKIDs

def _write16800(self, fullpath, apJSON):
fullpathNoExt = fullpath.split('.')[0]
filename = fullpath.split('/')[-1:][0].split('.')[0]
logging.debug('[hashie] Repairing {}'.format(filename))
with open(fullpathNoExt + '.16800','r') as tempFileA:
hashString = tempFileA.read()
if apJSON != "":
clientString.append('{}:{}'.format(apJSON['mac'].replace(':',''), apJSON['hostname'].encode('hex')))
else:
#attempt to extract the AP's name via hcxpcaptool
result = subprocess.getoutput('hcxpcaptool -X /tmp/{} {} >/dev/null 2>&1'.format(filename,fullpath))
if os.path.isfile('/tmp/' + filename):
with open('/tmp/' + filename,'r') as tempFileB:
temp = tempFileB.read().splitlines()
for line in temp:
clientString.append(line.split(':')[0] + ':' + line.split(':')[1].strip('\n').encode().hex())
os.remove('/tmp/{}'.format(filename))
#attempt to extract the AP's name via tcpdump
tcpCatOut = subprocess.check_output("tcpdump -ennr " + fullpath + " \"(type mgt subtype beacon) || (type mgt subtype probe-resp) || (type mgt subtype reassoc-resp) || (type mgt subtype assoc-req)\" 2>/dev/null | sed -E 's/.*BSSID:([0-9a-fA-F:]{17}).*\\((.*)\\).*/\\1\t\\2/g'",shell=True).decode('utf-8')
if ":" in tcpCatOut:
for i in tcpCatOut.split('\n'):
if ":" in i:
clientString.append(i.split('\t')[0].replace(':','') + ':' + i.split('\t')[1].strip('\n').encode().hex())
if clientString:
for line in clientString:
if line.split(':')[0] == hashString.split(':')[1]: #if the AP MAC pulled from the JSON or tcpdump output matches the AP MAC in the raw 16800 output
hashString = hashString.strip('\n') + ':' + (line.split(':')[1])
if (len(hashString.split(':')) == 4) and not (hashString.endswith(':')):
with open(fullpath.split('.')[0] + '.16800','w') as tempFileC:
logging.debug('[hashie] Repaired: {} ({})'.format(filename,hashString))
tempFileC.write(hashString + '\n')
return True
else:
logging.debug('[hashie] Discarded: {} {}'.format(line, hashString))
else:
os.remove(fullpath.split('.')[0] + '.16800')
allPMKIDs = []
targetPCAP = []
try:
targetPCAP = rdpcap(fullpath)
except:
logging.debug('[hashie] [-] PCAP Read Fail: Scapy didn\'t like {}.pcap'.format(filename))
return False

for packet in targetPCAP:
try:
pmkObj = self._populatePMKObj(packet, fullpath)
if pmkObj.PMKID != '':
allPMKIDs.append(pmkObj)
except:
pass

if allPMKIDs:
uniqPMKIDs = self._getUniquePMKIDs(allPMKIDs)
with open(fullpath.split('.')[0] + '.16800','w') as tempFileOut:
for entry in uniqPMKIDs:
tempFileOut.write(entry + '\n')
logging.debug('[hashie] [+] PMKID Success: {}'.format(filename))
else:
logging.debug('[hashie] [-] PMKID Fail: No hashes extracted from {}'.format(filename))

def _process_stale_pcaps(self, handshake_dir):
logging.info('[hashie] Starting batch conversion of pcap files')
handshakes_list = [os.path.join(handshake_dir, filename) for filename in os.listdir(handshake_dir) if filename.endswith('.pcap')]
failed_jobs = []
successful_jobs = []
lonely_pcaps = []
for num, handshake in enumerate(handshakes_list):
fullpathNoExt = handshake.split('.')[0]
pcapFileName = handshake.split('/')[-1:][0]
if not os.path.isfile(fullpathNoExt + '.2500'): #if no 2500, try
if self._writeEAPOL(handshake):
successful_jobs.append('2500: ' + pcapFileName)
if not os.path.isfile(fullpathNoExt + '.22000'): #if no 22000, try
if self._write22000(handshake):
successful_jobs.append('22000: ' + pcapFileName)
else:
failed_jobs.append('2500: ' + pcapFileName)
failed_jobs.append('22000: ' + pcapFileName)
if not os.path.isfile(fullpathNoExt + '.16800'): #if no 16800, try
if self._writePMKID(handshake, ""):
if self._write16800(handshake, ""):
successful_jobs.append('16800: ' + pcapFileName)
else:
failed_jobs.append('16800: ' + pcapFileName)
if not os.path.isfile(fullpathNoExt + '.2500'): #if no 16800 AND no 2500
if not os.path.isfile(fullpathNoExt + '.22000'): #if no 16800 AND no 22000
lonely_pcaps.append(handshake)
logging.debug('[hashie] Batch job: added {} to lonely list'.format(pcapFileName))
#logging.debug('[hashie] Batch job: added {} to lonely list'.format(pcapFileName))
if ((num + 1) % 50 == 0) or (num + 1 == len(handshakes_list)): #report progress every 50, or when done
logging.info('[hashie] Batch job: {}/{} done ({} fails)'.format(num + 1,len(handshakes_list),len(lonely_pcaps)))
if successful_jobs:
Expand All @@ -193,10 +212,10 @@ def _getLocations(self, lonely_pcaps):
if count != 0:
logging.info('[hashie] Used {} GPS/GEO/PAW-GPS files to find lonely networks, go check webgpsmap! ;)'.format(str(count)))
else:
logging.info('[hashie] Could not find any GPS/GEO/PAW-GPS files for the lonely networks'.format(str(count)))
logging.info('[hashie] Could not find any GPS/GEO/PAW-GPS files for the lonely networks')

def _getLocationsCSV(self, lonely_pcaps):
#in case we need this later, export locations manually to CSV file, needs try/catch/paw-gps format/etc.
#in case we need this later, export locations manually to CSV file, needs try/catch, paw-gps format/etc.
locations = []
for pcapFile in lonely_pcaps:
filename = pcapFile.split('/')[-1:][0].split('.')[0]
Expand Down