├── .gitignore ├── README ├── tshark.commands ├── bruteForceAuthLogParser.py ├── filemoving.py ├── tsharkIPcount.py ├── findWords.py ├── weblogAnalysis.py ├── IPaddressHistogram.py ├── pandasWebLogAnalysis.py ├── NMAPparser.py ├── serverFilesHTTP.py ├── gephiGraphingTest.py ├── geolocationOSX.py ├── wordlistCreator.py ├── chartmaker.py ├── pcap2csv.py ├── trace.py ├── d3jsBarChart.ipynb ├── plottingCandleSticks.py └── pandasAuthLogAnalysis.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | This is a collection of useful tools I found on the Interwebs or created out of need. Feel free to use and resuse etc. -------------------------------------------------------------------------------- /tshark.commands: -------------------------------------------------------------------------------- 1 | tshark -o column.format:""Source", "%s", "Destination", "%d"" -z conv,tcp -r inFile.pcap > outFile.txt 2 | 3 | tshark -V -r infile.pcap > outputfile.txt 4 | 5 | tshark -o column.format:""No.", "%m", "Time", "%Yt", "Time", "%Tt", "Time", "%Rt", "Source", "%s", "Destination", "%d", "Protocol", "%p", "Info", "%i", "Length", "%L"" -z conv,tcp -r inFile.cap > outFile.txt -------------------------------------------------------------------------------- /bruteForceAuthLogParser.py: -------------------------------------------------------------------------------- 1 | def IPsearch(data): 2 | for line in data: 3 | if "Failed password for root from" in line: 4 | find_match = ip.search(line) 5 | IPaddress = find_match.group() 6 | if len(IPaddress) > 6: 7 | Ip = IPaddress.split(" ")[0] 8 | IpHitListing[Ip] = IpHitListing.get(Ip, 0) + 1 9 | return IpHitListing -------------------------------------------------------------------------------- /filemoving.py: -------------------------------------------------------------------------------- 1 | import re 2 | import string 3 | import os 4 | import shutil 5 | 6 | alphabet = string.lowercase 7 | path = "/Users/antigen/Downloads/" 8 | destination = '/Users/antigen/Downloads/files' 9 | 10 | 11 | for letter in alphabet: 12 | try: 13 | os.makedirs(path+letter, 0755) 14 | except: 15 | pass 16 | 17 | for a,b,file in os.walk(destination): 18 | for item in file: 19 | for letter in alphabet: 20 | if re.search('^'+letter, item): 21 | shutil.copyfile(item, path+letter+"/"+item) 22 | 23 | -------------------------------------------------------------------------------- /tsharkIPcount.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | 3 | ''' 4 | This program takes a file and counts up packets to each IP address found in PCAP. 5 | Replaces the commands below. 6 | ''' 7 | ########## 8 | # bash way 9 | ########## 10 | # cat test2.csv | awk {'print $5'} | sort | uniq -c 11 | 12 | ! tshark -r test.pcap -T fields -e frame.number -e eth.src -e eth.dst -e ip.src -e ip.dst -e frame.len -E header=y -E separator=" " > test2.csv 13 | data = ! cat test2.csv 14 | 15 | 16 | IpHitListing = {} 17 | 18 | for line in data: 19 | Ip = line.split(' ')[4] 20 | if 6 < len(Ip) <= 15: 21 | IpHitListing[Ip] = IpHitListing.get(Ip,0) + 1 -------------------------------------------------------------------------------- /findWords.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | This program is for finding words out of lists or books that end or being with certain letters or numbers of basic patterns 4 | ''' 5 | 6 | data = open ("namesFoundOnInternet.txt").readlines() 7 | 8 | #process each line and strip return characters 9 | #append each work matching "end with s" 10 | #create mylist filled with words ending in s 11 | for line in data: 12 | b = list(line.strip()) 13 | if b[-1] == "s": 14 | mylist.append(line) 15 | fout = open ('names.txt', 'w') 16 | for line in mylist: 17 | fout.write(line) 18 | fout.close() 19 | 20 | #process each line and strip return characters 21 | #append each work matching "begin with c" 22 | #create mylist filled with words begin with c 23 | mylist2=[] 24 | for line in mylist: 25 | b = list(line.strip()) 26 | if b[0] == "c": 27 | mylist2.append(line) 28 | 29 | fout = open ('names2.txt', 'w') 30 | for line in mylist2: 31 | fout.write(line) 32 | fout.close() 33 | -------------------------------------------------------------------------------- /weblogAnalysis.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | ''' 3 | This program takes in a apache www-media.log and provides basic report 4 | ''' 5 | 6 | for collections import Counters 7 | 8 | ipAddressList = [] 9 | methodList = [] 10 | requestedList = [] 11 | referalList = [] 12 | mylist = [] 13 | 14 | data = open('www-media.log').readlines() 15 | for line in data: 16 | ipAddressList.append(line.split()[0]) 17 | requestedList.append(line.split()[6]) 18 | methodList.append(line.split()[5]) 19 | referalList.append(line.split()[10]) 20 | 21 | count_ip = Counter(ipAddressList) 22 | count_requested = Counter(requestedList) 23 | count_method = Counter(methodList) 24 | count_referal = Counter(referalList) 25 | 26 | count_ip.most_common() 27 | count_requested.most_common() 28 | count_method.most_common() 29 | count_referal.most_common() 30 | 31 | ''' 32 | This is how you do the same thing in pandas!!!!!!!!!! 33 | 34 | import pandas 35 | data = open('www-media.log').readlines() 36 | frame = pandas.DataFrame([x.split() for x in data]) 37 | 38 | countIP = frame[0].value_counts() 39 | countRequested = frame[6].value_counts() 40 | countReferal = frame[10].value_counts() 41 | 42 | print countIP 43 | print countRequested 44 | print countReferal 45 | 46 | ''' -------------------------------------------------------------------------------- /IPaddressHistogram.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | ''' 3 | This program can reads a target file and counts up IP address hits into a dict 4 | 5 | >>>CalculateApacheIpHits('www-access.log') 6 | { 7 | '10.0.1.14': 18, 8 | '10.0.1.2': 241, 9 | } 10 | 11 | Isolate specific IP addresses with this query 12 | 13 | >>>HitsDictionary = CalculateApacheIpHits("www-access.log") 14 | >>>print HitsDictionary['10.0.1.2'] 15 | >>>18 16 | 17 | ''' 18 | 19 | 20 | def CalculateApacheIpHits(logfile_pathname): 21 | # Make a dictionary to store IP addresses and their hit counts 22 | # and read the contents of the log file line by line 23 | IpHitListing = {} 24 | Contents = open(logfile_pathname, "r").xreadlines( ) 25 | # You can use .readlines in old Python, but if the log is huge... 26 | 27 | # Go through each line of the logfile 28 | for line in Contents: 29 | # Split the string to isolate the IP address 30 | Ip = line.split(" ")[0] 31 | 32 | # Ensure length of the IP address is proper (see discussion) 33 | if 6 < len(Ip) <= 15: 34 | # Increase by 1 if IP exists; else set hit count = 1 35 | IpHitListing[Ip] = IpHitListing.get(Ip, 0) + 1 36 | 37 | return IpHitListing 38 | 39 | -------------------------------------------------------------------------------- /pandasWebLogAnalysis.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | ''' 3 | This program uses pandas to load logs into DataFrame for analysis 4 | ''' 5 | import pandas 6 | data = open('www-media.log').readlines() 7 | frame = pandas.DataFrame([x.split() for x in data]) 8 | 9 | countIP = frame[0].value_counts() 10 | countRequested = frame[6].value_counts() 11 | countReferal = frame[10].value_counts() 12 | 13 | print countIP 14 | print countRequested 15 | print countReferal 16 | 17 | def get_bruteRoot(data): 18 | for line in data: 19 | if 'Failed password for root' in line: 20 | mylist.append(line) 21 | frame = pandas.DataFrame([x.split() for x in mylist]) 22 | return frame[10].value_counts() 23 | 24 | def get_brute2(data, searchTerm): 25 | for line in data: 26 | if 'Failed password for '+searchTerm in line: 27 | mylist.append(line) 28 | frame = pandas.DataFrame([x.split() for x in mylist]) 29 | return frame[10].value_counts() 30 | 31 | def get_failedPasswordInvalidUser(data): 32 | for line in data: 33 | if 'Failed password for invalid user' in line: 34 | mylist.append(line) 35 | failedframe = pandas.DataFrame([x.split() for x in mylist]) 36 | return failedframe[10].value_counts() 37 | 38 | def get_loginHistory(data): 39 | for line in data: 40 | if 'Failed password for' in line: 41 | mylist.append(line) 42 | frame = pandas.DataFrame([x.split() for x in mylist]) 43 | return frame[8].value_counts() -------------------------------------------------------------------------------- /NMAPparser.py: -------------------------------------------------------------------------------- 1 | #/usr/bin/python 2 | ''' 3 | This program rips thru text dumps of NMAP text output and puts it into a pickled datafile for PythonPandas analysis 4 | ''' 5 | 6 | import re 7 | import cPickle as pickle 8 | import datetime 9 | 10 | data = ! cat * 11 | newDict = {} 12 | newList = [] 13 | splitLine = ['0.0.0.0','0.0.0.0'] 14 | pattern = re.compile('[0-9]{1,5}/tcp') 15 | nowTime = datetime.datetime.now().strftime('%Y-%m-%d') 16 | 17 | for line in data: 18 | if 'Nmap scan report for ' in line: 19 | cutLine = line[21:] 20 | splitLine = cutLine.split(' ') 21 | if len(splitLine) == 1: 22 | splitLine.insert(1, splitLine[0]) 23 | if len(splitLine) == 2: 24 | asdf = splitLine[1].lstrip('(') 25 | asdf = asdf.rstrip(')') 26 | splitLine.insert(1, asdf) 27 | splitLine.pop() 28 | 29 | match = pattern.search(line) 30 | if match: # looking for listings of open ports 31 | newList.append(match.group()) 32 | 33 | if len(line) == 0: # this is the end scan object and creates a DICT with all the objects 34 | newDict [splitLine[0]]=[splitLine[1],newList] 35 | newList = [] 36 | 37 | # saving pickled file to disk 38 | pickle.dump( newDict, open( "saveNewDict."+nowTime, "wb", True ) ) 39 | 40 | #printing output 41 | for key, value in newDict.items(): 42 | a,b = value 43 | if len(b) > 0: 44 | print 45 | print key, a, 46 | for item in b: 47 | print item, -------------------------------------------------------------------------------- /serverFilesHTTP.py: -------------------------------------------------------------------------------- 1 | import os 2 | import posixpath 3 | import urllib 4 | import BaseHTTPServer 5 | from SimpleHTTPServer import SimpleHTTPRequestHandler 6 | 7 | # modify this to add additional routes 8 | ROUTES = ( 9 | # [url_prefix , directory_path] 10 | ['/media', '/var/www/media'], 11 | ['', '/var/www/site'] # empty string for the 'default' match 12 | ) 13 | 14 | class RequestHandler(SimpleHTTPRequestHandler): 15 | 16 | def translate_path(self, path): 17 | """translate path given routes""" 18 | 19 | # set default root to cwd 20 | root = os.getcwd() 21 | 22 | # look up routes and set root directory accordingly 23 | for pattern, rootdir in ROUTES: 24 | if path.startswith(pattern): 25 | # found match! 26 | path = path[len(pattern):] # consume path up to pattern len 27 | root = rootdir 28 | break 29 | 30 | # normalize path and prepend root directory 31 | path = path.split('?',1)[0] 32 | path = path.split('#',1)[0] 33 | path = posixpath.normpath(urllib.unquote(path)) 34 | words = path.split('/') 35 | words = filter(None, words) 36 | 37 | path = root 38 | for word in words: 39 | drive, word = os.path.splitdrive(word) 40 | head, word = os.path.split(word) 41 | if word in (os.curdir, os.pardir): 42 | continue 43 | path = os.path.join(path, word) 44 | 45 | return path 46 | 47 | if __name__ == '__main__': 48 | BaseHTTPServer.test(RequestHandler, BaseHTTPServer.HTTPServer) 49 | -------------------------------------------------------------------------------- /gephiGraphingTest.py: -------------------------------------------------------------------------------- 1 | #some import 2 | import org.gephi.graph.api as graph_api 3 | 4 | #we do not need to init a project 5 | 6 | #Get a graph model - it exists because gephi has created the workspace 7 | graphModel = gephi.getLookup().lookup(graph_api.GraphController).getModel() 8 | 9 | #Create three nodes 10 | n0 = graphModel.factory().newNode("n0") #we just remove the type Node and the ; 11 | n0.getNodeData().setLabel("Node 0") 12 | n1 = graphModel.factory().newNode("n1") 13 | n1.getNodeData().setLabel("Node 1") 14 | n2 = graphModel.factory().newNode("n2") 15 | n2.getNodeData().setLabel("Node 2") 16 | 17 | #Create three edges 18 | e1 = graphModel.factory().newEdge(n1, n2, 1., True)#we remove Edge, true->True and 1f -> 1. 19 | #it was in java : Edge e1 = graphModel.factory().newEdge(n1, n2, 1f, true); 20 | e2 = graphModel.factory().newEdge(n0, n2, 2., True) 21 | e3 = graphModel.factory().newEdge(n2, n0, 2., True) #This is e2's mutual edge 22 | 23 | #Append as a Directed Graph 24 | directedGraph = graphModel.getDirectedGraph() 25 | directedGraph.addNode(n0) 26 | directedGraph.addNode(n1) 27 | directedGraph.addNode(n2) 28 | directedGraph.addEdge(e1) 29 | directedGraph.addEdge(e2) 30 | directedGraph.addEdge(e3) 31 | 32 | #Count nodes and edges 33 | print "Nodes: ", directedGraph.getNodeCount(), " Edges: ",directedGraph.getEdgeCount() #python does not transform objects into str. We use a more pythonic way to present output 34 | 35 | #Get a UndirectedGraph now and count edges 36 | undirectedGraph = graphModel.getUndirectedGraph() 37 | print "Edges: ", undirectedGraph.getEdgeCount() #The mutual edge is automatically merged 38 | 39 | #Iterate over nodes 40 | for n in directedGraph.getNodes() : 41 | neighbors = directedGraph.getNeighbors(n).toArray() 42 | print n.getNodeData().getLabel(), "has", len(neighbors), "neighbors" 43 | 44 | 45 | #Iterate over edges 46 | for e in directedGraph.getEdges() : 47 | print e.getSource().getNodeData().getId(), " -> ", e.getTarget().getNodeData().getId() 48 | 49 | 50 | #Find node by id 51 | node2 = directedGraph.getNode("n2") 52 | 53 | #Get degree 54 | print "Node2 degree: ", directedGraph.getDegree(node2) -------------------------------------------------------------------------------- /geolocationOSX.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Julien Deudon (initbrain) - 20/03/2012 15h35 3 | # modified to english version by Dan Gleebits 20/06/2012 4 | # modified to run on OS X by James Armitage 25/06/2012 5 | # modified to process in python Dan Gleebits 26/06/2012 6 | # parsing xml Vincent Ohprecio 01/10/2012 7 | 8 | from commands import getoutput 9 | import re, urllib2, webbrowser 10 | import json as simplejson 11 | import xml.etree.ElementTree as ET 12 | 13 | airport_scan_xml = '/System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport --scan -x' 14 | address_match = '([a-fA-F0-9]{1,2}[:|\-]?){6}' 15 | 16 | def get_signal_strengths(): 17 | signal_by_address = {} 18 | root = ET.fromstring(getoutput(airport_scan_xml)) 19 | networks = root.getchildren()[0] 20 | 21 | for network in networks: 22 | # First "string" child is MAC address 23 | address = network.find("string").text 24 | # Eighth "integer" is signal strength 25 | strength = abs(int(network.findall("integer")[7].text)) 26 | signal_by_address[address] = strength 27 | 28 | return signal_by_address 29 | 30 | def convert_dict_to_json(signal_by_address): 31 | location_request = { 32 | "version": "1.1.0", 33 | "request_address": False, 34 | "wifi_towers": [], 35 | } 36 | 37 | for address, signal in signal_by_address.items(): 38 | tower = {"mac_address": address, "signal_strength": signal} 39 | location_request["wifi_towers"].append(tower) 40 | 41 | return simplejson.JSONEncoder().encode(location_request) 42 | 43 | def post_json_and_get_lat_long(json): 44 | output = simplejson.loads(urllib2.urlopen('https://www.google.com/loc/json', json).read()) 45 | 46 | return output["location"]["latitude"], output["location"]["longitude"] 47 | 48 | 49 | if __name__ == "__main__": 50 | print "[+] Scanning network" 51 | signal_by_address = get_signal_strengths() 52 | 53 | json = convert_dict_to_json(signal_by_address) 54 | 55 | print "[+] Sending the request to Google" 56 | loc = post_json_and_get_lat_long(json) 57 | 58 | map_url = "http://maps.google.com/maps?q=%s,%s" % loc 59 | print "[+] Google Map" 60 | print map_url 61 | 62 | webbrowser.open(map_url) 63 | -------------------------------------------------------------------------------- /wordlistCreator.py: -------------------------------------------------------------------------------- 1 | alphabet='abcdefghijklmnopqrstuvwxyz' 2 | numbers = '0123456789' 3 | upperAlpha = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 4 | 5 | with open("test2.txt", "a") as myfile: 6 | 7 | for l1 in alphabet: 8 | final1 = l1+'\n' 9 | myfile.write(final1) 10 | 11 | for l2 in alphabet: 12 | final2 = l1+l2+'\n' 13 | myfile.write(final2) 14 | 15 | for l3 in alphabet: 16 | final3 = l1+l2+l3+'\n' 17 | myfile.write(final3) 18 | 19 | for l4 in alphabet: 20 | final4 = l1+l2+l3+l4+'\n' 21 | myfile.write(final4) 22 | 23 | for l5 in alphabet: 24 | final5 = l1+l2+l3+l4+l5+'\n' 25 | myfile.write(final5) 26 | 27 | for l6 in alphabet: 28 | final6 = l1+l2+l3+l4+l5+l6+'\n' 29 | myfile.write(final6) 30 | 31 | for l7 in alphabet: 32 | final7 = l1+l2+l3+l4+l5+l6+l7+'\n' 33 | myfile.write(final7) 34 | 35 | for l8 in alphabet: 36 | final8 = l1+l2+l3+l4+l5+l6+l7+l8+'\n' 37 | myfile.write(final8) 38 | 39 | import hashlib 40 | fh = open ('test2.txt') 41 | fw = open ('sha1Hashed.txt', 'w') 42 | for i in range(10000): 43 | password = fh.readline().strip() 44 | hex = hashlib.sha1(password).hexdigest() 45 | fw.write(password+','+hex+'\n') 46 | fh.close 47 | fw.close 48 | ''' 49 | # terrible code to generate 8 char wordlist 50 | 51 | 52 | alphabet='abcdefghijklmnopqrstuvwxyz' 53 | numbers = '0123456789' 54 | 55 | for l1 in alphabet: 56 | for l2 in alphabet: 57 | for l3 in alphabet: 58 | for l4 in alphabet: 59 | for l5 in alphabet: 60 | for l6 in alphabet: 61 | for l7 in alphabet: 62 | for l8 in alphabet: 63 | final1 = l1+'\n' 64 | final2 = l1+l2+'\n' 65 | final3 = l1+l2+l3+'\n' 66 | final4 = l1+l2+l3+l4+'\n' 67 | final5 = l1+l2+l3+l4+l5+'\n' 68 | final6 = l1+l2+l3+l4+l5+l6+'\n' 69 | final7 = l1+l2+l3+l4+l5+l6+l7+'\n' 70 | final8 = l1+l2+l3+l4+l5+l6+l7+l8+'\n' 71 | with open("test.txt", "a") as myfile: 72 | myfile.write(final1) 73 | myfile.write(final2) 74 | myfile.write(final3) 75 | myfile.write(final4) 76 | myfile.write(final5) 77 | myfile.write(final6) 78 | myfile.write(final7) 79 | myfile.write(final8) 80 | 81 | ''' 82 | -------------------------------------------------------------------------------- /chartmaker.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | ''' 3 | Program loads pickled data of NMAP scans and parses them into GoogleCharts API html webpage for visualization 4 | Writes both pie.html and bar.html to disk 5 | ''' 6 | 7 | 8 | import cPickle as pickle 9 | webcounter = 0 10 | SSLwebcounter=0 11 | Web8080=0 12 | Windows135=0 13 | Windows139=0 14 | Windows445=0 15 | Windows3389=0 16 | 17 | dump = pickle.load(open('saveNewDict.2012-05-10')) 18 | 19 | for item in dump.items(): 20 | if len(item[1][1])>0: print item 21 | 22 | for item in dump.items(): 23 | dnsName = item[0] 24 | ipPorts = item[1] 25 | ipaddress = item[1][0] 26 | ports = item[1][1] 27 | if len(ports)>0: 28 | print 29 | print dnsName,ipaddress 30 | for i in range(len(ports)): 31 | print '*'*100 32 | print ipaddress, ports 33 | if '80/tcp' in ports: 34 | webcounter += 1 35 | if '443/tcp' in ports: 36 | SSLwebcounter +=1 37 | if '8080/tcp' in ports: 38 | Web8080 +=1 39 | if '135/tcp' in ports: 40 | Windows135 += 1 41 | if '139/tcp' in ports: 42 | Windows139 +=1 43 | if '445/tcp' in ports: 44 | Windows445 +=1 45 | if '3389/tcp' in ports: 46 | Windows3389 +=1 47 | 48 | wList = ['Webservers',webcounter] 49 | sslList = ['SSL Boxes',SSLwebcounter] 50 | web8080List = ['Web8080',Web8080] 51 | W135List = ['Windows135',Windows135] 52 | W139List = ['Windows139',Windows139] 53 | W445List = ['Windows445',Windows445] 54 | W3389List = ['RDP3389',Windows3389] 55 | 56 | htmlCodeStart = ''' 57 | 58 | 59 | 60 | 61 | 80 | 81 | 82 | 83 |
84 | 85 | 86 | ''' 87 | 88 | 89 | htmlCodeBarChart = ''' 90 | ]); 91 | var options = {'title':'Distribution External ', 92 | 'width':400, 93 | 'height':300}; 94 | var chart = new google.visualization.BarChart(document.getElementById('chart_div')); 95 | chart.draw(data, options); 96 | } 97 | 98 | 99 | 100 | 101 |
102 | 103 | 104 | ''' 105 | 106 | # this cats the Pie together 107 | makePagePie = htmlCodeStart+str(wList)+','+str(W135List)+','+str(W139List)+','+str(W445List)+','+str(sslList)+','+str(web8080List)+','+str(W3389List)+htmlCodePieChart 108 | 109 | # this cats the Bar together 110 | makePageBar = htmlCodeStart+str(wList)+','+str(W135List)+','+str(W139List)+','+str(W445List)+','+str(sslList)+','+str(web8080List)+','+str(W3389List)+htmlCodeBarChart 111 | 112 | #open file 113 | fh = open('pie.html', 'w') 114 | fd = open('bar.html', 'w') 115 | 116 | #write file 117 | fh.write(makePagePie) 118 | fd.write(makePageBar) 119 | 120 | #close handles 121 | fh.close() 122 | fd.close() 123 | -------------------------------------------------------------------------------- /pcap2csv.py: -------------------------------------------------------------------------------- 1 | #! /usr/bin/python 2 | ''' 3 | ALPHA Program takes PCAP files that have been converted by ***tshark -V -r infile.pcap > outputfile.txt*** 4 | and creates comma separated (csv) file 5 | ''' 6 | 7 | import sys 8 | import os 9 | import re 10 | import datetime 11 | 12 | 13 | fh = open('outputfile.txt') 14 | data = fh.readlines() 15 | 16 | fn = ' Frame Number: ' 17 | fl = ' Frame Length: ' 18 | src = ' Source: ' 19 | dest = ' Destination: ' 20 | srcPort = ' Src Port: ' 21 | destPort = ' Dst Port: ' 22 | seqNum = ' Seq: ' 23 | ack = ' Ack: ' 24 | lenPacket = ' Len: ' 25 | 26 | for line in data: 27 | if 'Frame Number:' in line: 28 | print line 29 | if 'Frame Length:' in line: 30 | print line 31 | if 'Source:' in line: 32 | print line 33 | if 'Destination:' in line: 34 | print line 35 | if 'Transmission Control Protocol, Src Port: ' in line: 36 | stats = line.split(',') 37 | for item in stats: 38 | if srcPort in item: 39 | print item[len(srcPort):] 40 | if destPort in item: 41 | print item[len(destPort):] 42 | if seqNum in item: 43 | print item[len(seqNum):] 44 | if ack in item: 45 | print item[len(ack):] 46 | if lenPacket in item: 47 | print item[len(lenPacket):] 48 | 49 | testdata = ''' 50 | Frame 1486: 60 bytes on wire (480 bits), 60 bytes captured (480 bits) 51 | Arrival Time: May 10, 2012 21:44:46.146028000 PDT 52 | Epoch Time: 1336711486.146028000 seconds 53 | [Time delta from previous captured frame: 2.139154000 seconds] 54 | [Time delta from previous displayed frame: 2.139154000 seconds] 55 | [Time since reference or first frame: 714.744465000 seconds] 56 | Frame Number: 1486 57 | Frame Length: 60 bytes (480 bits) 58 | Capture Length: 60 bytes (480 bits) 59 | [Frame is marked: False] 60 | [Frame is ignored: False] 61 | [Protocols in frame: eth:ip:igmp] 62 | Ethernet II, Src: CiscoSpv_df:58:a4 (48:44:87:df:58:a4), Dst: IPv4mcast_6f:00:05 (01:00:5e:6f:00:05) 63 | Destination: IPv4mcast_6f:00:05 (01:00:5e:6f:00:05) 64 | Address: IPv4mcast_6f:00:05 (01:00:5e:6f:00:05) 65 | .... ...1 .... .... .... .... = IG bit: Group address (multicast/broadcast) 66 | .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default) 67 | Source: CiscoSpv_df:58:a4 (48:44:87:df:58:a4) 68 | Address: CiscoSpv_df:58:a4 (48:44:87:df:58:a4) 69 | .... ...0 .... .... .... .... = IG bit: Individual address (unicast) 70 | .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default) 71 | Type: IP (0x0800) 72 | Trailer: 0000000000000000000000000000 73 | Internet Protocol Version 4, Src: 192.168.1.64 (192.168.1.64), Dst: 232.239.0.5 (232.239.0.5) 74 | Version: 4 75 | Header length: 24 bytes 76 | Differentiated Services Field: 0xa0 (DSCP 0x28: Class Selector 5; ECN: 0x00: Not-ECT (Not ECN-Capable Transport)) 77 | 1010 00.. = Differentiated Services Codepoint: Class Selector 5 (0x28) 78 | .... ..00 = Explicit Congestion Notification: Not-ECT (Not ECN-Capable Transport) (0x00) 79 | Total Length: 32 80 | Identification: 0xe0b6 (57526) 81 | Flags: 0x00 82 | 0... .... = Reserved bit: Not set 83 | .0.. .... = Don't fragment: Not set 84 | ..0. .... = More fragments: Not set 85 | Fragment offset: 0 86 | Time to live: 1 87 | Protocol: IGMP (2) 88 | Header checksum: 0x98a4 [correct] 89 | [Good: True] 90 | [Bad: False] 91 | Source: 192.168.1.64 (192.168.1.64) 92 | Destination: 232.239.0.5 (232.239.0.5) 93 | Options: (4 bytes) 94 | Router Alert: Every router examines packet 95 | Internet Group Management Protocol 96 | [IGMP Version: 2] 97 | Type: Membership Report (0x16) 98 | Max Response Time: 0.0 sec (0x00) 99 | Header checksum: 0x010b [correct] 100 | Multicast Address: 232.239.0.5 (232.239.0.5) 101 | 102 | Frame 1487: 1484 bytes on wire (11872 bits), 1484 bytes captured (11872 bits) 103 | Arrival Time: May 10, 2012 21:44:47.078486000 PDT 104 | Epoch Time: 1336711487.078486000 seconds 105 | [Time delta from previous captured frame: 0.932458000 seconds] 106 | [Time delta from previous displayed frame: 0.932458000 seconds] 107 | [Time since reference or first frame: 715.676923000 seconds] 108 | Frame Number: 1487 109 | Frame Length: 1484 bytes (11872 bits) 110 | Capture Length: 1484 bytes (11872 bits) 111 | [Frame is marked: False] 112 | [Frame is ignored: False] 113 | [Protocols in frame: eth:ip:udp:data] 114 | Ethernet II, Src: CiscoSpv_df:58:a4 (48:44:87:df:58:a4), Dst: IPv4mcast_7f:ff:fa (01:00:5e:7f:ff:fa) 115 | Destination: IPv4mcast_7f:ff:fa (01:00:5e:7f:ff:fa) 116 | Address: IPv4mcast_7f:ff:fa (01:00:5e:7f:ff:fa) 117 | .... ...1 .... .... .... .... = IG bit: Group address (multicast/broadcast) 118 | .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default) 119 | Source: CiscoSpv_df:58:a4 (48:44:87:df:58:a4) 120 | Address: CiscoSpv_df:58:a4 (48:44:87:df:58:a4) 121 | .... ...0 .... .... .... .... = IG bit: Individual address (unicast) 122 | .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default) 123 | Type: IP (0x0800) 124 | Internet Protocol Version 4, Src: 192.168.1.64 (192.168.1.64), Dst: 239.255.255.250 (239.255.255.250) 125 | Version: 4 126 | Header length: 20 bytes 127 | Differentiated Services Field: 0xa0 (DSCP 0x28: Class Selector 5; ECN: 0x00: Not-ECT (Not ECN-Capable Transport)) 128 | 1010 00.. = Differentiated Services Codepoint: Class Selector 5 (0x28) 129 | .... ..00 = Explicit Congestion Notification: Not-ECT (Not ECN-Capable Transport) (0x00) 130 | Total Length: 1470 131 | Identification: 0xe0b7 (57527) 132 | Flags: 0x00 133 | 0... .... = Reserved bit: Not set 134 | .0.. .... = Don't fragment: Not set 135 | ..0. .... = More fragments: Not set 136 | Fragment offset: 0 137 | Time to live: 1 138 | Protocol: UDP (17) 139 | Header checksum: 0x20f5 [correct] 140 | [Good: True] 141 | [Bad: False] 142 | Source: 192.168.1.64 (192.168.1.64) 143 | Destination: 239.255.255.250 (239.255.255.250) 144 | User Datagram Protocol, Src Port: neod2 (1048), Dst Port: us-cli (8082) 145 | Source port: neod2 (1048) 146 | Destination port: us-cli (8082) 147 | Length: 1450 148 | Checksum: 0x7a43 [validation disabled] 149 | [Good Checksum: False] 150 | [Bad Checksum: False] 151 | Data (1442 bytes) 152 | ''' 153 | 154 | ########################################### to do ############################### 155 | # 156 | #need re for ip address 157 | # '((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9])' 158 | # 159 | #need re for mac address 160 | # '([0-9A-F]{2}[:-]){5}([0-9A-F]{2})' 161 | # 162 | #need to make test module 163 | # 164 | # ValidIpAddressRegex = "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$"; 165 | # 166 | # ValidHostnameRegex = "^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z]|[A-Za-z][A-Za-z0-9\-]*[A-Za-z0-9])$"; 167 | 168 | 169 | ''' 170 | import re, urllib2 171 | 172 | ips = re.findall('(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3})\.(?:[\d]{1,3})', page) 173 | urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', page) 174 | emails = re.findall('[a-zA-Z0-9+_\-\.]+@[0-9a-zA-Z][.-0-9a-zA-Z]*.[a-zA-Z]+', page) 175 | page = ''.join( urllib2.urlopen('http://www.example.com/index.html').readlines() ) 176 | 177 | def date_from_filename (filename): 178 | m = re.match(".*?[0-9]{2}-(?P[0-9]{4})(?P[0-9]{2})(?P[0-9]{2})(?P[0-9]{2})(?P[0-9]{2})(?P[0-9]{2})-(?P[0-9]{2}).*?", filename) 179 | if m is None: 180 | print "Bad date parse in filename:", filename 181 | return None 182 | day = int(m.group('DAY')) 183 | month = int(m.group('MONTH')) 184 | year = int(m.group('YEAR')) 185 | hour = int(m.group('HOUR')) 186 | min = int(m.group('MIN')) 187 | sec = int(m.group('SEC')) 188 | dts = (year, month, day, hour, min, sec, 0, 1, -1) 189 | return dts 190 | 191 | re.search ("(?is)username.*?password", string) 192 | ''' 193 | -------------------------------------------------------------------------------- /trace.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | Copyright (C) 2011 by Sebastien Goasguen 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy 7 | of this software and associated documentation files (the "Software"), to deal 8 | in the Software without restriction, including without limitation the rights 9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the Software is 11 | furnished to do so, subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in 14 | all copies or substantial portions of the Software. 15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 22 | THE SOFTWARE. 23 | ''' 24 | 25 | import subprocess 26 | import networkx as nx 27 | import socket 28 | import sys 29 | import os 30 | import re 31 | import getopt 32 | import json 33 | import datetime 34 | import pprint 35 | 36 | '''Need to be root to use scapy''' 37 | import scapy 38 | from scapy.all import * 39 | 40 | '''Needed to plot the graph, otherwise skip''' 41 | import matplotlib 42 | matplotlib.use('TKAgg') 43 | import matplotlib.pyplot as plt 44 | 45 | '''Needed to keep a store of all targets 46 | Requires to run a mongodb server on your machine 47 | ''' 48 | import pymongo 49 | from pymongo import Connection 50 | 51 | def setup_db(): 52 | '''Needs mongodb to store targets as dictionaries''' 53 | try: 54 | dbconn=Connection() 55 | except: 56 | print "Could not connect to db, make sure you started mongo" 57 | try: 58 | db=dbconn.pentest 59 | except: 60 | print "Could not get the pentest database" 61 | 62 | try: 63 | collection=db.targets 64 | except: 65 | print "Could not get the collection of targets" 66 | 67 | return collection 68 | 69 | def get_all_targets(): 70 | '''Retruns all targets stored in the database''' 71 | for t in coll.find(): 72 | pprint.pprint(t,indent=4) 73 | 74 | class target(): 75 | '''Create a target class based on a dictionary 76 | Stored in a mongodb document store 77 | IP input is assumed to be a tuple returned by bruteforce_reversedns call 78 | Needs to be improved 79 | ''' 80 | def __init__(self,ip,coll=None): 81 | self.target={} 82 | self.ip=ip[0] 83 | self.target['hostname']=ip[1] 84 | self.collection=coll 85 | 86 | def __str__(self): 87 | '''Uses json to do a pretty print of the target dictionary''' 88 | return json.dumps(self.target,sort_keys=True,indent=4) 89 | 90 | def make_target(self): 91 | '''Populates the target dictionary and stores it in a database of targets''' 92 | self.target_ip() 93 | self.port_scanned() 94 | self.traceroute() 95 | self.target['Timestamp']=str(datetime.datetime.utcnow()) 96 | self.collection.insert(self.target) 97 | 98 | def target_ip(self): 99 | '''Sets the IP in the dictionary''' 100 | self.target['ip']=self.ip 101 | 102 | def port_scanned(self): 103 | ''''Sets the open/closed/filtered ports for that target based on a basic nmap scan 104 | and inserts it in the dictionary''' 105 | ports=nmap_scan(self.ip) 106 | self.target['ports']=ports 107 | 108 | def traceroute(self): 109 | '''Runs a scapy TCP traceroute and inserts it in the dictionary''' 110 | hops=scapytraceroute(self.ip) 111 | self.target['traceroute']=hops 112 | 113 | def bruteforce_reversedns(base_ip): 114 | '''Gets hostnames registered in DNS from IP range, takes Class C as input 115 | i.e 130.127.39. Would be nice to do a proper CIDR notation 116 | Could try to do DNS requests in scapy 117 | ''' 118 | ip_list=[] 119 | for i in range(255): 120 | try: 121 | (hostname,alias,ip)=socket.gethostbyaddr(base_ip+str(i)) 122 | ip_list.append((ip[0],hostname)) 123 | except: 124 | pass 125 | 126 | return ip_list 127 | 128 | def nmap_scan(host): 129 | '''Calls nmap with a subprocess to get the list of open ports 130 | Uses a Syn scan, edit the cmd string to your needs and taste. 131 | Returns a list of ports via basic regular expression of nmap output 132 | Could be improved using nmap xml output and proper xml parsing 133 | ''' 134 | ports = [] 135 | cmd = 'sudo nmap -Pn -sS ' + host 136 | print 'Scanning: ' + cmd 137 | p=subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) 138 | (pout,perr)=p.communicate() 139 | 140 | foobar=re.compile('tcp') 141 | for line in pout.split('\n'): 142 | if foobar.search(line): 143 | print line 144 | ports.append(line) 145 | return ports 146 | 147 | def localtraceroute(host,num_hops): 148 | '''Calls traceroute via subprocess, needs host and the number of hops as arguments 149 | Edit the trace string to your needs and taste, it returns a list of hops 150 | ''' 151 | hops=[] 152 | trace='traceroute -m %d %s' % (num_hops,host) 153 | print trace 154 | res=subprocess.Popen(trace,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) 155 | (pstdout,psterr)=res.communicate() 156 | lines=pstdout.split('\n') 157 | for line in lines[:num_hops]: 158 | hops.append(line.split(' ')[num_hops-1].rstrip(')').lstrip('(')) 159 | return hops 160 | 161 | def scapytraceroute(host): 162 | '''Uses scapy to do a tcp traceroute hopefully goes through firewalls 163 | Returns a list of hops 164 | ''' 165 | hops=[] 166 | try: 167 | res,unans=traceroute(host) 168 | except: 169 | print "Could not trace route with scapy !" 170 | return hops 171 | 172 | host_key=res.get_trace().keys()[0] 173 | 174 | for key in res.get_trace()[host_key].keys(): 175 | hops.append(res.get_trace()[host_key][key][0]) 176 | 177 | return hops 178 | 179 | def traceroute_plot(targets): 180 | '''Plots the graph of the traceroutes for a list of IP targets 181 | Calls the scapytraceroute. 182 | ''' 183 | g=nx.Graph() 184 | source=socket.gethostbyname(socket.gethostname()) 185 | 186 | for t in targets: 187 | hops=scapytraceroute(t) 188 | print hops 189 | 190 | g.add_node(t) 191 | g.add_edge(source,hops[0]) 192 | 193 | if len(hops) >= 1: 194 | for hop in hops: 195 | next_hop_index=hops.index(hop)+1 196 | if next_hop_index != len(hops): 197 | g.add_edge(hop,hops[next_hop_index]) 198 | else: 199 | g.add_edge(hop,t) 200 | 201 | nx.draw(g,with_labels=False) 202 | plt.savefig("/Users/runseb/Desktop/481_f2011/trace.png") 203 | nx.write_dot(g,"/Users/runseb/Desktop/481_f2011/trace.dot") 204 | 205 | def traceroute_plot_from_db(targets): 206 | '''Assumes that if a target is in the db then the traceroute has already been run 207 | The targets input is a list of dictionaries from the db instead of IPs only. 208 | ''' 209 | g=nx.Graph() 210 | source=socket.gethostbyname(socket.gethostname()) 211 | 212 | for t in targets: 213 | hops=t['traceroute'] 214 | print hops 215 | 216 | g.add_node(t['ip']) 217 | g.add_edge(source,hops[0]) 218 | 219 | if len(hops) >= 1: 220 | for hop in hops: 221 | next_hop_index=hops.index(hop)+1 222 | if next_hop_index != len(hops): 223 | g.add_edge(hop,hops[next_hop_index]) 224 | else: 225 | g.add_edge(hop,t['ip']) 226 | 227 | nx.draw(g,with_labels=False) 228 | plt.savefig("/Users/runseb/Desktop/481_f2011/trace.png") 229 | nx.write_dot(g,"/Users/runseb/Desktop/481_f2011/trace.dot") 230 | 231 | def main(): 232 | '''Main function''' 233 | targets=[] 234 | 235 | try: 236 | fh=open(targets_file,'r') 237 | except: 238 | print "targets.list file not present" 239 | sys.exit() 240 | 241 | for line in fh.readlines(): 242 | targets.append(line.strip('\n')) 243 | 244 | traceroute_plot(targets) 245 | 246 | def readopt(): 247 | ''' Uses getopt to read the input arguments/options ''' 248 | ''' Should be improve to specify file of targets ''' 249 | try: 250 | options, remainder = getopt.getopt(sys.argv[1:],'b:s:t:f:') 251 | except getopt.GetoptError, err: 252 | print str(err) 253 | usage() 254 | sys.exit(2) 255 | 256 | '''Set defaults''' 257 | global base_ip,host_to_scan,host_to_traceroute,targets_file 258 | base_ip = '127.0.0' 259 | host_to_scan = '127.0.0.1' 260 | host_to_traceroute = '127.0.0.1' 261 | targets_file = 'targets.list' 262 | 263 | for opt, arg in options: 264 | if opt == '-b': 265 | base_ip = arg 266 | elif opt == '-s': 267 | host_to_scan = arg 268 | elif opt == '-t': 269 | host_to_traceroute == arg 270 | elif opt == '-f': 271 | targets_file == arg 272 | else: 273 | usage() 274 | sys.exit(2) 275 | def usage(): 276 | '''Prints the input arguments options if you run the code directly''' 277 | print "This code plots the traceroute to a set of hosts" 278 | 279 | if __name__=="__main__": 280 | readopt() 281 | sys.exit(main()) 282 | else: 283 | print "The db setup will called and can be refered as trace.coll() in an interactive shell" 284 | coll=setup_db() 285 | -------------------------------------------------------------------------------- /d3jsBarChart.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "d3jsBarChart" 4 | }, 5 | "nbformat": 2, 6 | "worksheets": [ 7 | { 8 | "cells": [ 9 | { 10 | "cell_type": "code", 11 | "collapsed": true, 12 | "input": [ 13 | "front1 = '''", 14 | "", 15 | "", 16 | "\t", 17 | "\t\tAttack Report - Bar Chart", 18 | "\t\t", 19 | "\t\t", 22 | "\t", 23 | "\t", 24 | "\t\t

Login attempts for '''", 25 | "\t\t", 26 | "\t\t", 27 | "front2 = '''

", 28 | "\t\t", 82 | "\t", 83 | "", 84 | "'''", 85 | "hostname = 'www.example.com'", 86 | "data = str([10,20,30,40,50,40,30,20,10])", 87 | "", 88 | "def populateBarChart(hostname, data):", 89 | "\t'''", 90 | "\tThis function takes 2 parameters - hostname and data and populates into D3.js Bar Chart.", 91 | "\t'''", 92 | "\treturn front1+hostname+front2+data+end1" 93 | ], 94 | "language": "python", 95 | "outputs": [], 96 | "prompt_number": 18 97 | }, 98 | { 99 | "cell_type": "code", 100 | "collapsed": true, 101 | "input": [ 102 | "fh = open('index.html', 'w')", 103 | "fh.write (populateBarChart(hostname,data))", 104 | "fh.close()" 105 | ], 106 | "language": "python", 107 | "outputs": [], 108 | "prompt_number": 19 109 | }, 110 | { 111 | "cell_type": "code", 112 | "collapsed": false, 113 | "input": [ 114 | "from IPython.core.display import HTML", 115 | "HTML('