├── data
│   ├── __init__.py
│   └── param_blacklist.py
├── exploits
│   ├── __init__.py
│   ├── template.py
│   └── PUT.py
├── tornado_proxy
│   ├── __init__.py
│   ├── requirements.txt
│   ├── setup_https_intercept.sh
│   ├── LICENSE
│   ├── README.md
│   ├── .gitignore
│   ├── filelock.py
│   ├── timer.py
│   ├── socket_wrapper.py
│   ├── gen_cert.py
│   └── proxy.py
├── requirements.txt
├── stop.sh
├── README.md
├── templates
│   ├── modal_style.html
│   ├── show_detect.html
│   └── show_vul.html
├── LICENSE
├── proxy.pac
├── run.sh
├── config.py
├── AutoFill.user.js
├── .gitignore
├── install.sql
├── parser.py
├── httplog.py
├── zoneresolver.py
├── fuzzer.py
├── proxy.py
└── XSSHelper.user.js

/data/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/data/param_blacklist.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/exploits/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/tornado_proxy/__init__.py:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/tornado_proxy/requirements.txt:
--------------------------------------------------------------------------------
1 | tornado
2 | pycurl
3 | pyopenssl
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | tornado
2 | requests
3 | pycurl
4 | pymysql
5 | flask
6 | dnslib
--------------------------------------------------------------------------------
/stop.sh:
--------------------------------------------------------------------------------
1 | cmd=$(which tmux) # tmux path
2 | session="zeroexploit"
3 | 
4 | $cmd kill-session -t $session
--------------------------------------------------------------------------------
/tornado_proxy/setup_https_intercept.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 | 
3 | openssl genrsa -out ca.key 2048
4 | openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=proxy2 CA"
5 | openssl genrsa -out cert.key 2048
6 | mkdir certs/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ZeroExploit
2 | ultimate web vul hunter
3 | 
4 | Detects vulnerabilities by combining a browser-side (front-end) component with a back-end scanner. Still under development.
5 | This is a big undertaking and there is no telling when it will be finished.
6 | 
7 | Configure the browser proxy via the PAC file; see httplog.py for the path.
8 | Load the userscripts with Tampermonkey: XSSHelper.user.js for detection, and AutoFill.user.js set to run after the page has loaded so forms are filled in automatically.
9 | 
10 | Set the scope URLs and the report URL in XSSHelper.user.js.
11 | Fill in the settings in config.py.
12 | 
13 | Normal use requires three scripts to be running:
14 | 
15 | dns: zoneresolver.py
16 | httpserver: httplog.py
17 | proxyserver: proxy.py
18 | 
19 | Load XSSHelper.user.js in the browser.
20 | Point the browser at the proxy.
21 | 
22 | That is it for now.
23 | 
24 | ## TODO
25 | 
26 | * The onerror event always fires after some of the page's own JavaScript has already run; the hook code should be injected at the very top of the page.
27 | * URL-rewritten (pseudo-static) URLs are not handled for now.
28 | * Only POST query-string bodies are handled at the moment; GET requests without parameters are not inserted into the database.
--------------------------------------------------------------------------------
/exploits/template.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | author = 'md5_salt' # optional
3 | timeout = 30 # recommended (seconds)
4 | vtype = 'all' #value type: all/int/float/str/url/json/special/ special|json
5 | 
6 | 
7 | def 
exploit(request, response, method, key, is_array=False): 8 | info = "this is a test" 9 | return {'result': 'safe', 'info': info, 'hash': None} # result为('safe', 'vul', 'unknown', 'continue'), 第3个返回用于unknown时的延时判断 10 | 11 | if __name__ == '__main__': 12 | req = {'uri': '', 'body':'', 'headers': ''} 13 | rsp = {} 14 | method = 'GET' 15 | key = '' 16 | print exploit(req, rsp, method, key) -------------------------------------------------------------------------------- /templates/modal_style.html: -------------------------------------------------------------------------------- 1 | 2 | 5 | 6 | -------------------------------------------------------------------------------- /exploits/PUT.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import requests 3 | import os 4 | import config 5 | author = 'md5_salt' # 可选 6 | timeout = 5 # 建议 7 | vtype = 'all' #value type: all/int/float/str/url/json/special 8 | stype = 'java|asp|aspx' 9 | 10 | # https://paper.seebug.org/403/ 11 | # https://mp.weixin.qq.com/s/uTiWDsPKEjTkN6z9QNLtSA 12 | # https://pivotal.io/security/cve-2017-8046 13 | 14 | def exploit(request, response, method, key, is_array=False): 15 | 16 | if config.dbconn().fetch_rows('result', condition="exploit='%s' and result != 'continue' and `host`='%s'" % (os.path.basename(__file__)[:-3], request['host']), order="id asc", limit="1", fetchone=True): return 17 | allow = requests.options(request['uri']).headers.get('Allow', '') 18 | if allow.find('PUT') != -1 or allow.find('PATCH') != -1: 19 | return {'result': 'vul', 'info': "Server support put/patch method", 'hash': None, 'level': "middle"} 20 | else: 21 | return {'result': 'safe', 'info': "Server does not support put/patch method", 'hash': None, 'level': "middle"} 22 | 23 | if __name__ == '__main__': 24 | pass -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 5alt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
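The plugin contract sketched in exploits/template.py above (and used by exploits/PUT.py) is: a module exposes `author` (optional), `timeout` (recommended), the `vtype`/`stype` type filters, and an `exploit(request, response, method, key, is_array=False)` function returning a dict whose `result` is one of 'safe', 'vul', 'unknown' or 'continue'; the `hash` field is what the framework later uses for delayed, out-of-band confirmation of 'unknown' results. Below is a minimal illustrative plugin written against that contract; the reflection marker, the GET-only handling and the `level` value are assumptions for the example, not code from the repository.

```python
# -*- coding: utf-8 -*-
# Illustrative plugin following the exploit() contract from exploits/template.py.
# The marker value and the GET-only probe are assumptions for this sketch.
import requests

try:                                       # Python 3
    from urllib.parse import urlparse, parse_qs, urlencode, urlunparse
except ImportError:                        # Python 2
    from urlparse import urlparse, parse_qs, urlunparse
    from urllib import urlencode

author = 'example'   # optional
timeout = 10         # recommended per-run limit, in seconds
vtype = 'str'        # only run against string-typed parameters
stype = 'all'        # any server-side language

MARKER = 'zx9probe37'  # assumed not to appear in normal responses


def exploit(request, response, method, key, is_array=False):
    if method != 'GET':
        return {'result': 'continue', 'info': 'only GET handled here', 'hash': None}

    # Rebuild the query string with the target parameter replaced by the marker.
    parsed = urlparse(request['uri'])
    params = parse_qs(parsed.query)
    params[key] = MARKER
    probe = urlunparse(parsed._replace(query=urlencode(params, doseq=True)))

    body = requests.get(probe, timeout=timeout).text
    if MARKER in body:
        return {'result': 'vul', 'level': 'low', 'hash': None,
                'info': 'parameter %s is reflected without encoding' % key}
    return {'result': 'safe', 'level': 'low', 'hash': None,
            'info': 'marker was not reflected'}


if __name__ == '__main__':
    req = {'uri': 'http://testphp.vulnweb.com/listproducts.php?cat=1',
           'body': '', 'headers': {}, 'host': 'testphp.vulnweb.com'}
    print(exploit(req, {}, 'GET', 'cat'))
```

fuzzer.py further down is what interprets these return values: rows whose result is 'continue' are not treated as a final verdict by its check_special query, and an 'unknown' result is upgraded to 'vul' once the returned hash shows up in the `hashlog` table.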
22 | 
--------------------------------------------------------------------------------
/tornado_proxy/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2017 5alt
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/tornado_proxy/README.md:
--------------------------------------------------------------------------------
1 | # tornado-proxy
2 | 
3 | tornado-proxy is an HTTP/HTTPS proxy server built on tornado; it supports both Python 2 and Python 3.
4 | 
5 | It is a modified version of owtf-proxy that combines the two different approaches to HTTPS taken by owtf-proxy and tornado-proxy, and borrows the request/response interception and handling approach of proxy2.
6 | 
7 | ## Features
8 | 
9 | * Two ways of handling HTTPS requests: transparent tunnelling, or intercepting traffic with dynamically forged certificates
10 | * High performance
11 | * Customisable request and response handling
12 | * An upstream proxy for this proxy server can be configured
13 | 
14 | ## Dependencies
15 | 
16 | * pyopenssl
17 | * tornado
18 | * pycurl
19 | 
20 | `pip install -r requirements.txt`
21 | 
22 | On OS X, if pip fails to compile pycurl, set the environment variable `ARCHFLAGS="-arch x86_64"`:
23 | 
24 | `sudo env ARCHFLAGS="-arch x86_64" pip install pycurl`
25 | 
26 | On Ubuntu, install `libssl-dev` and `libcurl4-openssl-dev` first.
27 | 
28 | ## Enabling HTTPS interception
29 | 
30 | Intercepting HTTPS traffic requires generating a private key and a CA certificate first. Add the generated CA certificate to the browser's trusted store.
31 | 
32 | `$ ./setup_https_intercept.sh`
33 | 
34 | Delete the generated certificate and private key files to stop intercepting HTTPS traffic.
35 | 
36 | ## Customisation
37 | 
38 | `ProxyHandler` has three methods that can be used to modify or store request and response data.
39 | 
40 | * request_handler: called before the proxy sends the request to the web server
41 | * response_handler: called before the proxy returns the response to the client
42 | * save_handler: called after the client has received the response
43 | 
44 | 
45 | ## References
46 | 
47 | https://github.com/tunnelshade/owtf-proxy
48 | 
49 | https://github.com/senko/tornado-proxy
50 | 
51 | https://github.com/inaz2/proxy2
--------------------------------------------------------------------------------
/proxy.pac:
--------------------------------------------------------------------------------
1 | function FindProxyForURL(url, host) {
2 |     var proxy_host = '{{proxy_host}}'
3 |     var proxy_port = {{proxy_port}}
4 | 
5 |     var whitelist_domain = {{included_host|safe}}
6 |     var blacklist_domain = {{excluded_host|safe}}
7 |     var blacklist_ext = {{filter_file|safe}}
8 | 
9 |     if (url.substring(0,5)!="http:" && url.substring(0,6)!="https:") return "DIRECT";
10 | 
11 |     if(blacklist_domain.length){
12 |         for (var i = 0, len = blacklist_domain.length; i < len; i++) {
13 |             if (host.endsWith(blacklist_domain[i]))
14 |                 return 'DIRECT';
15 |         }
16 |     }
17 | 
18 |     if(whitelist_domain.length){
19 |         for (var i = 0, len = whitelist_domain.length; i < len; i++) {
20 |             if 
(host.endsWith(whitelist_domain[i]))
21 |                 break;
22 |         }
23 |         if (i == len) return 'DIRECT';
24 |     }
25 |     if(blacklist_ext.length){
26 |         for (var i = 0, len = blacklist_ext.length; i < len; i++) {
27 |             if (url.toLowerCase().split('?')[0].endsWith(blacklist_ext[i]))
28 |                 return 'DIRECT';
29 |         }
30 |     }
31 | 
32 |     return "PROXY "+proxy_host+":"+proxy_port+"; DIRECT";
33 | 
34 | }
--------------------------------------------------------------------------------
/run.sh:
--------------------------------------------------------------------------------
1 | cmd=$(which tmux) # tmux path
2 | session="zeroexploit"
3 | 
4 | if [ -z $cmd ]; then
5 |     echo "You need to install tmux."
6 |     exit 1
7 | fi
8 | 
9 | $cmd has-session -t $session > /dev/null
10 | 
11 | if [ $? != 0 ]; then
12 |     $cmd new -d -n base-act -s $session ""
13 |     $cmd splitw -v -t $session
14 |     $cmd splitw -h -t $session
15 |     $cmd splitw -v -t $session
16 |     $cmd splitw -h -t $session
17 |     #$cmd splitw -v -t $session
18 |     $cmd select-layout -t $session tiled
19 | 
20 |     $cmd send-keys -t $session:0.0 'python httplog.py' C-m
21 |     $cmd send-keys -t $session:0.1 'python proxy.py' C-m
22 |     $cmd send-keys -t $session:0.2 'python parser.py' C-m
23 |     $cmd send-keys -t $session:0.3 'python fuzzer.py' C-m
24 |     #$cmd send-keys -t $session:0.4 'whoami' C-m
25 | 
26 |     #$cmd set-window-option synchronize-panes on
27 |     #$cmd neww -n vim -t $session "zsh"
28 |     #$cmd selectw -t $session:5
29 | fi
30 | 
31 | $cmd att -t $session
32 | 
33 | exit 0
34 | 
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | from lib.libmysql import MYSQL
3 | 
4 | def ip():
5 |     import socket
6 |     return socket.gethostbyname(socket.gethostname())
7 | 
8 | proxy_host = '127.0.0.1'#ip()
9 | proxy_port = 8888
10 | 
11 | DNS_DOMAIN = 'd.5alt.me'
12 | SERVER_IP = '127.0.0.1'#ip()
13 | 
14 | hash_pattern = r'[a-z0-9]{5}'
15 | 
16 | filter_file = [".js", ".ico", ".flv", ".css", ".jpg", ".png", ".jpeg", ".gif", ".pdf", ".ss3", ".txt", ".rar", ".zip", ".avi", ".mp4", ".swf", ".wmi", ".exe", ".mpeg"] #host.endswith(item), '.html', '.htm',
17 | filter_code = r'4\d{2}' #re.match
18 | included_host = ['vulnweb.com'] #host.endswith(item)
19 | excluded_host = [".gov", "localhost", "127.0.0.1", "google.com", "gstatic.com", "cnzz.com", "doubleclick.com", "mil.cn", "gov.cn", "gov.com"] #host.endswith(item)
20 | filter_content_type = ['html', 'xml', 'json', 'javascript', 'plain']
21 | 
22 | dbhost = 'localhost'
23 | dbuser = 'root'
24 | dbpwd = 'root'
25 | dbname = 'zeroexploit'
26 | dbcharset = 'utf8'
27 | unix_socket = None # for other platform
28 | #unix_socket = "/Applications/MAMP/tmp/mysql/mysql.sock" #for osx MAMP
29 | 
30 | def dbconn():
31 |     return MYSQL(dbhost, dbuser, dbpwd, dbname, dbcharset, unix_socket)
32 | 
33 | 
34 | max_save_size = 1024*1024 # 1M
35 | 
36 | EXPLOITS_PATH = "./exploits/"
37 | EXPLOIT_TIMEOUT = 5
--------------------------------------------------------------------------------
/AutoFill.user.js:
--------------------------------------------------------------------------------
1 | // ==UserScript==
2 | // @name         AutoFill
3 | // @version      0.1
4 | // @description  XSS Helper
5 | // @exclude      http://*.google.com/*
6 | // @exclude      https://*.google.com/*
7 | // @grant        none
8 | // @run-at       document-idle
9 | // ==/UserScript==
10 | 
11 | 
12 | // @match      http://*/*
13 | // @match      https://*/*
14 | 
15 | //md5_salt
16 | 
17 | // auto fill forms
18 | (function() {
19 | 
document.all.constructor.prototype.forEach = Array.prototype.forEach; 20 | document.all.forEach(function(n){ 21 | fuzz_mark_color = '#cfffdd'; 22 | //fuzz_value = 'md5_salt23333\'"\\'; 23 | //fuzz_textarea = 'md5_salt23333\'"\\
\\'; 25 | fuzz_textarea = fuzz_value; 26 | if((n.tagName.toLowerCase() == 'input' && n.type.toLowerCase() == 'text') && n.type.toLowerCase() !== 'hidden' && !n.readOnly){ 27 | n.style.backgroundColor = fuzz_mark_color; 28 | n.value = fuzz_value; 29 | } 30 | if(n.tagName.toLowerCase() == 'textarea' && n.type.toLowerCase() !== 'hidden'){ 31 | n.style.backgroundColor = fuzz_mark_color; 32 | n.value = fuzz_textarea; 33 | } 34 | }); 35 | })(); -------------------------------------------------------------------------------- /tornado_proxy/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *,cover 46 | .hypothesis/ 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | local_settings.py 55 | 56 | # Flask stuff: 57 | instance/ 58 | .webassets-cache 59 | 60 | # Scrapy stuff: 61 | .scrapy 62 | 63 | # Sphinx documentation 64 | docs/_build/ 65 | 66 | # PyBuilder 67 | target/ 68 | 69 | # IPython Notebook 70 | .ipynb_checkpoints 71 | 72 | # pyenv 73 | .python-version 74 | 75 | # celery beat schedule file 76 | celerybeat-schedule 77 | 78 | # dotenv 79 | .env 80 | 81 | # virtualenv 82 | venv/ 83 | ENV/ 84 | 85 | # Spyder project settings 86 | .spyderproject 87 | 88 | # Rope project settings 89 | .ropeproject 90 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | # Byte-compiled / optimized / DLL files 4 | __pycache__/ 5 | *.py[cod] 6 | *$py.class 7 | 8 | # C extensions 9 | *.so 10 | 11 | # Distribution / packaging 12 | .Python 13 | env/ 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | *.egg-info/ 27 | .installed.cfg 28 | *.egg 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | .hypothesis/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # dotenv 85 | .env 86 | 87 | # virtualenv 88 | .venv 89 | venv/ 90 | ENV/ 91 | 92 | # Spyder project settings 93 | .spyderproject 94 | .spyproject 95 | 96 | # Rope project settings 97 | .ropeproject 98 | 99 | # mkdocs documentation 100 | /site 101 | 102 | # mypy 103 | .mypy_cache/ 104 | -------------------------------------------------------------------------------- /install.sql: -------------------------------------------------------------------------------- 1 | DROP DATABASE IF EXISTS zeroexploit; 2 | 3 | CREATE DATABASE `zeroexploit` DEFAULT CHARACTER SET utf8 collate utf8_general_ci; 4 | 5 | USE zeroexploit; 6 | 7 | # type=1 返回是text/html的基准请求 8 | # type=2 是ajax请求 9 | # type=3 其他请求 10 | # checked=0 是未处理 11 | # checked=1 按参数分解处理 12 | 13 | CREATE TABLE `http` ( 14 | `id` int NOT NULL AUTO_INCREMENT, 15 | `gid` int(11) NOT NULL, 16 | `host` varchar(255) NOT NULL, 17 | `req` LongText COLLATE utf8_bin NOT NULL, 18 | `rsp` LongText COLLATE utf8_bin NOT NULL, 19 | `time` int(11) NOT NULL, 20 | `type` int(8) NOT NULL, 21 | `signature` varchar(64) NOT NULL, 22 | `checked` int(3) DEFAULT 0, 23 | PRIMARY KEY (`id`), 24 | UNIQUE KEY (`signature`) 25 | )ENGINE=InnoDB DEFAULT CHARSET=utf8; 26 | 27 | 28 | CREATE TABLE `detector`( 29 | `id` int NOT NULL AUTO_INCREMENT, 30 | `hash` varchar(255) NOT NULL, 31 | `url` TEXT, 32 | `method` varchar(255) NOT NULL, 33 | `info` TEXT, 34 | `show` int(3) DEFAULT 1, 35 | PRIMARY KEY (`id`), 36 | UNIQUE KEY (`hash`) 37 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 38 | 39 | # lang/framework 40 | CREATE TABLE `siteinfo`( 41 | `host` varchar(255) NOT NULL, 42 | `key` varchar(255) NOT NULL, 43 | `value` varchar(255) NOT NULL, 44 | PRIMARY KEY (`host`, `key`) 45 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 46 | 47 | # type: int float url str array 48 | CREATE TABLE `requests`( 49 | `id` int NOT NULL AUTO_INCREMENT, 50 | `requestid` int NOT NULL, 51 | `method` varchar(255) NOT NULL, 52 | `key` varchar(255), 53 | `type` varchar(255), 54 | `checked` int DEFAULT 0, 55 | PRIMARY KEY (`id`), 56 | FOREIGN KEY (requestid) REFERENCES http(id) ON UPDATE CASCADE ON DELETE RESTRICT 57 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 58 | 59 | CREATE TABLE `result`( 60 | `id` int NOT NULL AUTO_INCREMENT, 61 | `requestid` int NOT NULL, 62 | `method` varchar(255) NOT NULL, 63 | `key` varchar(255) NOT NULL, 64 | `host` varchar(255) NOT NULL, 65 | `url` TEXT, 66 | `hash` varchar(255), 67 | `result` varchar(255) NOT NULL, 68 | `level` varchar(255) NOT NULL, 69 | `info` TEXT, 70 | `exploit` varchar(255) NOT NULL, 71 | `show` int(3) DEFAULT 1, 72 | PRIMARY KEY (`id`), 73 | FOREIGN KEY (requestid) REFERENCES http(id) ON UPDATE CASCADE ON DELETE RESTRICT 74 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8; 75 | 76 | CREATE TABLE `hashlog`( 77 | 
`id` int NOT NULL AUTO_INCREMENT,
78 | `hash` varchar(255),
79 | PRIMARY KEY (`id`),
80 | UNIQUE KEY (`hash`)
81 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
82 | 
--------------------------------------------------------------------------------
/parser.py:
--------------------------------------------------------------------------------
1 | #coding=utf8
2 | import config
3 | import json
4 | import sys, time
5 | py3k = sys.version_info.major > 2
6 | import os.path
7 | import urllib
8 | 
9 | if py3k:
10 |     from urllib import parse as urlparse
11 | else:
12 |     import urlparse
13 | 
14 | def get_one():
15 |     return config.dbconn().fetch_rows('http', condition={'checked': 0}, order="id asc", limit="1", fetchone=True)
16 | 
17 | def check_key(key):
18 |     '''
19 |     Decide whether this key is worth keeping
20 |     '''
21 |     blacklist = ['t', 'r', 'submit']
22 |     if key.lower() in blacklist:
23 |         return False
24 |     return True
25 | 
26 | def check_value(value, vtype):
27 |     '''
28 |     Decide whether this value is worth keeping
29 |     '''
30 |     if vtype == 'array': return False
31 |     return True
32 | 
33 | def get_type(key, value):
34 |     if type(value) == type([]): return 'array'
35 |     # value is a single string at this point; list values were handled above
36 |     if value.isdigit(): return 'int'
37 |     try:
38 |         float(value)
39 |         return 'float'
40 |     except:
41 |         pass
42 |     # url check
43 |     u = urlparse.urlparse(value)
44 |     if u.scheme and u.netloc:
45 |         return 'url'
46 |     try:
47 |         j = json.loads(value)
48 |         if type(j) == type([]) or type(j) == type({}):
49 |             return 'json'
50 |     except:
51 |         pass
52 |     return 'str'
53 | 
54 | while True:
55 |     http = get_one()
56 |     if not http:
57 |         time.sleep(3)
58 |         continue
59 |     req = json.loads(http['req'])
60 | 
61 |     if req['rtype'] not in ['qs', 'rewrite']:
62 |         config.dbconn().insert('requests', {'requestid': http['id'], 'method': req['method'], 'key': '', 'type': 'special|'+req['rtype']})
63 |     else:
64 |         # support array like a[]=1&a[]=2
65 |         parsed = urlparse.urlparse(req['uri'])
66 |         get_parts = urlparse.parse_qs(parsed.query)
67 |         if get_parts:
68 |             for k,v in get_parts.items():
69 |                 v = v[0] if len(v) == 1 else v
70 |                 vtype = get_type(k, v)
71 |                 if check_key(k) and check_value(v, vtype):
72 |                     config.dbconn().insert('requests', {'requestid': http['id'], 'method': "GET", 'key': k, 'type': vtype})
73 | 
74 |         if not parsed.query and not os.path.splitext(parsed.path)[1] and len(parsed.path.split('/')) > 3:
75 |             path_parts = parsed.path.split('/')
76 |             for i in range(3, len(path_parts)):
77 |                 vtype = 'rewrite|'+get_type('rewrite', path_parts[i])
78 |                 config.dbconn().insert('requests', {'requestid': http['id'], 'method': "GET", 'key': str(i), 'type': vtype})
79 | 
80 | 
81 |         if req['method'] == "POST":
82 |             post_parts = urlparse.parse_qs(req['body'])
83 |             if post_parts:
84 |                 for k,v in post_parts.items():
85 |                     v = v[0] if len(v) == 1 else v
86 |                     vtype = get_type(k, v)
87 |                     if check_key(k) and check_value(v, vtype):
88 |                         config.dbconn().insert('requests', {'requestid': http['id'], 'method': "POST", 'key': k, 'type': vtype})
89 | 
90 |     config.dbconn().update('http', {'checked': 1}, {'id': http['id']})
--------------------------------------------------------------------------------
/tornado_proxy/filelock.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import errno
4 | 
5 | # Based on http://www.evanfosmark.com/2009/01/cross-platform-file-locking-support-in-python/
6 | 
7 | 
8 | class FileLockTimeoutException(Exception):
9 |     pass
10 | 
11 | 
12 | class FileLock(object):
13 |     """ A file locking mechanism that has context-manager
support so 14 | you can use it in a with statement. This should be relatively cross 15 | compatible as it doesn't rely on msvcrt or fcntl for the locking. 16 | Timeout when waiting for lock is supported. 17 | 18 | WARNING: The code is not safe for all NFS implementations. 19 | 20 | See the O_EXCL section of the "open" manpage, test it first! 21 | 22 | Usage example:: 23 | 24 | with FileLock('/tmp/myproject-critical-processing'): 25 | print "This section will only be run by a single thread." 26 | 27 | """ 28 | def __init__(self, file_name, timeout=30, delay=.15): 29 | """ Prepare the file locker. Specify the file to lock and optionally 30 | the maximum timeout and the delay between each attempt to lock. 31 | """ 32 | self.is_locked = False 33 | self.lockfile = os.path.join(os.getcwd(), "%s.lock" % file_name) 34 | self.file_name = file_name 35 | self.timeout = timeout 36 | self.delay = delay 37 | 38 | def acquire(self): 39 | """ Acquire the lock, if possible. If the lock is in use, it check again 40 | every `wait` seconds. It does this until it either gets the lock or 41 | exceeds `timeout` number of seconds, in which case it throws 42 | an exception. 43 | """ 44 | start_time = time.time() 45 | while True: 46 | try: 47 | self.fd = os.open(self.lockfile, os.O_CREAT|os.O_EXCL|os.O_RDWR) 48 | break 49 | except OSError as e: 50 | if e.errno != errno.EEXIST: 51 | raise 52 | if (time.time() - start_time) >= self.timeout: 53 | raise FileLockTimeoutException("%d seconds passed." % self.timeout) 54 | time.sleep(self.delay) 55 | self.is_locked = True 56 | 57 | def release(self): 58 | """ Get rid of the lock by deleting the lockfile. 59 | When working in a `with` statement, this gets automatically 60 | called at the end. 61 | """ 62 | if self.is_locked: 63 | os.close(self.fd) 64 | os.unlink(self.lockfile) 65 | self.is_locked = False 66 | 67 | def __enter__(self): 68 | """ Activated when used in the with statement. 69 | Should automatically acquire a lock to be used in the with block. 70 | """ 71 | if not self.is_locked: 72 | self.acquire() 73 | return self 74 | 75 | def __exit__(self, type, value, traceback): #@UnusedVariable 76 | """ Activated at the end of the with statement. 77 | It automatically releases the lock if it isn't locked. 78 | """ 79 | if self.is_locked: 80 | self.release() 81 | 82 | def __del__(self): 83 | """ Make sure that the FileLock instance doesn't leave a lockfile 84 | lying around. 
85 | """ 86 | self.release() 87 | -------------------------------------------------------------------------------- /httplog.py: -------------------------------------------------------------------------------- 1 | import config 2 | 3 | from flask import Flask, request, render_template_string, send_from_directory, make_response, render_template 4 | import re 5 | import hashlib 6 | import json 7 | 8 | from lib.httphelper import build_request 9 | 10 | import urlparse 11 | 12 | app = Flask(__name__) 13 | 14 | @app.route('/pac') 15 | def pac(): 16 | with open('proxy.pac', 'r') as f: 17 | content = f.read() 18 | return render_template_string(content, proxy_host=config.proxy_host, proxy_port=config.proxy_port, included_host=config.included_host, excluded_host=config.excluded_host, filter_file=config.filter_file) 19 | 20 | @app.route('/report_log', methods=['POST']) 21 | def report_log(): 22 | url = request.form.get('url', '') 23 | method = request.form.get('method', '') 24 | info = request.form.get('info', '') 25 | print url 26 | if url and method and info: 27 | #h = hashlib.md5(url+method+info).hexdigest() 28 | host = urlparse.urlparse(url).netloc 29 | h = hashlib.md5(host+method+info).hexdigest() 30 | config.dbconn().insert('detector', {'hash': h, 'url': url, 'method': method, 'info': info}) 31 | rst = make_response('') 32 | rst.headers['Access-Control-Allow-Origin'] = '*' 33 | return rst 34 | 35 | 36 | @app.route('/', defaults={'path': ''}) 37 | @app.route('/') 38 | def catch_all(path): 39 | h = request.args.get('hash', '') 40 | host = request.headers.get('host', '').split('.')[0] 41 | if h and re.match(config.hash_pattern, h): 42 | #update database 43 | config.dbconn().update('payload', {'status': "vul"}, {'hash':h}) 44 | config.dbconn().insert('hashlog', {'hash':h}) 45 | if host and re.match(config.hash_pattern, host): 46 | config.dbconn().update('payload', {'status': "vul"}, {'hash':host}) 47 | config.dbconn().insert('hashlog', {'hash':host}) 48 | return '' 49 | 50 | @app.route('/api/get_detector') 51 | def get_detector(): 52 | def process(obj): 53 | st = """%s"""%(obj['url'], obj['url']) 54 | obj['url'] = st 55 | obj['info'] = render_template('modal_style.html', content=obj['info'], target_hash=obj['hash']) 56 | return obj 57 | return json.dumps(map(process, config.dbconn().fetch_rows('detector', condition={'show': 1}, order="id DESC"))) 58 | 59 | @app.route('/api/del_detector') 60 | def del_detector(): 61 | h = request.args.get('hash', '') 62 | if h: 63 | config.dbconn().update('detector', {'show': 0}, {'hash':h}) 64 | return '' 65 | 66 | @app.route('/detector') 67 | def detector(): 68 | return send_from_directory('templates' ,'show_detect.html') 69 | 70 | 71 | @app.route('/api/get_vul') 72 | def get_vul(): 73 | def process(obj): 74 | st = """%s"""%(obj['url'], obj['url']) 75 | obj['url'] = st 76 | request = json.loads(config.dbconn().fetch_rows('http', condition={'id': obj['requestid']}, fetchone=True)['req']) 77 | obj['request'] = render_template('modal_style.html', content=build_request(request), target_hash=obj['id']) 78 | return obj 79 | return json.dumps(map(process, config.dbconn().fetch_rows('result', condition="`show`=1 and result='vul'", order="id DESC"))) 80 | 81 | @app.route('/api/del_vul') 82 | def del_vul(): 83 | h = request.args.get('id', '') 84 | if h: 85 | config.dbconn().update('result', {'show': 0}, {'id':h}) 86 | return '' 87 | 88 | @app.route('/vul') 89 | def vul(): 90 | return send_from_directory('templates' ,'show_vul.html') 91 | 92 | 93 | if __name__ == '__main__': 
94 | app.run(debug=True, host='0.0.0.0')#, ssl_context=("1_zeroexploit.5alt.me_bundle.crt", "2_zeroexploit.5alt.me.key")) -------------------------------------------------------------------------------- /zoneresolver.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import os 4 | import copy 5 | import re 6 | from dnslib import RR, QTYPE, RCODE 7 | from dnslib.server import DNSServer, DNSHandler, BaseResolver, DNSLogger 8 | 9 | import config 10 | 11 | 12 | class MysqlLogger(): 13 | def log_data(self, dnsobj): 14 | pass 15 | 16 | def log_error(self, handler, e): 17 | pass 18 | 19 | def log_pass(self, *args): 20 | pass 21 | 22 | def log_prefix(self, handler): 23 | pass 24 | 25 | def log_recv(self, handler, data): 26 | pass 27 | 28 | def log_reply(self, handler, reply): 29 | pass 30 | 31 | def log_request(self, handler, request): 32 | domain = request.q.qname.__str__() 33 | h = domain.split('.')[0] 34 | if domain.endswith(config.DNS_DOMAIN + '.'): 35 | if h and re.match(config.hash_pattern, h): 36 | #update database 37 | config.dbconn().update('result', {'result': "vul"}, {'hash':h}) 38 | config.dbconn().insert('hashlog', {'hash':h}) 39 | 40 | def log_send(self, handler, data): 41 | pass 42 | 43 | def log_truncated(self, handler, reply): 44 | pass 45 | 46 | 47 | class ZoneResolver(BaseResolver): 48 | """ 49 | Simple fixed zone file resolver. 50 | """ 51 | 52 | def __init__(self, zone, glob=False): 53 | """ 54 | Initialise resolver from zone file. 55 | Stores RRs as a list of (label,type,rr) tuples 56 | If 'glob' is True use glob match against zone file 57 | """ 58 | self.zone = [(rr.rname, QTYPE[rr.rtype], rr) for rr in RR.fromZone(zone)] 59 | self.glob = glob 60 | self.eq = 'matchGlob' if glob else '__eq__' 61 | 62 | def resolve(self, request, handler): 63 | """ 64 | Respond to DNS request - parameters are request packet & handler. 65 | Method is expected to return DNS response 66 | """ 67 | reply = request.reply() 68 | qname = request.q.qname 69 | qtype = QTYPE[request.q.qtype] 70 | for name, rtype, rr in self.zone: 71 | # Check if label & type match 72 | if getattr(qname, self.eq)(name) and ( 73 | qtype == rtype or qtype == 'ANY' or rtype == 'CNAME'): 74 | # If we have a glob match fix reply label 75 | if self.glob: 76 | a = copy.copy(rr) 77 | a.rname = qname 78 | reply.add_answer(a) 79 | else: 80 | reply.add_answer(rr) 81 | # Check for A/AAAA records associated with reply and 82 | # add in additional section 83 | if rtype in ['CNAME', 'NS', 'MX', 'PTR']: 84 | for a_name, a_rtype, a_rr in self.zone: 85 | if a_name == rr.rdata.label and a_rtype in ['A', 'AAAA']: 86 | reply.add_ar(a_rr) 87 | if not reply.rr: 88 | reply.header.rcode = RCODE.NXDOMAIN 89 | return reply 90 | 91 | 92 | def main(): 93 | zone = ''' 94 | *.{dnsdomain}. IN A {serverip} 95 | {dnsdomain}. 
IN A {serverip} 96 | '''.format( 97 | dnsdomain=config.DNS_DOMAIN, serverip=config.SERVER_IP) 98 | resolver = ZoneResolver(zone, True) 99 | logger = MysqlLogger() 100 | print("Starting Zone Resolver (%s:%d) [%s]" % ("*", 53, "UDP")) 101 | 102 | udp_server = DNSServer(resolver, 103 | port=53, 104 | address='', 105 | logger=logger) 106 | udp_server.start() 107 | if __name__ == '__main__': 108 | main() 109 | -------------------------------------------------------------------------------- /tornado_proxy/timer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | owtf is an OWASP+PTES-focused try to unite great tools and facilitate pen testing 4 | Copyright (c) 2011, Abraham Aranguren Twitter: @7a_ http://7-a.org 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | * Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | * Neither the name of the copyright owner nor the 15 | names of its contributors may be used to endorse or promote products 16 | derived from this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY 22 | DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
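zoneresolver.py above answers every name under config.DNS_DOMAIN with config.SERVER_IP, and its MysqlLogger.log_request records any leading label matching config.hash_pattern into the `hashlog` table while flipping the matching `result` row to 'vul'; httplog.py's catch-all route logs HTTP callbacks into `hashlog` the same way, and fuzzer.check_hash() later reads that table to confirm blind findings. The sketch below shows how an exploit module might mint such a callback; it assumes it runs from the project root so `config` is importable, and the injected command is purely illustrative.

```python
# -*- coding: utf-8 -*-
# Sketch of the out-of-band confirmation flow used by zoneresolver.py,
# httplog.py and fuzzer.check_hash(). The payload template is illustrative.
import random
import string

import config  # the project's config.py (DNS_DOMAIN, SERVER_IP, hash_pattern)


def new_hash():
    # Must match config.hash_pattern (r'[a-z0-9]{5}'), otherwise the DNS and
    # HTTP listeners will ignore the callback.
    return ''.join(random.choice(string.ascii_lowercase + string.digits)
                   for _ in range(5))


def callback_payload():
    h = new_hash()
    # Any DNS lookup of <hash>.d.5alt.me reaches zoneresolver.py; fetching the
    # URL over HTTP additionally hits httplog.py's catch-all route.
    domain = '%s.%s' % (h, config.DNS_DOMAIN)
    payload = ';curl http://%s/' % domain   # illustrative blind probe
    return h, payload


if __name__ == '__main__':
    h, payload = callback_payload()
    print('inject: %r' % payload)
    print("return {'result': 'unknown', 'hash': %r, ...} from exploit()" % h)
```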
28 | 29 | The time module allows the rest of the framework to time how long it takes for certain actions to execute and present this information in both seconds and human-readable form 30 | ''' 31 | import time 32 | 33 | class Timer: 34 | Time = {} # Dictionary of Timers, Several timers can be set at any given point in time 35 | 36 | def __init__(self, DateTimeFormat = "%d/%m/%Y-%H:%M"): 37 | self.DateTimeFormat = DateTimeFormat 38 | 39 | def StartTimer(self, OffSet = '0'): 40 | self.Time[OffSet] = {} 41 | self.Time[OffSet]['Start'] = self.GetCurrentDateTime() 42 | self.Time[OffSet]['Time'] = time.time() 43 | return [ self.Time[OffSet]['Start'], self.Time[OffSet]['Time'] ] 44 | 45 | def GetCurrentDateTimeAsStr(self): 46 | return self.GetTimeAsStr(self.GetCurrentDateTime()) 47 | 48 | def GetCurrentDateTime(self): 49 | return time.strftime(self.DateTimeFormat) 50 | 51 | def GetElapsedTime(self, OffSet = '0'): 52 | Time = time.time() - self.Time[OffSet]['Time'] 53 | return Time 54 | 55 | def GetTimeAsStr(self, seconds): 56 | seconds, miliseconds = str(seconds).split('.') 57 | seconds = int(seconds) 58 | miliseconds = int(miliseconds[0:3]) 59 | hours = seconds / 3600 60 | seconds -= 3600*hours 61 | minutes = seconds / 60 62 | seconds -= 60*minutes 63 | TimeStr = '' 64 | if hours > 0: 65 | TimeStr += "%2dh, " % hours 66 | if minutes > 0: 67 | TimeStr += "%2dm, " % minutes 68 | TimeStr += "%2ds, %3dms" % (seconds,miliseconds) 69 | return TimeStr.strip() # Strip necessary to get rid of leading spaces sometimes 70 | 71 | def EndTimer(self, Offset = '0'): 72 | self.Time[Offset]['End'] = self.GetCurrentDateTime() 73 | 74 | def GetElapsedTimeAsStr(self, Offset = '0'): 75 | Elapsed = self.GetElapsedTime(Offset) 76 | ToString = self.GetTimeAsStr(Elapsed) 77 | self.EndTimer(Offset) 78 | #print "Elapsed="+str(Elapsed)+", ToString="+ToString 79 | return ToString 80 | 81 | def GetStartDateTimeAsStr(self, Offset = '0'): 82 | return self.Time[Offset]['Start'] 83 | 84 | def GetEndDateTimeAsStr(self, Offset = '0'): 85 | if not 'End' in self.Time[Offset]: 86 | self.EndTimer(Offset) 87 | return self.Time[Offset]['End'] -------------------------------------------------------------------------------- /fuzzer.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import multiprocessing 3 | import time 4 | import sys 5 | import imp 6 | from os import listdir,path 7 | import config 8 | import os, signal 9 | import json 10 | 11 | import traceback 12 | 13 | conn = config.dbconn() 14 | 15 | def AttackTargets(module_name, request, response, method, key, is_array=False): 16 | signal.signal(signal.SIGALRM, timeout_handler) 17 | try: 18 | module = imp.load_source(module_name, config.EXPLOITS_PATH+ os.sep + module_name + '.py') 19 | timeout = module.timeout if module.timeout else config.EXPLOIT_TIMEOUT 20 | signal.alarm(timeout) 21 | ret = module.exploit(request, response, method, key, is_array) 22 | signal.alarm(0) 23 | return ret 24 | except TimeoutError: 25 | print '%s timeout' % module_name 26 | except Exception as e: 27 | traceback.print_exc() 28 | print ('`%s`: %s' % (module_name, e)) 29 | signal.alarm(0) 30 | 31 | class TimeoutError(Exception): 32 | pass 33 | 34 | def timeout_handler(signum, frame): 35 | raise TimeoutError() 36 | 37 | class Result: 38 | def __init__(self, module_name, requestid, method, key, url, host): 39 | self.requestid=requestid 40 | self.method=method 41 | self.key=key 42 | self.host=host 43 | self.url=url 44 | self.module_name = 
module_name 45 | 46 | 47 | def callback(self, ret): 48 | if not ret: return 49 | result = ret.get('result', 'unknown') 50 | info = ret.get('info', 'unknown') 51 | level = ret.get('level', 'low') 52 | hash = ret.get('hash', None) 53 | if check_hash(hash) and result=='unknown': 54 | result = 'vul' 55 | result = {'requestid': self.requestid, 56 | 'method': self.method, 57 | 'key': self.key, 58 | 'host': self.host, 59 | 'url': self.url, 60 | 'level': level, 61 | 'hash': hash, 62 | 'info': info, 63 | 'result': result, 64 | 'exploit': self.module_name} 65 | config.dbconn().insert(table= 'result', data = result) 66 | 67 | def get_one(): 68 | r = conn.fetch_rows('requests', condition={'checked': 0}, order="id asc", limit="1", fetchone=True) 69 | if not r: return None 70 | r['http'] = conn.fetch_rows('http', condition={'id': r['requestid']}, fetchone=True) 71 | return r 72 | 73 | def mark_checked(id): 74 | conn.update('requests', {'checked': 1}, {'id': id}) 75 | 76 | def check_special(module_name, hostname): 77 | if conn.fetch_rows('result', condition="exploit='%s' and result != 'continue' and `host`='%s'" % (module_name, hostname), limit="1", fetchone=True): 78 | return True 79 | else: 80 | return False 81 | 82 | def check_hash(hash): 83 | if config.dbconn().fetch_rows('hashlog', condition={'hash': hash}, limit="1", fetchone=True): 84 | return True 85 | else: 86 | return False 87 | 88 | 89 | pool = multiprocessing.Pool() 90 | 91 | modules = [] 92 | for f in os.listdir(config.EXPLOITS_PATH): 93 | if f.endswith('.py') and not f.endswith('__init__.py'): 94 | modules.append(imp.load_source(f[:-3], config.EXPLOITS_PATH+ os.sep + f)) 95 | 96 | while True: 97 | 98 | r = get_one() 99 | if not r: 100 | time.sleep(3) 101 | continue 102 | 103 | request = json.loads(r['http']['req']) 104 | response = json.loads(r['http']['rsp']) 105 | 106 | if 'special' in r['type']: 107 | m = filter(lambda module: 'special' == module.vtype or ('special' in module.vtype and r['type'].replace('special|','') in module.vtype), modules) 108 | m = filter(lambda module: check_special(module.__name__, request['host']), m) 109 | elif 'rewrite' in r['type']: 110 | m = filter(lambda module: 'rewrite' == module.vtype or ('rewrite' in module.vtype and r['type'].replace('rewrite|','') in module.vtype), modules) 111 | else: 112 | m = filter(lambda module: (module.vtype == 'all' or r['type'] in module.vtype) and 'special' not in module.vtype and 'rewrite' not in module.vtype, modules) 113 | 114 | 115 | stype = conn.fetch_rows('siteinfo', condition={'host': request['host'], 'key': 'lang'}, limit="1", fetchone=True) 116 | if stype: 117 | m = filter(lambda module: module.stype == 'all' or stype['value'] == 'unknown' or stype['value'] in module.stype, m) 118 | 119 | method = r['method'] 120 | key = r['key'] 121 | is_array = r['type'] == 'array' 122 | #print r['type'] 123 | for module in m: 124 | pool.apply_async(AttackTargets, args = (module.__name__, request, response, method, key, is_array,), callback = Result(module.__name__, r['requestid'], method, key, request['uri'], request['host'],).callback) 125 | mark_checked(r['id']) 126 | 127 | 128 | -------------------------------------------------------------------------------- /tornado_proxy/socket_wrapper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | owtf is an OWASP+PTES-focused try to unite great tools & facilitate pentesting 4 | Copyright (c) 2013, Abraham Aranguren http://7-a.org 5 | All rights reserved. 
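fuzzer.py above loads every module in exploits/, filters them by their `vtype`/`stype` declarations against the parameter type recorded by parser.py, and runs each `exploit()` call in a multiprocessing pool under a SIGALRM-based time limit (AttackTargets). The same timeout pattern in isolation, as a minimal standalone sketch (the slow function and the timings are made up):

```python
# Standalone sketch of the SIGALRM timeout pattern used by fuzzer.AttackTargets().
# Unix-only: signal.alarm() delivers SIGALRM to the current (worker) process.
import signal
import time


class TimeoutError(Exception):
    pass


def timeout_handler(signum, frame):
    raise TimeoutError()


def run_with_timeout(func, timeout, *args, **kwargs):
    signal.signal(signal.SIGALRM, timeout_handler)
    signal.alarm(timeout)            # raise TimeoutError after `timeout` seconds
    try:
        return func(*args, **kwargs)
    except TimeoutError:
        print('%s timed out after %ds' % (func.__name__, timeout))
        return None
    finally:
        signal.alarm(0)              # always cancel any pending alarm


def slow_exploit():
    time.sleep(10)
    return {'result': 'safe'}


if __name__ == '__main__':
    print(run_with_timeout(slow_exploit, 2))   # prints the timeout message, then None
```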
6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | * Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | * Neither the name of the copyright owner nor the 15 | names of its contributors may be used to endorse or promote products 16 | derived from this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | # Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in) 30 | # as a part of Google Summer of Code 2013 31 | ''' 32 | from tornado import ioloop 33 | import ssl 34 | 35 | from gen_cert import gen_signed_cert 36 | 37 | 38 | def wrap_socket(socket, domain, success=None, failure=None, io=None, **options): 39 | """Wrap an active socket in an SSL socket.""" 40 | 41 | # # Default Options 42 | options.setdefault('do_handshake_on_connect', False) 43 | options.setdefault('ssl_version', ssl.PROTOCOL_SSLv23) 44 | options.setdefault('server_side', True) 45 | 46 | # The idea is to handle domains with greater than 3 dots using wildcard certs 47 | if domain.count(".") >= 3: 48 | key, cert = gen_signed_cert("*." + ".".join(domain.split(".")[-3:])) 49 | else: 50 | key, cert = gen_signed_cert(domain) 51 | options.setdefault('certfile', cert) 52 | options.setdefault('keyfile', key) 53 | 54 | # # Handlers 55 | 56 | def done(): 57 | """Handshake finished successfully.""" 58 | 59 | io.remove_handler(wrapped.fileno()) 60 | success and success(wrapped) 61 | 62 | def error(): 63 | """The handshake failed.""" 64 | 65 | if failure: 66 | return failure(wrapped) 67 | # # By default, just close the socket. 68 | io.remove_handler(wrapped.fileno()) 69 | wrapped.close() 70 | 71 | def handshake(fd, events): 72 | """Handler fGetting the same error here... also looking for answers.... 73 | TheHippo Dec 19 '12 at 20:29or SSL handshake negotiation. 
74 | See Python docs for ssl.do_handshake().""" 75 | 76 | if events & io.ERROR: 77 | error() 78 | return 79 | 80 | try: 81 | new_state = io.ERROR 82 | wrapped.do_handshake() 83 | return done() 84 | except ssl.SSLError as exc: 85 | if exc.args[0] == ssl.SSL_ERROR_WANT_READ: 86 | new_state |= io.READ 87 | elif exc.args[0] == ssl.SSL_ERROR_WANT_WRITE: 88 | new_state |= io.WRITE 89 | else: 90 | raise 91 | 92 | if new_state != state[0]: 93 | state[0] = new_state 94 | io.update_handler(fd, new_state) 95 | 96 | # # set up handshake state; use a list as a mutable cell. 97 | io = io or ioloop.IOLoop.instance() 98 | state = [io.ERROR] 99 | 100 | # # Wrap the socket; swap out handlers. 101 | io.remove_handler(socket.fileno()) 102 | wrapped = ssl.SSLSocket(socket, **options) 103 | wrapped.setblocking(0) 104 | io.add_handler(wrapped.fileno(), handshake, state[0]) 105 | 106 | # # Begin the handshake. 107 | handshake(wrapped.fileno(), 0) 108 | return wrapped 109 | -------------------------------------------------------------------------------- /tornado_proxy/gen_cert.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' 3 | owtf is an OWASP+PTES-focused try to unite great tools & facilitate pentesting 4 | Copyright (c) 2013, Abraham Aranguren http://7-a.org 5 | All rights reserved. 6 | 7 | Redistribution and use in source and binary forms, with or without 8 | modification, are permitted provided that the following conditions are met: 9 | * Redistributions of source code must retain the above copyright 10 | notice, this list of conditions and the following disclaimer. 11 | * Redistributions in binary form must reproduce the above copyright 12 | notice, this list of conditions and the following disclaimer in the 13 | documentation and/or other materials provided with the distribution. 14 | * Neither the name of the copyright owner nor the 15 | names of its contributors may be used to endorse or promote products 16 | derived from this software without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | 29 | # Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in) 30 | # as a part of Google Summer of Code 2013 31 | ''' 32 | from OpenSSL import crypto 33 | import os 34 | import hashlib 35 | 36 | from filelock import FileLock 37 | 38 | 39 | def gen_signed_cert(domain, 40 | ca_crt="ca.crt", 41 | ca_key="ca.key", 42 | key_path = "cert.key" 43 | ): 44 | """ 45 | This function takes a domain name as a parameter and then creates a certificate and key with the 46 | domain name(replacing dots by underscores), finally signing the certificate using specified CA and 47 | returns the path of key and cert files. 
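The gen_signed_cert() helper here creates one leaf certificate per intercepted domain, signs it with the CA produced by setup_https_intercept.sh, caches it under certs/ and serialises the work with FileLock. A quick sanity-check sketch, assuming the setup script has already been run in the tornado_proxy directory (so ca.crt, ca.key, cert.key and certs/ exist) and Python 2, which this module's OpenSSL calls target; 'example.org' is an arbitrary test domain:

```python
# Generate and inspect a leaf certificate, relying only on files created by
# setup_https_intercept.sh.
from OpenSSL import crypto

from gen_cert import gen_signed_cert

key_path, cert_path = gen_signed_cert('example.org')

with open(cert_path) as f:
    cert = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())

print('cert file : %s' % cert_path)
print('key file  : %s' % key_path)
print('subject OU: %s' % cert.get_subject().OU)   # gen_cert stores the domain here
print('issuer CN : %s' % cert.get_issuer().CN)    # "proxy2 CA" from the setup script
print('serial    : %d' % cert.get_serial_number())
```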
If you are yet to generate a CA then check the top comments 48 | """ 49 | certs_folder = "certs/" 50 | 51 | 52 | cert_path = os.path.join(certs_folder, domain.replace('.', '_') + ".crt") 53 | 54 | # The first conditions checks if file exists, and does nothing if true 55 | # If file doenst exist lock is obtained for writing (Other processes in race must wait) 56 | # After obtaining lock another check to handle race conditions gracefully 57 | if os.path.exists(key_path) and os.path.exists(cert_path): 58 | pass 59 | else: 60 | with FileLock(cert_path, timeout=2): 61 | # Check happens if the certificate and key pair already exists for a domain 62 | if os.path.exists(key_path) and os.path.exists(cert_path): 63 | pass 64 | else: 65 | # Serial Generation - Serial number must be unique for each certificate, 66 | # so serial is generated based on domain name 67 | md5_hash = hashlib.md5() 68 | md5_hash.update(str.encode(domain)) 69 | serial = int(md5_hash.hexdigest(), 36) 70 | 71 | # The CA stuff is loaded from the same folder as this script 72 | ca_cert = crypto.load_certificate(crypto.FILETYPE_PEM, open(ca_crt).read()) 73 | # The last parameter is the password for your CA key file 74 | ca_key = crypto.load_privatekey(crypto.FILETYPE_PEM, open(ca_key).read()) 75 | 76 | key = crypto.load_privatekey(crypto.FILETYPE_PEM, open(key_path).read()) 77 | 78 | #key.generate_key(crypto.TYPE_RSA, 2048) 79 | 80 | cert = crypto.X509() 81 | cert.set_version(3-1) # version 3, starts at 0 82 | cert.get_subject().C = "IN" 83 | cert.get_subject().ST = "AP" 84 | cert.get_subject().L = "127.0.0.1" 85 | cert.get_subject().O = "TProxy" 86 | cert.get_subject().OU = domain 87 | #cert.get_subject().CN = domain 88 | cert.gmtime_adj_notBefore(0) 89 | cert.gmtime_adj_notAfter(365 * 24 * 60 * 60) 90 | cert.set_serial_number(serial) 91 | cert.set_issuer(ca_cert.get_subject()) 92 | 93 | #san_list = ["DNS:"+domain] 94 | cert.add_extensions([ 95 | crypto.X509Extension( 96 | "subjectAltName", True, "DNS: "+domain 97 | ) 98 | ]) 99 | 100 | cert.set_pubkey(key) 101 | cert.sign(ca_key, "sha256") 102 | 103 | # The key and cert files are dumped and their paths are returned 104 | 105 | domain_cert = open(cert_path, "wb") 106 | domain_cert.write(crypto.dump_certificate(crypto.FILETYPE_PEM, cert)) 107 | print(("[*] Generated signed certificate for %s" % (domain))) 108 | return key_path, cert_path 109 | -------------------------------------------------------------------------------- /templates/show_detect.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 |
16 | 17 | 43 | 44 |
45 | 49 | 50 | 51 |
52 | 142 | -------------------------------------------------------------------------------- /templates/show_vul.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 |
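templates/show_detect.html and templates/show_vul.html are served by httplog.py at /detector and /vul, and are backed by the JSON endpoints defined in the same file (/api/get_detector, /api/get_vul, plus the del_* routes for hiding triaged entries). A usage sketch of those endpoints, assuming httplog.py is running on Flask's default port, the same base URL that XSSHelper.user.js reports to:

```python
# Poke the reporting API exposed by httplog.py. Assumes it is listening on
# http://127.0.0.1:5000, as XSSHelper.user.js expects.
import json

import requests

BASE = 'http://127.0.0.1:5000'

# What the userscript does when it spots something suspicious:
requests.post(BASE + '/report_log', data={
    'url': 'http://testphp.vulnweb.com/search.php?q=1',
    'method': 'window.onerror: SyntaxError',
    'info': 'Message: SyntaxError ...',
})

# What the /detector and /vul pages consume:
for endpoint in ('/api/get_detector', '/api/get_vul'):
    rows = json.loads(requests.get(BASE + endpoint).text)
    print('%s -> %d rows' % (endpoint, len(rows)))

# Hide an entry once it has been triaged (mirrors the del_* routes):
requests.get(BASE + '/api/del_detector', params={'hash': 'the-detector-hash'})
```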
16 | 17 | 43 | 44 |
45 | 49 | 50 | 51 |
52 | 142 | -------------------------------------------------------------------------------- /proxy.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import sys, json, time 3 | import config 4 | 5 | from tornado_proxy.proxy import ProxyHandler, ProxyServer 6 | from lib.httphelper import mark_unique, process_post_body, check_lang 7 | 8 | import lib.detector 9 | import hashlib 10 | 11 | import os 12 | 13 | py3k = sys.version_info.major > 2 14 | if py3k: 15 | from urllib import parse as urlparse 16 | else: 17 | import urlparse 18 | 19 | def obj2Dict(obj): 20 | ret = {} 21 | for i in obj: 22 | ret[i] = obj[i] 23 | return ret 24 | 25 | def parseReq(request, rtype): 26 | #rtype 代表post的类型 27 | #'multipart'/'json'/'qs'/'raw'/rewrite 28 | Entity={} 29 | Entity['rtype']=rtype 30 | Entity['body']=request.body 31 | Entity['protocol']=request.protocol 32 | Entity['remote_ip']=request.remote_ip 33 | Entity['host']=request.host 34 | Entity['method']=request.method 35 | Entity['uri']=request.uri 36 | Entity['version']=request.version 37 | Entity['headers']=obj2Dict(request.headers) 38 | return Entity 39 | 40 | def parseRsp(res, response_body, flag, raw_response_body): 41 | Entity={} 42 | Entity['code']=res.code 43 | Entity['headers']=obj2Dict(res.headers) 44 | if flag: 45 | Entity['response_body']=response_body[:config.max_save_size] 46 | else: 47 | Entity['response_body']= 'ignored:'+hashlib.md5(raw_response_body).hexdigest() 48 | return Entity 49 | 50 | def makeFullUri(uri, host, protocol): 51 | if urlparse.urlparse(uri).scheme: 52 | return uri 53 | else: 54 | return protocol+'://'+host+os.path.normpath(uri) 55 | 56 | class RequestProxy(ProxyHandler): 57 | def newgidtype(self, req, res): 58 | # 如果是ajax请求,合并到上一个group 59 | last = config.dbconn().fetch_rows('http', '*', {'type': 1}, order='time desc', limit=1, fetchone=True) 60 | if not last: return 1,1 61 | if req.headers.get('x-requested-with', '').lower() == 'xmlhttprequest' or 'xml' in res.headers.get('content-type', '') or 'json' in res.headers.get('content-type', ''): 62 | return last['gid'],2 63 | elif 'html' in res.headers.get('content-type', '') and time.time()-last['time']>3: 64 | return last['gid']+1,1 65 | else: 66 | return last['gid'],3 67 | 68 | def request_handler(self, request): 69 | pass 70 | 71 | def response_handler(self, request, response, response_body): 72 | pass 73 | 74 | def save_handler(self, request, response, response_body): 75 | #check res.status 76 | #if re.match(config.filter_code, str(res.status)): return 77 | reqtype = 'qs' 78 | 79 | raw_response_body = '' 80 | 81 | if request.body: 82 | request.body = request.body.decode('utf-8','ignore') 83 | if response_body: 84 | raw_response_body = response_body 85 | response_body = response_body.decode('utf-8','ignore') 86 | request.uri = makeFullUri(request.uri, request.host, request.protocol) 87 | 88 | parsed = urlparse.urlparse(request.uri) 89 | 90 | #check host 91 | if config.included_host and not len([h for h in config.included_host if request.host.endswith(h)]): return 92 | if len([h for h in config.excluded_host if request.host.endswith(h)]): return 93 | #check fileext 94 | if len([h for h in config.filter_file if parsed.path.endswith(h)]): return 95 | 96 | path = os.path.normpath(parsed.path) 97 | #check query string 98 | if request.method == 'GET' and not parsed.query: 99 | if os.path.splitext(path)[1]: return 100 | if len(path.split('/')) < 3: return 101 | reqtype = 'rewrite' 102 | 103 | #don' save response 
body into database 104 | save_body = True 105 | #check query, get must have query string or url-rewrited 106 | #GET method, have ext and do not have query string 107 | #if os.uri.splitext(request.uri)[1] and request.method == 'GET' and not urlparse.urlparse(request.uri).query: save_body = False 108 | #only some content-type save to database 109 | if config.filter_content_type and not len([h for h in config.filter_content_type if h in response.headers.get('content-type', '')]): save_body = False 110 | 111 | conn = config.dbconn() 112 | 113 | separator = None 114 | args = {} 115 | 116 | if 'multipart/form-data;' in request.headers.get('content-type', ''): 117 | separator = request.headers.get('content-type').split("=")[-1] 118 | 119 | if request.method == 'POST': 120 | reqtype, args, files = process_post_body(request.body, separator) 121 | 122 | signature = mark_unique(request.uri, args) 123 | 124 | if conn.fetch_rows('http', condition={'signature': signature}, fetchone=True): 125 | return 126 | 127 | # site basic info 128 | siteinfo = conn.fetch_rows('siteinfo', '*', {'host': request.host, 'key': 'lang'}, fetchone=True) 129 | if not siteinfo: 130 | lang, framework = check_lang(request, response) 131 | conn.insert('siteinfo', {'host': request.host, 'key': 'lang', 'value': lang}) 132 | conn.insert('siteinfo', {'host': request.host, 'key': 'framework', 'value': framework}) 133 | 134 | if siteinfo and siteinfo.get('value', '') == 'unkonwn': 135 | lang = check_lang(request.uri) 136 | conn.update('siteinfo', {'value': lang}, condition={'host': request.host, 'key': 'lang'}) 137 | 138 | 139 | req = parseReq(request, reqtype) 140 | rsp = parseRsp(response, response_body, save_body, raw_response_body) 141 | 142 | 143 | gid, rtype = self.newgidtype(request, response) 144 | 145 | data = {'gid':gid, 'host': request.host,'req':json.dumps(req), 'rsp':json.dumps(rsp), 'time':time.time(), 'type':rtype, 'signature': signature} 146 | conn.insert('http', data) 147 | 148 | detector = [i for i in dir(lib.detector) if i.startswith('detect_')] 149 | for d in detector: 150 | try: 151 | c = getattr(lib.detector,d) 152 | c(req, rsp) 153 | except Exception as e: 154 | print e 155 | 156 | if __name__ == "__main__": 157 | try: 158 | proxy = ProxyServer(RequestProxy, inbound_ip=config.proxy_host, inbound_port=config.proxy_port) 159 | proxy.start() 160 | except KeyboardInterrupt: 161 | proxy.stop() 162 | 163 | 164 | -------------------------------------------------------------------------------- /XSSHelper.user.js: -------------------------------------------------------------------------------- 1 | // ==UserScript== 2 | // @name XSSHelper 3 | // @version 0.1 4 | // @description XSS Helper 5 | // @exclude http://*.google.com/* 6 | // @exclude https://*.google.com/* 7 | // @grant none 8 | // @run-at document-start 9 | // ==/UserScript== 10 | 11 | 12 | // @match http://*/* 13 | // @match https://*/* 14 | 15 | //md5_salt 16 | __XSSHelper_reporturl = "http://127.0.0.1:5000/report_log"; 17 | function __XSSHelper_ReportDetection(method, info){ 18 | //var url = __XSSHelper_reporturl+"?url="+escape(window.location.href)+"&method="+escape(method)+"&info="+escape(info); 19 | //new Image().src=url; 20 | var xmlhttp=new XMLHttpRequest(); 21 | xmlhttp.open("POST",__XSSHelper_reporturl,true); 22 | xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded"); 23 | xmlhttp.send("url="+escape(window.location.href)+"&method="+escape(method)+"&info="+escape(info)); 24 | } 25 | 26 | // hook window.onerror 27 | (function() { 28 
/XSSHelper.user.js:
--------------------------------------------------------------------------------
1 | // ==UserScript==
2 | // @name XSSHelper
3 | // @version 0.1
4 | // @description XSS Helper
5 | // @exclude http://*.google.com/*
6 | // @exclude https://*.google.com/*
7 | // @grant none
8 | // @run-at document-start
9 | // ==/UserScript==
10 | 
11 | 
12 | // @match http://*/*
13 | // @match https://*/*
14 | 
15 | //md5_salt
16 | __XSSHelper_reporturl = "http://127.0.0.1:5000/report_log";
17 | function __XSSHelper_ReportDetection(method, info){
18 |     //var url = __XSSHelper_reporturl+"?url="+escape(window.location.href)+"&method="+escape(method)+"&info="+escape(info);
19 |     //new Image().src=url;
20 |     var xmlhttp=new XMLHttpRequest();
21 |     xmlhttp.open("POST",__XSSHelper_reporturl,true);
22 |     xmlhttp.setRequestHeader("Content-type","application/x-www-form-urlencoded");
23 |     xmlhttp.send("url="+escape(window.location.href)+"&method="+escape(method)+"&info="+escape(info));
24 | }
25 | 
26 | // hook window.onerror
27 | (function() {
28 |     __XSSHelper_onerror = window.onerror;
29 |     window.onerror = function (msg, url, lineNo, columnNo, error) {
30 |         if(__XSSHelper_onerror) __XSSHelper_onerror(msg, url, lineNo, columnNo, error);
31 |         var string = msg.toLowerCase();
32 |         var substring = "script error";
33 |         if (string.indexOf(substring) > -1){
34 |             //alert('Script Error: See Browser Console for Detail');
35 |             var message = [
36 |                 'Message: ' + msg,
37 |                 'URL: ' + url,
38 |                 'Line: ' + lineNo,
39 |                 'Column: ' + columnNo,
40 |                 'Error object: ' + JSON.stringify(error)
41 |             ].join('\n');
42 |             __XSSHelper_ReportDetection('window.onerror: script error', message);
43 |         } else {
44 |             var message = [
45 |                 'Message: ' + msg,
46 |                 'URL: ' + url,
47 |                 'Line: ' + lineNo,
48 |                 'Column: ' + columnNo,
49 |                 'Error object: ' + JSON.stringify(error)
50 |             ].join('\n');
51 |             if(msg.indexOf('SyntaxError') != -1 || msg.indexOf('Uncaught ReferenceError') != -1){
52 |                 console.log(message);
53 |                 __XSSHelper_ReportDetection('window.onerror: SyntaxError', message);
54 |             }
55 |         }
56 | 
57 |         return false;
58 |     };
59 | 
60 | })();
61 | 
62 | function __XSSHelper_GetQueryParams() {
63 |     var url = location.search; // the query string after the "?" in the url
64 |     var theRequest = new Object();
65 |     if (url.indexOf("?") != -1) {
66 |         var str = url.substr(1);
67 |         var strs = str.split("&");
68 |         for(var i = 0; i < strs.length; i ++) {
69 |             theRequest[strs[i].split("=")[0]] = unescape(strs[i].split("=")[1]);
70 |         }
71 |     }
72 |     return theRequest;
73 | }
74 | 
75 | // find flash
76 | /*
77 | (function() {
78 |     setTimeout(function(){
79 |         var urls = performance.getEntries();
80 |         var have_flash = false;
81 |         urls.forEach(function(e){
82 |             if(e.name.indexOf(".swf") !== -1){
83 |                 have_flash = true;
84 |                 console.warn("%c"+location.hostname+' Flash found: '+e.name,"color:red;font-size:13px");
85 |             }
86 |         });
87 |         if(have_flash) __XSSHelper_ReportDetection('flash', 'Flash found!');
88 |     }, 5000);
89 | })();
90 | */
91 | //hook innerHTML
92 | (function () {
93 |     var setter = Object.getOwnPropertyDescriptor(Element.prototype, 'innerHTML').set;
94 |     Object.defineProperty(Element.prototype, 'innerHTML', {
95 |         set: function innerHTML_Setter(val) {
96 |             /*
97 |             console.group();
98 |             console.log('innerHTML on Object:', this);
99 |             console.log('Value:', JSON.stringify(val));
100 |             var stack = new Error().stack;
101 |             // Remove this function from the stack:
102 |             stack = stack.trim().split('\n').slice(1).join(' <- ')
103 |             console.log('Stack: ', stack)
104 |             console.groupEnd(); */
105 |             var params = __XSSHelper_GetQueryParams();
106 |             if(window.location.hash.substr(1)) params["__local_hash"] = window.location.hash.substr(1);
107 |             var info = '';
108 |             for (var key in params){
109 |                 if(val.indexOf(params[key]) !== -1 && isNaN(params[key])){
110 |                     console.group();
111 |                     //console.log(document.currentScript.src);
112 |                     console.log('innerHTML on Object:', this);
113 |                     console.log('Key:', JSON.stringify(key));
114 |                     console.log('Value:', JSON.stringify(val.substr(0, 256)));
115 |                     var stack = new Error().stack;
116 |                     // Remove this function from the stack:
117 |                     stack = stack.trim().split('\n').slice(1).join(' <- ');
118 |                     console.log('Stack: ', stack);
119 |                     console.groupEnd();
120 |                     info += [
121 |                         'innerHTML on Object: ' + this,
122 |                         'Key: ' + key,
123 |                         'Value: ' + params[key],
124 |                         'stack: ' + stack
125 |                     ].join('\n')+"\n";
126 |                     //alert("query appears in innerHTML");
127 |                 }
128 |             }
129 |             if(info){
130 |                 info += "\n\ninnerHTML: "+val;
131 |                 __XSSHelper_ReportDetection('innerHTML: query appears in innerHTML', info);
132 |             }
133 |             // call original innerHTML setter:
134 |             return setter.call(this, val);
135 |         }
136 |     });
137 | })();
138 | 
139 | //hook window.postMessage
140 | (function () {
141 |     window.__postMessage__ = window.postMessage;
142 |     window.postMessage = function(){
143 |         if(arguments[1] == "*"){
144 |             //alert("Post Message origin is *!");
145 |             console.log(arguments[0]);
146 |             console.log(document.currentScript.src);
147 |             var info = ["Data: "+arguments[0],
148 |                 "origin: "+arguments[1],
149 |                 "srcUrl: "+document.currentScript.src].join('\n');
150 |             __XSSHelper_ReportDetection('window.postMessage: origin is *', info);
151 |         }
152 |         window.__postMessage__.apply(this, arguments);
153 |     };
154 | })();
155 | 
156 | // check onmessage
157 | (function () {
158 |     // keep references to the original functions being hooked
159 |     var EventTarget_addEventListener = EventTarget.prototype.addEventListener;
160 |     var document_addEventListener = document.addEventListener;
161 | 
162 |     // replacement addEventListener used for hooking
163 |     function addEventListener(type, func, useCapture) {
164 |         var _addEventListener = this === document ? document_addEventListener : EventTarget_addEventListener;
165 | 
166 |         if(this === window && type === 'message'){
167 |             console.log("window.onmessage found");
168 |             console.log(func);
169 |             var info = ["Function: "+func].join('\n');
170 |             __XSSHelper_ReportDetection('window.onmessage: page has onmessage', info);
171 |         }
172 | 
173 |         _addEventListener.apply(this, arguments);
174 | 
175 |     }
176 |     // hook addEventListener
177 |     EventTarget.prototype.addEventListener = addEventListener;
178 |     document.addEventListener = addEventListener;
179 | })();
180 | 
181 | //hook window.prompt
182 | (function () {
183 |     window.__prompt__ = window.prompt;
184 |     window.prompt = function(){
185 |         if(arguments[0] == 233){
186 |             var info = ["url: "+document.currentScript.src].join('\n');
187 |             __XSSHelper_ReportDetection('window.prompt: xss found', info);
188 |         }
189 |         return window.__prompt__.apply(this, arguments);
190 |     };
191 | })();
--------------------------------------------------------------------------------
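All of the hooks above report through __XSSHelper_ReportDetection, which sends a form-encoded POST with three fields (url, method, info) to __XSSHelper_reporturl, http://127.0.0.1:5000/report_log by default. The real receiver in this repository is implemented in httplog.py; the Flask sketch below is not that implementation, only an illustration of the payload shape a compatible receiver has to accept.

```python
# Illustrative Flask receiver only -- the project's real endpoint lives in httplog.py.
from flask import Flask, request

app = Flask(__name__)

@app.route('/report_log', methods=['POST'])
def report_log():
    url = request.form.get('url', '')        # page the hook fired on
    method = request.form.get('method', '')  # e.g. 'window.onerror: SyntaxError'
    info = request.form.get('info', '')      # multi-line detail string built by the hook
    print('[xss-helper] %s on %s\n%s' % (method, url, info))
    return 'ok'

if __name__ == '__main__':
    app.run(host='127.0.0.1', port=5000)
```

Anything that accepts this POST can stand in during testing; the userscript fires the request and ignores the response body.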
/tornado_proxy/proxy.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | '''
3 | owtf is an OWASP+PTES-focused try to unite great tools & facilitate pentesting
4 | Copyright (c) 2013, Abraham Aranguren http://7-a.org
5 | All rights reserved.
6 | 
7 | Redistribution and use in source and binary forms, with or without
8 | modification, are permitted provided that the following conditions are met:
9 |     * Redistributions of source code must retain the above copyright
10 |       notice, this list of conditions and the following disclaimer.
11 |     * Redistributions in binary form must reproduce the above copyright
12 |       notice, this list of conditions and the following disclaimer in the
13 |       documentation and/or other materials provided with the distribution.
14 |     * Neither the name of the copyright owner nor the
15 |       names of its contributors may be used to endorse or promote products
16 |       derived from this software without specific prior written permission.
17 | 
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 | 
29 | # Inbound Proxy Module developed by Bharadwaj Machiraju (blog.tunnelshade.in)
30 | # as a part of Google Summer of Code 2013
31 | 
32 | # Modified by md5_salt (5alt.me)
33 | '''
34 | import tornado.httpserver
35 | import tornado.ioloop
36 | import tornado.iostream
37 | import tornado.web
38 | import tornado.curl_httpclient
39 | import socket
40 | import ssl
41 | import tornado.escape
42 | import tornado.httputil
43 | import os
44 | 
45 | from socket_wrapper import wrap_socket
46 | 
47 | def join_with_script_dir(path):
48 |     return os.path.join(os.path.dirname(os.path.abspath(__file__)), path)
49 | 
50 | 
51 | class ProxyHandler(tornado.web.RequestHandler):
52 |     """
53 |     This RequestHandler processes all the requests that the application receives
54 |     """
55 |     SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT', 'HEAD', 'PUT', 'DELETE', 'OPTIONS', 'PATCH']
56 | 
57 |     cakey = 'ca.key'
58 |     cacert = 'ca.crt'
59 |     certdir = 'certs/'
60 | 
61 |     response_body = None
62 | 
63 |     def request_handler(self, request):
64 |         pass
65 | 
66 |     def response_handler(self, request, response, response_body):
67 |         pass
68 | 
69 |     def save_handler(self, request, response, response_body):
70 |         pass
71 | 
72 |     def set_status(self, status_code, reason=None):
73 |         """Sets the status code for our response.
74 |         Overriding is done so as to handle unknown
75 |         response codes gracefully.
76 |         """
77 |         self._status_code = status_code
78 |         if reason is not None:
79 |             self._reason = tornado.escape.native_str(reason)
80 |         else:
81 |             try:
82 |                 self._reason = tornado.httputil.responses[status_code]
83 |             except KeyError:
84 |                 self._reason = tornado.escape.native_str("Unknown Error")
85 | 
86 |     @tornado.web.asynchronous
87 |     def get(self):
88 |         """
89 |         * This function handles all requests except the connect request.
90 |         * Once ssl stream is formed between browser and proxy, the requests are
91 |           then processed by this function
92 |         """
93 | 
94 |         # Data for handling headers through a streaming callback
95 |         restricted_headers = ['Content-Length',
96 |                               'Content-Encoding',
97 |                               'Etag',
98 |                               'Transfer-Encoding',
99 |                               'Connection',
100 |                               'Vary',
101 |                               'Accept-Ranges',
102 |                               'Pragma']
103 | 
104 |         # This function is a callback after the async client gets the full response
105 |         # This method will be improved with more headers from original responses
106 |         def handle_response(response):
107 | 
108 |             self.response_body = response.body if response.body else self.response_body
109 | 
110 |             # Hook response
111 |             ret = self.response_handler(self.request, response, self.response_body)
112 |             if ret:
113 |                 self.response_body = ret
114 |             if self.response_body:
115 |                 self.write(self.response_body)
116 | 
117 |             self.set_status(response.code)
118 |             for header, value in list(response.headers.items()):
119 |                 if header == "Set-Cookie":
120 |                     # print(("%s: %s" % (header, value)))
121 |                     self.add_header(header, value)
122 |                 else:
123 |                     if header not in restricted_headers:
124 |                         self.set_header(header, value)
125 |             # print("\n\n")
126 |             #self.set_header('Content-Type', response.headers['Content-Type'])
127 |             self.finish()
128 | 
129 |             # Save request and response
130 |             self.save_handler(self.request, response, self.response_body)
131 | 
132 |         # This function is a callback when a small chunk is received
133 |         def handle_data_chunk(data):
134 |             if data:
135 |                 if not self.response_body:
136 |                     self.response_body = data
137 |                 else:
138 |                     self.response_body += data
139 | 
140 |         # Hook request
141 |         self.request_handler(self.request)
142 | 
143 |         # The requests that come through ssl streams are relative requests, so transparent
144 |         # proxying is required. The following snippet decides the url that should be passed
145 |         # to the async client
146 |         if self.request.host in self.request.uri.split('/'):  # Normal Proxy Request
147 |             url = self.request.uri
148 |         else:  # Transparent Proxy Request
149 |             url = self.request.protocol + "://" + self.request.host + self.request.uri
150 | 
151 |         # More headers are to be removed
152 |         for header in ('Connection', 'Pragma', 'Cache-Control'):
153 |             try:
154 |                 del self.request.headers[header]
155 |             except:
156 |                 continue
157 | 
158 |         # httprequest object is created and then passed to async client with a callback
159 |         # pycurl is needed for curl client
160 | 
161 |         async_client = tornado.curl_httpclient.CurlAsyncHTTPClient()
162 |         request = tornado.httpclient.HTTPRequest(
163 |             url=url,
164 |             method=self.request.method,
165 |             body=self.request.body if self.request.method != 'GET' else None,
166 |             headers=self.request.headers,
167 |             follow_redirects=False,
168 |             use_gzip=True,
169 |             streaming_callback=handle_data_chunk,
170 |             header_callback=None,
171 |             proxy_host=self.application.outbound_ip,
172 |             proxy_port=self.application.outbound_port,
173 |             allow_nonstandard_methods=True,
174 |             validate_cert=False)
175 | 
176 |         try:
177 |             async_client.fetch(request, callback=handle_response)
178 |         except Exception as e:
179 |             print(e)
180 | 
181 |     # The following 5 methods can be handled through the above implementation
182 |     @tornado.web.asynchronous
183 |     def post(self):
184 |         return self.get()
185 | 
186 |     @tornado.web.asynchronous
187 |     def head(self):
188 |         return self.get()
189 | 
190 |     @tornado.web.asynchronous
191 |     def put(self):
192 |         return self.get()
193 | 
194 |     @tornado.web.asynchronous
195 |     def delete(self):
196 |         return self.get()
197 | 
198 |     @tornado.web.asynchronous
199 |     def options(self):
200 |         return self.get()
201 | 
202 |     @tornado.web.asynchronous
203 |     def connect(self):
204 |         if os.path.isfile(self.cakey) and os.path.isfile(self.cacert) and os.path.isdir(self.certdir):
205 |             self.connect_intercept()
206 |         else:
207 |             self.connect_relay()
208 | 
209 |     def connect_intercept(self):
210 |         """
211 |         This function gets called when a connect request is received.
212 |         * The host and port are obtained from the request uri
213 |         * A socket is created, wrapped in ssl and then added to SSLIOStream
214 |         * This stream is used to connect to and speak to the remote host on the given port
215 |         * If the server speaks ssl on that port, callback start_tunnel is called
216 |         * An OK response is written back to client
217 |         * The client side socket is wrapped in ssl
218 |         * If the wrapping is successful, a new SSLIOStream is made using that socket
219 |         * The stream is added back to the server for monitoring
220 |         """
221 |         host, port = self.request.uri.split(':')
222 | 
223 |         def start_tunnel():
224 |             try:
225 |                 self.request.connection.stream.write(b"HTTP/1.1 200 OK CONNECTION ESTABLISHED\r\n\r\n")
226 |                 wrap_socket(self.request.connection.stream.socket, host, success=ssl_success)
227 |             except tornado.iostream.StreamClosedError:
228 |                 pass
229 | 
230 |         def ssl_success(client_socket):
231 |             client = tornado.iostream.SSLIOStream(client_socket)
232 |             server.handle_stream(client, self.application.inbound_ip)  # lint:ok
233 | 
234 |         try:
235 |             s = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0))
236 |             upstream = tornado.iostream.SSLIOStream(s)
237 |             upstream.connect((host, int(port)), start_tunnel, host)
238 |         except Exception:
239 |             print(("[!] Dropping CONNECT request to " + self.request.uri))
240 |             self.write(b"404 Not Found :P")
241 |             self.finish()
242 | 
243 |     def connect_relay(self):
244 |         host, port = self.request.uri.split(':')
245 |         client = self.request.connection.stream
246 | 
247 |         def read_from_client(data):
248 |             upstream.write(data)
249 | 
250 |         def read_from_upstream(data):
251 |             client.write(data)
252 | 
253 |         def client_close(data=None):
254 |             if upstream.closed():
255 |                 return
256 |             if data:
257 |                 upstream.write(data)
258 |             upstream.close()
259 | 
260 |         def upstream_close(data=None):
261 |             if client.closed():
262 |                 return
263 |             if data:
264 |                 client.write(data)
265 |             client.close()
266 | 
267 |         def start_tunnel():
268 |             client.read_until_close(client_close, read_from_client)
269 |             upstream.read_until_close(upstream_close, read_from_upstream)
270 |             client.write(b'HTTP/1.0 200 Connection established\r\n\r\n')
271 | 
272 |         def on_proxy_response(data=None):
273 |             if data:
274 |                 first_line = data.splitlines()[0]
275 |                 http_v, status, text = first_line.split(None, 2)
276 |                 if int(status) == 200:
277 |                     start_tunnel()
278 |                     return
279 | 
280 |             self.set_status(500)
281 |             self.finish()
282 | 
283 |         def start_proxy_tunnel():
284 |             upstream.write(('CONNECT %s HTTP/1.1\r\n' % self.request.uri).encode())
285 |             upstream.write(('Host: %s\r\n' % self.request.uri).encode())
286 |             upstream.write(b'Proxy-Connection: Keep-Alive\r\n\r\n')
287 |             upstream.read_until(b'\r\n\r\n', on_proxy_response)
288 | 
289 |         s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
290 |         upstream = tornado.iostream.IOStream(s)
291 | 
292 |         if self.application.outbound_ip and self.application.outbound_port:
293 |             upstream.connect((self.application.outbound_ip, self.application.outbound_port), start_proxy_tunnel)
294 |         else:
295 |             upstream.connect((host, int(port)), start_tunnel)
296 | 
297 | 
298 | class ProxyServer(object):
299 | 
300 |     def __init__(self, handler, inbound_ip="0.0.0.0", inbound_port=8088, outbound_ip=None, outbound_port=None):
301 | 
302 |         self.application = tornado.web.Application(handlers=[(r".*", handler)], debug=False, gzip=True)
303 |         self.application.inbound_ip = inbound_ip
304 |         self.application.inbound_port = inbound_port
305 |         self.application.outbound_ip = outbound_ip
306 |         self.application.outbound_port = outbound_port
307 |         global server
308 |         server = tornado.httpserver.HTTPServer(self.application, decompress_request=True)
309 |         self.server = server
310 | 
311 |     # "0" equals the number of cores present in a machine
312 |     def start(self, instances=0):
313 |         try:
314 |             #total = Profiler()
315 |             #app = tornado.web.Application(handlers=[(r".*", handler)], debug=False, gzip=True)
316 |             #global http_server  # Easy to add SSLIOStream later in the request handlers
317 |             #http_server = tornado.httpserver.HTTPServer(app)
318 |             self.server.bind(self.application.inbound_port, address=self.application.inbound_ip)
319 | 
320 |             # To run any number of instances
321 |             self.server.start(instances)
322 |             tornado.ioloop.IOLoop.instance().start()
323 | 
324 |         except Exception as e:
325 |             print(e)
326 | 
327 |     def stop(self):
328 |         tornado.ioloop.IOLoop.instance().stop()
329 |         print("[!] Shutting down the proxy server")
330 | 
331 | if __name__ == "__main__":
332 |     try:
333 |         proxy = ProxyServer(ProxyHandler)
334 |         proxy.start()
335 |     except KeyboardInterrupt:
336 |         proxy.stop()
--------------------------------------------------------------------------------
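ProxyHandler is written to be subclassed: request_handler, response_handler and save_handler are empty hooks, and ZeroExploit's own proxy.py at the repository root passes a RequestProxy subclass into ProxyServer (see its __main__ block earlier in this dump). The sketch below shows the minimal wiring, under the assumption that this module is importable as tornado_proxy.proxy; the subclass name and the printed output are illustrative, not part of the project.

```python
# Minimal usage sketch, assuming this file is importable as tornado_proxy.proxy.
from tornado_proxy.proxy import ProxyHandler, ProxyServer

class LoggingProxy(ProxyHandler):
    def request_handler(self, request):
        # Called before the request is forwarded upstream.
        print('>> %s %s' % (request.method, request.uri))

    def response_handler(self, request, response, response_body):
        # Returning a value here replaces the body sent back to the browser;
        # returning None keeps the upstream body untouched.
        return None

    def save_handler(self, request, response, response_body):
        # Called after finish(); the root proxy.py stores the pair in the database here.
        pass

if __name__ == '__main__':
    # outbound_ip/outbound_port are optional and chain all traffic through an
    # upstream proxy via proxy_host/proxy_port and start_proxy_tunnel.
    proxy = ProxyServer(LoggingProxy, inbound_ip='127.0.0.1', inbound_port=8088)
    try:
        proxy.start()
    except KeyboardInterrupt:
        proxy.stop()
```

For HTTPS interception the handler also expects ca.key, ca.crt and a certs/ directory in the working directory (see the cakey/cacert/certdir class attributes); without them, CONNECT requests fall back to connect_relay and are tunnelled without inspection.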