├── .gitignore ├── AutoSqli.py ├── README.md ├── blacklist.txt ├── config.py ├── models.py ├── proxy.py └── sqlirunner.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | sqlmap/ 10 | bin/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | eggs/ 15 | lib/ 16 | lib64/ 17 | parts/ 18 | sdist/ 19 | var/ 20 | *.egg-info/ 21 | .installed.cfg 22 | *.egg 23 | 24 | # Installer logs 25 | pip-log.txt 26 | pip-delete-this-directory.txt 27 | 28 | # Unit test / coverage reports 29 | .tox/ 30 | .coverage 31 | .cache 32 | nosetests.xml 33 | coverage.xml 34 | 35 | # Translations 36 | *.mo 37 | 38 | # Mr Developer 39 | .mr.developer.cfg 40 | .project 41 | .pydevproject 42 | 43 | # Rope 44 | .ropeproject 45 | 46 | # Django stuff: 47 | *.log 48 | *.pot 49 | 50 | # Sphinx documentation 51 | docs/_build/ -------------------------------------------------------------------------------- /AutoSqli.py: -------------------------------------------------------------------------------- 1 | # -*-coding:utf-8-*- 2 | import requests 3 | import time 4 | import json 5 | 6 | from threading import Thread 7 | from models import * 8 | 9 | class AutoSqli(Thread): 10 | def __init__(self, server='', target='', 11 | data='', referer='', cookie='', req_text=''): 12 | Thread.__init__(self) 13 | self.server = server 14 | if self.server[-1] != '/': 15 | self.server = self.server + '/' 16 | self.target = target 17 | self.taskid = '' 18 | self.engineid = '' 19 | self.status = '' 20 | self.data = data 21 | self.referer = referer 22 | self.cookie = cookie 23 | self.req_text = req_text 24 | self.start_time = time.time() 25 | 26 | def task_new(self): 27 | self.taskid = json.loads( 28 | requests.get(self.server + 'task/new').text)['taskid'] 29 | print 'Created new task: ' + self.taskid + "\t" + self.target 30 | if len(self.taskid) > 0: 31 | return True 32 | return False 33 | 34 | def task_delete(self): 35 | if json.loads(requests.get(self.server + 'task/' + self.taskid + '/delete').text)['success']: 36 | print '[%s] Deleted task' % (self.taskid) 37 | return True 38 | return False 39 | 40 | def scan_start(self): 41 | headers = {'Content-Type': 'application/json'} 42 | payload = { 43 | 'url': self.target, 44 | 'data': self.data, 45 | 'cookie': self.cookie, 46 | 'referer': self.referer} 47 | url = self.server + 'scan/' + self.taskid + '/start' 48 | t = json.loads( 49 | requests.post(url, data=json.dumps(payload), headers=headers).text) 50 | self.engineid = t['engineid'] 51 | if len(str(self.engineid)) > 0 and t['success']: 52 | return True 53 | return False 54 | 55 | def scan_status(self): 56 | self.status = json.loads( 57 | requests.get(self.server + 'scan/' + self.taskid + '/status').text)['status'] 58 | if self.status == 'running': 59 | return 'running' 60 | elif self.status == 'terminated': 61 | return 'terminated' 62 | else: 63 | return 'error' 64 | 65 | def scan_data(self): 66 | self.data = json.loads( 67 | requests.get(self.server + 'scan/' + self.taskid + '/data').text)['data'] 68 | if len(self.data) == 0: 69 | print 'not injection:\t' + self.target 70 | else: 71 | print '=======> injection:\t' + self.target 72 | SQLIRecords.insert(url=self.target, request_text=self.req_text).execute() 73 | 74 | def option_set(self): 75 | headers = {'Content-Type': 'application/json'} 76 | option = {"options": { 77 | "smart": True, 78 | } 79 | } 80 | url = self.server + 'option/' + self.taskid + '/set' 81 | t = json.loads( 82 | requests.post(url, data=json.dumps(option), headers=headers).text) 83 | 84 | def scan_stop(self): 85 | json.loads( 86 | requests.get(self.server + 'scan/' + self.taskid + '/stop').text)['success'] 87 | 88 | def scan_kill(self): 89 | json.loads( 90 | requests.get(self.server + 'scan/' + self.taskid + '/kill').text)['success'] 91 | 92 | def write_to_db(self): 93 | pass 94 | 95 | def run(self): 96 | try: 97 | if not self.task_new(): 98 | return False 99 | self.option_set() 100 | if not self.scan_start(): 101 | return False 102 | while True: 103 | if self.scan_status() == 'running': 104 | time.sleep(10) 105 | elif self.scan_status() == 'terminated': 106 | break 107 | else: 108 | break 109 | print self.target + ":\t" + str(time.time() - self.start_time) 110 | if time.time() - self.start_time > 500: 111 | error = True 112 | self.scan_stop() 113 | self.scan_kill() 114 | break 115 | self.scan_data() 116 | self.task_delete() 117 | print self.target + ":\t" + str(time.time() - self.start_time) 118 | except Exception, e: 119 | print e 120 | 121 | 122 | # if __name__ == '__main__': 123 | # t = AutoSqli('http://127.0.0.1:8775', 'http://www.zxssyxx.com/read.asp?id=2471') 124 | # t.run() -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # sqli-proxy 2 | ##0x00 介绍 3 | sqli-proxy使用了sqlmapapi和基于tornado实现的http-proxy来探测上网的流量中是否存在SQL注入。 4 | sqli-proxy暂时不提供Web UI。 5 | ##0x01 配置 6 | 创建本地数据库sqli,相关数据库配置在`config.py`文件中,分别对sqlmapapi的server和数据进行配置,默认配置如下: 7 | ``` 8 | # sqlmapapi server config 9 | SERVER = 'http://127.0.0.1:8775' 10 | 11 | # database config 12 | host = '127.0.0.1' 13 | user = 'root' 14 | password = '' 15 | db_name = 'sqli' 16 | port = 3306 17 | charset = 'utf8' 18 | ``` 19 | 需要安装tornado,requests等第三方python库: 20 | ``` 21 | pip install tornado 22 | pip install requests 23 | ... 24 | ``` 25 | 在`blacklist.txt`配置忽略域名,每行一个,不允许存在空行: 26 | ``` 27 | www.baidu.com 28 | www.qq.com 29 | www.sina.cn 30 | www.163.com 31 | ``` 32 | ##0x02 运行 33 | 1、运行sqlmapapi和proxy.py即可: 34 | ``` 35 | python sqlmapapi.py -s 36 | python proxy.py 8080 baidu.com 37 | ``` 38 | python proxy.py [proxy-port] [detecting-domain] 39 | 默认端口为8080,domain为空。 40 | 41 | 2、设置浏览器代理为: 42 | ``` 43 | 127.0.0.1:[8080/你配置过的端口] 44 | ``` 45 | ##0x03 结果 46 | 结果保存在sqli数据库中的sqlirecords表中,分为url和request_body。 47 | -------------------------------------------------------------------------------- /blacklist.txt: -------------------------------------------------------------------------------- 1 | www.baidu.com 2 | www.qq.com 3 | www.sina.cn 4 | www.163.com 5 | www.wooyun.org 6 | weibo.com 7 | zone.wooyun.org 8 | drops.wooyun.org 9 | www.zhihu.com 10 | blog.csdn.net 11 | login.sina.com.cn 12 | widget.weibo.com -------------------------------------------------------------------------------- /config.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # sqlmapapi server config 5 | SERVER = 'http://127.0.0.1:8775' 6 | 7 | # database config 8 | host = '127.0.0.1' 9 | user = 'root' 10 | password = '' 11 | db_name = 'sqli' 12 | port = 3306 13 | charset = 'utf8' -------------------------------------------------------------------------------- /models.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | import sys 4 | import MySQLdb 5 | 6 | from config import * 7 | from peewee import * 8 | 9 | database = MySQLDatabase(host=host, user=user, 10 | passwd=password, database=db_name, 11 | port=port, charset=charset) 12 | 13 | def create_tables(): 14 | database.create_tables([SQLIRecords], True) 15 | 16 | class SQLIRecords(Model): 17 | url = CharField() 18 | request_text = TextField() 19 | class Meta: 20 | database = database 21 | 22 | -------------------------------------------------------------------------------- /proxy.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | """ 5 | ProxyHandler is a http proxy based on Tornado. The default port is 8080. 6 | Run this command to run the proxy: 7 | python proxy --port=8080 8 | Use the `port` option to change the proxy's port. 9 | """ 10 | 11 | import sys 12 | import socket 13 | 14 | import tornado.httpclient 15 | import tornado.httpserver 16 | import tornado.ioloop 17 | import tornado.web 18 | 19 | from models import * 20 | from urlparse import urlparse 21 | from sqlirunner import SqliRunner 22 | 23 | domain = "" 24 | urls_pool = set() 25 | blacklist = [] 26 | 27 | class ProxyHandler(tornado.web.RequestHandler): 28 | ''' 29 | A http proxy based on RequestHandler 30 | ''' 31 | def render_request(self, url, callback=None, **kwargs): 32 | ''' 33 | Use `AsyncHTTPClient` send http request 34 | ''' 35 | req = tornado.httpclient.HTTPRequest(url, **kwargs) 36 | asy_client = tornado.httpclient.AsyncHTTPClient() 37 | asy_client.fetch(req, callback) 38 | 39 | # Set supported methods for proxy 40 | SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PATCH", "PUT", 41 | "OPTIONS", "CONNECT") 42 | @tornado.web.asynchronous 43 | def get(self): 44 | # get request body 45 | body = self.request.body 46 | if not body: 47 | body = None 48 | try: 49 | # send request by proxy 50 | timeout = 5 51 | # proxy sends the request 52 | self.render_request( 53 | self.request.uri, 54 | callback=self.on_response, 55 | method=self.request.method, 56 | body=body, 57 | headers=self.request.headers, 58 | request_timeout=timeout, 59 | follow_redirects=True, 60 | allow_nonstandard_methods=True) 61 | except tornado.httpclient.HTTPError as httperror: 62 | if hasattr(httperror, 'response') and httperror.response: 63 | self.on_response(httperror.response) 64 | else: 65 | self.set_status(500) 66 | self.write('Internal server error:\n' + str(httperror)) 67 | self.finish() 68 | 69 | def on_response(self, response): 70 | ''' 71 | http requst callback 72 | ''' 73 | global domain, urls_pool, blacklist 74 | # handle exceptions 75 | if response.error and type(response.error) != tornado.httpclient.HTTPError: 76 | self.set_status(500) 77 | self.write('Internal server error:\n' + str(response.error)) 78 | self.finish() 79 | else: 80 | # run the sqlmap 81 | # only detect few content type: 82 | # text/html, application/json 83 | url = response.request.url 84 | method = response.request.method 85 | urlp = urlparse(url) 86 | host_path = "%s://%s/%s" % (urlp.scheme, urlp.netloc, urlp.path) 87 | content_type = response.headers.get_list('content-type') 88 | if len(content_type) > 0: 89 | content_type = content_type[0] 90 | # default domain is null 91 | if not domain: 92 | if (method == 'GET' and urlp.query) or \ 93 | (method == 'POST' and response.body): 94 | if ("text/html" in content_type) or \ 95 | ("application/json" in content_type): 96 | if (host_path not in urls_pool) and \ 97 | (urlp.netloc not in blacklist) and \ 98 | (response.code == 200): 99 | urls_pool.add(host_path) 100 | sqli_runner = SqliRunner(response.request) 101 | sqli_runner.run() 102 | # only detect the POST method and the GET 103 | # method with query string in url 104 | elif (method == 'GET' and urlp.query) or \ 105 | (method == 'POST' and response.body): 106 | if urlp.netloc.endswith(domain) and (host_path not in urls_pool) \ 107 | and response.code == 200 and (urlp.netloc not in blacklist): 108 | if ("text/html" in content_type) or \ 109 | ("application/json" in content_type): 110 | urls_pool.add(host_path) 111 | sqli_runner = SqliRunner(response.request) 112 | sqli_runner.run() 113 | 114 | try: 115 | self.set_status(response.code) 116 | except ValueError, e: 117 | pass 118 | # print '[ValueError]:%s' % str(e) 119 | # Set `self._headers` attribute for RequestHandler 120 | headers = self._headers.keys() 121 | for header in headers: 122 | value = response.headers.get(header) 123 | if value: 124 | self.set_header(header, value) 125 | # set the `set-cookie` header 126 | cookies = response.headers.get_list('Set-Cookie') 127 | if cookies: 128 | for i in cookies: 129 | self.add_header('Set-Cookie', i) 130 | try: 131 | if response.code != 304: 132 | self.write(response.body) 133 | except TypeError, e: 134 | pass 135 | # print '[TypeError]:%s' % str(e) 136 | self.finish() 137 | 138 | @tornado.web.asynchronous 139 | def post(self): 140 | self.get() 141 | 142 | @tornado.web.asynchronous 143 | def connect(self): 144 | # 获取request的socket 145 | req_stream = self.request.connection.stream 146 | 147 | # get port 148 | host, port = self.request.uri.split(':') 149 | port = int(port) 150 | 151 | def req_close(data=None): 152 | if conn_stream.closed(): 153 | return 154 | if data: 155 | conn_stream.write(data) 156 | conn_stream.close() 157 | 158 | def write_to_server(data): 159 | conn_stream.write(data) 160 | 161 | def proxy_close(data=None): 162 | if req_stream.closed(): 163 | return 164 | if data: 165 | req_stream.write(data) 166 | req_stream.close(data) 167 | 168 | def write_to_client(data): 169 | req_stream.write(data) 170 | 171 | def on_connect(): 172 | req_stream.read_until_close(req_close, write_to_server) 173 | conn_stream.read_until_close(proxy_close, write_to_client) 174 | req_stream.write(b'HTTP/1.0 200 Connection established\r\n\r\n') 175 | 176 | # 创建iostream 177 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) 178 | conn_stream = tornado.iostream.IOStream(s) 179 | conn_stream.connect((host, port), on_connect) 180 | 181 | if __name__ == '__main__': 182 | create_tables() 183 | with open('blacklist.txt', 'r') as f: 184 | blacklist = f.readlines() 185 | blacklist = [i.replace("\n", "") for i in blacklist] 186 | port = 8080 187 | if len(sys.argv) == 3: 188 | port = int(sys.argv[1]) 189 | domain = sys.argv[2] 190 | print "Starting Proxy on port %s, domain %s" % (port, domain) 191 | handlers = [ 192 | (r'.*', ProxyHandler), 193 | ] 194 | app = tornado.web.Application(handlers=handlers) 195 | app.listen(port) 196 | tornado.ioloop.IOLoop.instance().start() 197 | -------------------------------------------------------------------------------- /sqlirunner.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | 3 | from config import SERVER 4 | from AutoSqli import AutoSqli 5 | from urlparse import urlparse 6 | 7 | class SqliRunner(object): 8 | """ 9 | SqliRunner is used to detect a request from proxy is 10 | injectable or not. 11 | """ 12 | def __init__(self, request): 13 | self.request = request 14 | self.url = request.url 15 | self.data = request.body 16 | self.cookie = self.get_from_headers('cookie') 17 | self.referer = self.get_from_headers('referer') 18 | self.req_text = self.get_raw_request(self.request) 19 | 20 | def get_raw_request(self, request): 21 | """ 22 | Get raw http request body 23 | """ 24 | text = "" 25 | method = request.method 26 | url = request.url 27 | urlp = urlparse(url) 28 | body = request.body 29 | headers = request.headers 30 | protocol = 'HTTP/1.1' 31 | if not urlp.fragment and not urlp.query: 32 | link = "%s" % urlp.path 33 | elif not urlp.fragment: 34 | link = "%s?%s" % (urlp.path, urlp.query) 35 | elif not urlp.query: 36 | link = "%s#%s" % (urlp.path, urlp.fragment) 37 | else: 38 | link = "%s?%s#%s" % (urlp.path, urlp.query, urlp.fragment) 39 | text += "%s %s %s\r\n" % (method, link, protocol) 40 | for h in headers.get_all(): 41 | text += "%s: %s\r\n" % (h[0], h[1]) 42 | text += "\r\n" 43 | if body: text += body 44 | return text 45 | 46 | def get_from_headers(self, key): 47 | try: 48 | item = self.request.headers.get_list(key) 49 | if not item: 50 | return '' 51 | else: 52 | return item[0] 53 | except Exception, e: 54 | return '' 55 | 56 | def run(self): 57 | """ 58 | Run the sqli detection using HTTPRequest object. 59 | """ 60 | try: 61 | detecter = AutoSqli(SERVER, self.url, self.data, 62 | self.referer, self.cookie, self.req_text) 63 | detecter.deamon = True 64 | detecter.start() 65 | except Exception, e: 66 | print e 67 | --------------------------------------------------------------------------------