├── README.md ├── a.py ├── csrf.py ├── run.sh ├── sqlmap.sql ├── tasks.py ├── tasks.py.bak └── x.py /README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/leveryd/sqlmap_celery/307ec545cf8639dd42718ccd0f55cc9636a37c57/README.md -------------------------------------------------------------------------------- /a.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # encoding: utf-8 3 | # tasks.py 4 | # email: ringzero@0x557.org 5 | 6 | ''' 7 | Thorns Project 分布式任务控制脚本 8 | tasks 9 | -- nmap_dispath # nmap 扫描调度函数 10 | -- hydra_dispath # hydra 暴力破解调度函数 11 | -- medusa_dispath # medusa 暴力破解调度函数 12 | 13 | worker run() 14 | --workdir=/home/thorns 15 | ''' 16 | 17 | import subprocess 18 | from celery import Celery, platforms 19 | from time import sleep 20 | import requests,json 21 | import MySQLdb 22 | 23 | # 初始化芹菜对象 24 | app = Celery() 25 | 26 | # 允许celery以root权限启动 27 | platforms.C_FORCE_ROOT = True 28 | 29 | # 修改celery的全局配置 30 | app.conf.update( 31 | CELERY_IMPORTS = ("tasks", ), 32 | BROKER_URL = 'redis://203.195.211.242:6379/0', 33 | CELERY_RESULT_BACKEND = 'db+mysql://thornstest:thornstest@203.195.211.242:3306/thorns', 34 | CELERY_TASK_SERIALIZER='json', 35 | CELERY_RESULT_SERIALIZER='json', 36 | CELERY_TIMEZONE='Asia/Shanghai', 37 | CELERY_ENABLE_UTC=True, 38 | CELERY_REDIS_MAX_CONNECTIONS=5000, # Redis 最大连接数 39 | BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}, # 如果任务没有在 可见性超时 内确认接收,任务会被重新委派给另一个Worker并执行 默认1 hour. 40 | # BROKER_TRANSPORT_OPTIONS = {'fanout_prefix': True}, # 设置一个传输选项来给消息加上前缀 41 | ) 42 | SQLMAPAPI_URL="http://127.0.0.1:9999" 43 | TASK_NEW_URL=SQLMAPAPI_URL+"/task/new" 44 | class Database: 45 | host = '203.195.211.242' 46 | user = 'sqlmap' 47 | password = 'sqlmapx123' 48 | db = 'sqlmap' 49 | charset = 'utf8' 50 | 51 | def __init__(self): 52 | self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db,charset=self.charset) 53 | self.cursor = self.connection.cursor() 54 | 55 | def insert(self, query): 56 | try: 57 | self.cursor.execute(query) 58 | self.connection.commit() 59 | except: 60 | self.connection.rollback() 61 | 62 | def query(self, query): 63 | cursor = self.connection.cursor(MySQLdb.cursors.DictCursor) 64 | cursor.execute(query) 65 | return cursor.fetchall() 66 | 67 | def __del__(self): 68 | self.connection.close() 69 | 70 | @app.task 71 | def sqlmap_dispath(url,cookie,referer,data): 72 | task_new=requests.get(TASK_NEW_URL) 73 | task_id=task_new.json()["taskid"] 74 | if data!="mitm-for-test": 75 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,"cookie":cookie,"referer":referer,"data":data}),headers={"content-type":"application/json"}) 76 | else: 77 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,"cookie":cookie,"referer":referer}),headers={"content-type":"application/json"}) 78 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 79 | count=1 80 | while(task_status.json()["status"]!="terminated"): 81 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 82 | sleep(count) 83 | count=count*2 84 | task_result=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/data") 85 | if task_result.json()['data']!="": 86 | mysql=Database() 87 | mysql.insert("insert into sqlmap_result(taskid,result,url,cookie,referer,data) values('%s','%s','%s','%s','%s','%s')"%("NULL",task_result.json()['data'],url,cookie,referer,data)) 88 | return task_result.json()['data'] 89 | else: 90 | return "nothing" 91 | 92 | # 失败任务重启休眠时间300秒,最大重试次数5次 93 | # @app.task(bind=True, default_retry_delay=300, max_retries=5) 94 | 95 | @app.task 96 | def nmap_dispath(targets, taskid=None): 97 | # nmap环境参数配置 98 | run_script_path = '/home/thorns' 99 | if taskid == None: 100 | cmdline = 'python wyportmap.py %s' % targets 101 | else: 102 | cmdline = 'python wyportmap.py %s %s' % (targets, taskid) 103 | nmap_proc = subprocess.Popen(cmdline,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) 104 | process_output = nmap_proc.stdout.readlines() 105 | return process_output 106 | 107 | @app.task 108 | def hydra_dispath(targets, protocol, userdic, passdic, taskid=None): 109 | # 命令执行环境参数配置 110 | run_script_path = '/home/thorns/script/hydra' 111 | run_env = '{"LD_LIBRARY_PATH": "/home/thorns/libs/"}' 112 | 113 | if taskid == None: 114 | cmdline = 'python hydra.py %s %s %s %s' % (target, protocol, userdic, passdic) 115 | else: 116 | cmdline = 'python hydra.py %s %s %s %s %s' % (target, protocol, userdic, passdic, taskid) 117 | 118 | nmap_proc = subprocess.Popen(cmdline,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd=run_script_path,env=run_env) 119 | 120 | process_output = nmap_proc.stdout.readlines() 121 | return process_output 122 | 123 | @app.task 124 | def medusa_dispath(targets, protocol, userdic, passdic, taskid=None): 125 | # 命令执行环境参数配置 126 | run_script_path = '/home/thorns/script/medusa' 127 | run_env = '{"LD_LIBRARY_PATH": "/home/thorns/libs/"}' 128 | 129 | if taskid == None: 130 | cmdline = 'python medusa.py %s %s %s %s' % (target, protocol, userdic, passdic) 131 | else: 132 | cmdline = 'python medusa.py %s %s %s %s %s' % (target, protocol, userdic, passdic, taskid) 133 | 134 | nmap_proc = subprocess.Popen(cmdline,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE,cwd=run_script_path,env=run_env) 135 | 136 | process_output = nmap_proc.stdout.readlines() 137 | return process_output 138 | @app.task 139 | def subbrute_dispath(targets, taskid=None): 140 | # 命令执行环境参数配置 141 | import os 142 | run_script_path = '/home/ubuntu/thorns/subbrute/' 143 | run_env = '{"LD_LIBRARY_PATH": "/home/ubuntu/thorns/libs/"}' 144 | 145 | if taskid == None: 146 | cmdline = 'python /home/ubuntu/thorns/subbrute/subbrute.py %s' % (targets) 147 | else: 148 | cmdline = 'python /home/ubuntu/thorns/subbrute/subbrute.py %s %s' % (targets,taskid) 149 | 150 | nmap_proc = subprocess.Popen(cmdline,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE) 151 | 152 | process_output = nmap_proc.stdout.readlines() 153 | return process_output 154 | 155 | -------------------------------------------------------------------------------- /csrf.py: -------------------------------------------------------------------------------- 1 | import time 2 | import re 3 | from libmproxy.flow import FlowWriter 4 | from libmproxy.script import concurrent 5 | import requests,json 6 | #from libmproxy.protocol.http import decoded 7 | DEBUG="EBUG2" 8 | CSRF_FOUND_URLS=[] 9 | XXE_FOUND_URLS=[] 10 | SSRF_FOUND_URLS=[] 11 | FILE_INCLUDE_FOUND_URLS=["http://b.scorecardresearch.com/b"] 12 | JSONP_FOUND_URLS=["http://api.share.baidu.com/getnum"] 13 | SQLI_FOUND_URLS=[] 14 | SQLI_POST_HAVE_CHECKED_URLS=[] 15 | SQLI_GET_HAVE_CHECKED_URLS=[] 16 | SSRF_SITE="113.251.171.47" 17 | ######options check which###### 18 | ENABLE_CSRF=0 19 | ENABLE_XXE=0 20 | ENABLE_FILE_INCLUDE=0 21 | ENABLE_SSRF=0 22 | ENABLE_JSONP=0 23 | ENABLE_SQLI=1 24 | def url_exclude(url): 25 | filter_keywords=["js","css","gif","jpeg","png","swf","jpg","ico","http://www.google-analytics.com","http://192.168.0.1","xsxsxrxf=1","xcxsxrxf=1","http://pagead2.googlesyndication.com","http://googleads.g.doubleclick.net","http://pos.baidu.com","http://z8.cnzz.com/stat.htm"] 26 | for keyword in filter_keywords: 27 | if url.find(keyword)!=-1: 28 | return 1 29 | return 0 30 | def url_include(url): 31 | filter_keywords=[] 32 | for keyword in filter_keywords: 33 | if url.find(keyword)==-1: 34 | return 1 35 | return 0 36 | def d_print(msg,level=1): 37 | if DEBUG=="DEBUG"+str(level): 38 | print msg 39 | def p(keywords,content): 40 | for keyword in keywords: 41 | if keyword in content.lower(): 42 | return 1 43 | return 0 44 | def p_re(keywords,content): 45 | for keyword in keywords: 46 | d_print(keyword+" "+content) 47 | r=re.findall(keyword,content.lower()) 48 | if len(r)>0: 49 | return r[0] 50 | return 0 51 | def url_include_site(values): 52 | for value in values: 53 | r=p_re(["(http://)?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})","(http://)?([\s\S]{1,}\.cn)","(http://)?([\s\S]{1,}\.com)","(http://)?([\s\S]{1,}\.tk)","(http://)?([\s\S]{1,}\.so)","(http://)?([\s\S]{1,}\.net)","(http://)?([\s\S]{1,}\.org)"],value) 54 | if r: 55 | return r[1] 56 | return 0 57 | #determine if url parameters contains filename 58 | def php_file_include(values): 59 | for value in values: 60 | if p_re(["[\d\w-]{1,}\.php","[\d\w-]{1,}\.asp","[\d\w-]{1,}\.jsp","[\d\w-]{1,}\.do","[\d\w-]{1,}\.action","[\d\w-]{1,}\.htm"],value): 61 | return 1 62 | return 0 63 | #determine if url parameters contains filename 64 | def file_include(values): 65 | for value in values: 66 | if p_re(["[\d\w-]{1,}\.htm","[\d\w-]{1,}\.php","[\d\w-]{1,}\.asp","[\d\w-]{1,}\.jsp","[\d\w-]{1,}\.do","[\d\w-]{1,}\.action","[\d\w-]{1,}\.txt","[\d\w-]{1,}\.xls","[\d\w-]{1,}\.doc","[\d\w-]{1,}\.xml"],value): 67 | return 1 68 | return 0 69 | def get_values(xxdict): 70 | yydict=[] 71 | for xx in xxdict: 72 | yydict.append(xx[1]) 73 | return yydict 74 | def chongfu(request,listname): 75 | base_url=request.url.split("?")[0] 76 | names=request.get_query().keys() 77 | names.sort() 78 | #determine tow similar url 79 | #such as a.com/a.php?file=a.html and a.com/a.php?file=b.html 80 | if base_url+str(names) not in listname: 81 | return 1 82 | return 0 83 | 84 | def output(listname,request,loudongming,context): 85 | base_url=request.url.split("?")[0] 86 | names=request.get_query().keys() 87 | names.sort() 88 | #determine tow similar url 89 | #such as a.com/a.php?file=a.html and a.com/a.php?file=b.html 90 | if chongfu(request,listname): 91 | print("----"+loudongming+" Found-----\n"+request.method+" "+request.url) 92 | listname.append(base_url+str(names)) 93 | if loudongming=="XXE": 94 | print request.headers 95 | print request.content 96 | d_print(listname,2) 97 | def CSRF_check(request,context): 98 | #token,or some positive keywords,jianshao wubao 99 | TOKEN_KEYWORDS=["token","csrf","search","login","xsrf","capture","captcha","form_hash"] 100 | PAGE_KEYWORDS=["search","login"] 101 | WHITE_KEYWORDS=[""] 102 | request.decode() 103 | if p(TOKEN_KEYWORDS,request.content)==1 or p(TOKEN_KEYWORDS,request.url)==1: 104 | d_print("-----csrf token found----\n"+"url:"+str(request.url)) 105 | elif p(PAGE_KEYWORDS,request.url)==1 or p(PAGE_KEYWORDS,request.content)==1: 106 | d_print("-----post,but it seems fuck page\n"+"url:"+str(request.url)) 107 | else: 108 | if p(["update","edit","add","delete","info","message","action"],request.url+request.content): 109 | #request.headers['cookie']={'cookie=testfortest'} 110 | output(CSRF_FOUND_URLS,request,"CSRF",context) 111 | #https content 112 | if request.url.startswith("http://"): 113 | print(request.content) 114 | # context.replay_request(f,block=True) 115 | else: 116 | #different url 117 | if len(request.get_query())==0: 118 | request.url=request.url+"?xcxsxrxf=1" 119 | else: 120 | request.url=request.url+"&xcxsxrxf=1" 121 | 122 | @concurrent 123 | def request(context, flow): 124 | request=flow.request 125 | #use request.url,not request.host 126 | #because http://anysice.google.com/xxx.php?url=http://test.com/aa 127 | if url_exclude(request.host)==0 and url_include(request.host)==0: 128 | #if f.request.method=="GET": 129 | # f.request.headers['cookie']={'cookie=testfortest'} 130 | # context.replay_request(f,block=True) 131 | values=get_values(request.get_query().items()) 132 | if ENABLE_FILE_INCLUDE: 133 | if file_include(values): 134 | output(FILE_INCLUDE_FOUND_URLS,request,"File include",context) 135 | r=url_include_site(values) 136 | if r: 137 | #SSRF 138 | if ENABLE_SSRF: 139 | #not will be xxx.com/a.gif?wap=xxx.com 140 | if p(["url","domain","share","wap","link","src","source","target","3g","display","u"],str(request.get_query().keys())) and php_file_include(request.get_path_components()): 141 | f=context.duplicate_flow(flow) 142 | f.request.url=f.request.url.replace(r,SSRF_SITE)+"&xsxsxrxf=1" 143 | d_print(f.request.url,2) 144 | context.replay_request(f) 145 | output(SSRF_FOUND_URLS,request,"SSRF",context) 146 | if request.method=="POST": 147 | #CSRF check 148 | if ENABLE_CSRF: 149 | if len(request.headers['user-agent'])>0 and p(["mozilla","firefox","ie"],request.headers['user-agent'][0]): 150 | CSRF_check(request,context) 151 | if ENABLE_SQLI: 152 | #GET request,may not send request to php,asp,jsp,so we need to determine 153 | #POST request,it must be sent to a web server script 154 | if request.method=="GET" and chongfu(request,SQLI_GET_HAVE_CHECKED_URLS) and php_file_include(request.get_path_components()): 155 | #print request.headers 156 | #print type(request.headers) 157 | #add this url to SQLI_GET_HAVE_HECEKD_URLS 158 | names=request.get_query().keys() 159 | names.sort() 160 | #SQLI_GET_HAVE_CHECKED_URLS.append(request.url.split("?")[0]+str(names)) 161 | #print SQLI_GET_HAVE_CHECKED_URLS 162 | #send to celery 163 | cookie="" 164 | referer="" 165 | if len(request.headers['cookie'])>0: 166 | cookie=request.headers['cookie'][0] 167 | if len(request.headers['referer'])>0: 168 | referer=request.headers['referer'][0] 169 | args = {'args': [request.url,cookie,referer,"mitm-test-for-get"]} 170 | resp = requests.post("http://localhost:5555/api/task/async-apply/tasks.sqlmap_dispath", data=json.dumps(args)) 171 | #resp = tasks.sqlmap_dispath.delay(request.url,cookie,referer,"mitm-test-for-get") 172 | print "push ",resp 173 | if request.method=="POST" and chongfu(request,SQLI_POST_HAVE_CHECKED_URLS): 174 | #add this to SQLI_POST_HAVE_CHECKED_URLS 175 | names=request.get_query().keys() 176 | names.sort() 177 | SQLI_POST_HAVE_CHECKED_URLS.append(request.url.split("?")[0]+str(names)) 178 | cookie="" 179 | referer="" 180 | if len(request.headers['cookie'])>0: 181 | cookie=request.headers['cookie'][0] 182 | if len(request.headers['referer'])>0: 183 | referer=request.headers['referer'][0] 184 | data=request.content 185 | args = {'args': [request.url,cookie,referer,data]} 186 | resp = requests.post("http://localhost:5555/api/task/async-apply/tasks.sqlmap_dispath", data=json.dumps(args)) 187 | #resp = tasks.sqlmap_dispath.delay(request.url,cookie,referer,data) 188 | print "push ",resp 189 | 190 | def response(context,flow): 191 | response=flow.response 192 | request=flow.request 193 | #use request.url,not request.host 194 | #because http://anysice.google.com/xxx.php?url=http://test.com/aa 195 | if url_exclude(request.host)==0 and url_include(request.host)==0: 196 | #print request.host,request.url 197 | if ENABLE_CSRF: 198 | if "xcxsxrxf=1" in flow.request.url: 199 | response.decode() 200 | d_print(flow.request.url) 201 | #CSRF response keywords. 202 | if p(["success","fail","data","msg"],response.content): 203 | output(CSRF_FOUND_URLS,flow.request,"CSRF response",context) 204 | if ENABLE_XXE: 205 | if "content-type" in request.headers.keys(): 206 | if len(request.headers["content-type"])>1: 207 | print "two content-type founds" 208 | if p(["xml"],request.headers["content-type"][0]): 209 | output(XXE_FOUND_URLS,request,"XXE",context) 210 | print "XXE RESPONSE" 211 | print request.headers 212 | print response.content 213 | if ENABLE_JSONP: 214 | if "callback" in request.url: 215 | output(JSONP_FOUND_URLS,request,"JSONP",context) 216 | 217 | fangdd smtp password fangduoduo 218 | -------------------------------------------------------------------------------- /run.sh: -------------------------------------------------------------------------------- 1 | sudo /etc/init.d/tomcat7 stop 2 | cd ~/sqlmap 3 | python ~/sqlmap/sqlmapapi.py -s -p 9999& 4 | cd ~/celery 5 | celery worker -A tasks -E --autoscale 10,3& 6 | celery flower -A tasks& 7 | mitmdump -s csrf.py& 8 | -------------------------------------------------------------------------------- /sqlmap.sql: -------------------------------------------------------------------------------- 1 | -- MySQL dump 10.13 Distrib 5.5.43, for debian-linux-gnu (i686) 2 | -- 3 | -- Host: localhost Database: sqlmap 4 | -- ------------------------------------------------------ 5 | -- Server version 5.5.43-0ubuntu0.14.04.1 6 | 7 | /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */; 8 | /*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */; 9 | /*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */; 10 | /*!40101 SET NAMES utf8 */; 11 | /*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */; 12 | /*!40103 SET TIME_ZONE='+00:00' */; 13 | /*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */; 14 | /*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */; 15 | /*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */; 16 | /*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */; 17 | 18 | -- 19 | -- Table structure for table `sqlmap_result` 20 | -- 21 | 22 | DROP TABLE IF EXISTS `sqlmap_result`; 23 | /*!40101 SET @saved_cs_client = @@character_set_client */; 24 | /*!40101 SET character_set_client = utf8 */; 25 | CREATE TABLE `sqlmap_result` ( 26 | `id` int(11) NOT NULL, 27 | `taskid` int(11) DEFAULT NULL, 28 | `result` varchar(1000) DEFAULT NULL, 29 | `url` varchar(256) DEFAULT NULL, 30 | `cookie` varchar(1000) DEFAULT NULL, 31 | `referer` varchar(256) DEFAULT NULL, 32 | `data` varchar(1000) DEFAULT NULL, 33 | PRIMARY KEY (`id`) 34 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1; 35 | /*!40101 SET character_set_client = @saved_cs_client */; 36 | 37 | -- 38 | -- Dumping data for table `sqlmap_result` 39 | -- 40 | 41 | LOCK TABLES `sqlmap_result` WRITE; 42 | /*!40000 ALTER TABLE `sqlmap_result` DISABLE KEYS */; 43 | /*!40000 ALTER TABLE `sqlmap_result` ENABLE KEYS */; 44 | UNLOCK TABLES; 45 | /*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */; 46 | 47 | /*!40101 SET SQL_MODE=@OLD_SQL_MODE */; 48 | /*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */; 49 | /*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */; 50 | /*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */; 51 | /*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */; 52 | /*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */; 53 | /*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */; 54 | 55 | -- Dump completed on 2015-08-25 11:16:14 56 | -------------------------------------------------------------------------------- /tasks.py: -------------------------------------------------------------------------------- 1 | from celery import Celery,platforms 2 | from time import sleep 3 | import requests,json 4 | import MySQLdb 5 | 6 | SQLMAPAPI_URL="http://127.0.0.1:9999" 7 | TASK_NEW_URL=SQLMAPAPI_URL+"/task/new" 8 | app = Celery() 9 | platforms.C_FORCE_ROOT = True 10 | 11 | 12 | app.conf.update( 13 | CELERY_IMPORTS = ("tasks", ), 14 | BROKER_URL = 'redis://203.195.211.242:6379/0', 15 | #CELERY_RESULT_BACKEND = 'db+mysql://root:exp123@127.0.0.1:3306/test', 16 | CELERY_TASK_SERIALIZER='json', 17 | CELERY_RESULT_SERIALIZER='json', 18 | CELERY_TIMEZONE='Asia/Shanghai', 19 | CELERY_ENABLE_UTC=True, 20 | CELERY_REDIS_MAX_CONNECTIONS=5000, 21 | ) 22 | 23 | class Database: 24 | host = '203.195.211.242' 25 | user = 'sqlmap' 26 | password = 'sqlmapx123' 27 | db = 'sqlmap' 28 | charset = 'utf8' 29 | 30 | def __init__(self): 31 | self.connection = MySQLdb.connect(self.host, self.user, self.password, self.db,charset=self.charset) 32 | self.cursor = self.connection.cursor() 33 | 34 | def insert(self, query): 35 | try: 36 | self.cursor.execute(query) 37 | self.connection.commit() 38 | except: 39 | self.connection.rollback() 40 | 41 | def query(self, query): 42 | cursor = self.connection.cursor(MySQLdb.cursors.DictCursor) 43 | cursor.execute(query) 44 | return cursor.fetchall() 45 | 46 | def __del__(self): 47 | self.connection.close() 48 | 49 | @app.task 50 | def sqlmap_dispath(url,cookie,referer,data): 51 | task_new=requests.get(TASK_NEW_URL) 52 | task_id=task_new.json()["taskid"] 53 | if data!="mitm-for-test": 54 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,"cookie":cookie,"referer":referer,"data":data}),headers={"content-type":"application/json"}) 55 | else: 56 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,"cookie":cookie,"referer":referer}),headers={"content-type":"application/json"}) 57 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 58 | count=1 59 | while(task_status.json()["status"]!="terminated"): 60 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 61 | sleep(count) 62 | count=count*2 63 | task_result=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/data") 64 | if task_result.json()['data']!="": 65 | mysql=Database() 66 | mysql.insert("insert into sqlmap_result(taskid,result,url,cookie,referer,data) values('%s','%s','%s','%s','%s','%s')"%("NULL",task_result.json()['data'],url,cookie,referer,data)) 67 | return task_result.json()['data'] 68 | else: 69 | return "nothing" 70 | 71 | #print add("http://contentrecommend-out.mobile.sina.cn/interface/pcright/pcright_topic.php?posid=pos520c8516722cb&psid=PDPS000000051603&wbVersion=v6&uid=2699581760&ip=106.39.10.162&cursor=18&eData=12.33,6&callback=wbad_14381098441337&rnd=14381505350298") 72 | -------------------------------------------------------------------------------- /tasks.py.bak: -------------------------------------------------------------------------------- 1 | from celery import Celery 2 | from time import sleep 3 | import requests,json 4 | app = Celery() 5 | 6 | app.conf.update( 7 | CELERY_IMPORTS = ("tasks", ), 8 | BROKER_URL = 'redis://203.195.211.242:6379/0', 9 | CELERY_RESULT_BACKEND = 'db+mysql://thornstest:thornstest@203.195.211.242:3306/thorns', 10 | CELERY_TASK_SERIALIZER='json', 11 | CELERY_RESULT_SERIALIZER='json', 12 | CELERY_TIMEZONE='Asia/Shanghai', 13 | CELERY_ENABLE_UTC=True, 14 | CELERY_REDIS_MAX_CONNECTIONS=5000, 15 | ) 16 | 17 | celery = Celery('tasks', broker='redis://localhost:6379/0') 18 | SQLMAPAPI_URL="http://127.0.0.1:9999" 19 | TASK_NEW_URL=SQLMAPAPI_URL+"/task/new" 20 | @celery.task 21 | def sqlmap_dispath(url,cookie,referer,data): 22 | task_new=requests.get(TASK_NEW_URL) 23 | task_id=task_new.json()["taskid"] 24 | if data=="mitm-test-for-get": 25 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,'cookie':cookie,"referer":referer}),headers={"content-type":"application/json"}) 26 | else: 27 | requests.post(SQLMAPAPI_URL+"/scan/"+task_id+"/start",data=json.dumps({'url':url,'cookie':cookie,"referer":referer,"data":data}),headers={"content-type":"application/json"}) 28 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 29 | count=1 30 | while(task_status.json()["status"]!="terminated"): 31 | task_status=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/status") 32 | sleep(count) 33 | count=count*2 34 | task_result=requests.get(SQLMAPAPI_URL+"/scan/"+task_id+"/data") 35 | print task_result 36 | return task_result.json() 37 | -------------------------------------------------------------------------------- /x.py: -------------------------------------------------------------------------------- 1 | import requests,json 2 | r=requests.get("http://127.0.0.1:9999/task/new") 3 | taskid=r.json()["taskid"] 4 | rr=requests.post("http://127.0.0.1:9999/scan/"+taskid+"/start",data=json.dumps({'url':"http://127.0.0.1/sql.php?sql=root"}),headers={'content-type':'application/json'}) 5 | print rr.json() 6 | --------------------------------------------------------------------------------